forked from Isabelle_DOF/Isabelle_DOF

Merged Isabelle2018 to master.
This commit is contained in: commit c7d277b9da
@@ -24,7 +24,7 @@
 #
 # SPDX-License-Identifier: BSD-2-Clause

-FROM logicalhacking:isabelle2017
+FROM logicalhacking:isabelle2018

 WORKDIR /home/isabelle
 COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
@@ -5,7 +5,7 @@ text\<open> Offering
-\<^item>
+\<^item> LaTeX support. \<close>


 theory Isa_COL
 imports Isa_DOF
 begin
424 Isa_DOF.thy
@@ -13,20 +13,19 @@ text\<open> Offering
 \<^item> LaTeX support. \<close>

 text\<open> In this section, we develop on the basis of a management of references Isar-markups
-that provide direct support in the PIDE framework.
-\<close>
+that provide direct support in the PIDE framework. \<close>


 theory Isa_DOF (* Isabelle Document Ontology Framework *)
 imports Main
 RegExpInterface (* Interface to functional regular automata for monitoring *)
 Assert

 keywords "+=" ":=" "accepts" "rejects"

 and "title*" "subtitle*"
 "chapter*" "section*" "subsection*" "subsubsection*"
 "paragraph*" "subparagraph*"
 "text*"
 "figure*"
 "side_by_side_figure*"
@ -45,8 +44,7 @@ theory Isa_DOF (* Isabelle Document Ontology Framework *)
|
|||
|
||||
begin
|
||||
|
||||
text\<open> @{file "$ISABELLE_HOME/src/Pure/ROOT.ML"}
|
||||
\<close>
|
||||
text\<open> @{footnote \<open>sdf\<close>}, @{file "$ISABELLE_HOME/src/Pure/ROOT.ML"}\<close>
|
||||
|
||||
section\<open>Primitive Markup Generators\<close>
|
||||
ML\<open>
|
||||
|
@ -87,9 +85,9 @@ fun bstring_to_holstring ctxt x (* (x:bstring) *) : string =
|
|||
|
||||
fun chopper p (x:string) =
|
||||
let fun hss buff [] = rev buff
|
||||
|hss buff (S as a::R) = if p a then let val (front,rest) = take_prefix p S
|
||||
|hss buff (S as a::R) = if p a then let val (front,rest) = chop_prefix p S
|
||||
in hss (String.implode front :: buff) rest end
|
||||
else let val (front,rest) = take_prefix (not o p) S
|
||||
else let val (front,rest) = chop_prefix (not o p) S
|
||||
in hss (String.implode front ::buff) rest end
|
||||
in hss [] (String.explode x) end;
|
||||
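(* Editor's note, not part of the commit: chopper splits a string into the maximal
   runs on which the predicate p alternately holds or fails, e.g.
   chopper Char.isSpace "ab  cd"  yields  ["ab", "  ", "cd"]. *)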

@@ -109,14 +107,15 @@ fun holstring_to_bstring ctxt (x:string) : bstring =

 section\<open> A HomeGrown Document Type Management (the ''Model'') \<close>

 ML\<open>
 structure DOF_core =
 struct

 type docclass_struct = {params : (string * sort) list, (*currently not used *)
 name : binding,
 thy_name : string, id : serial, (* for pide *)
 inherits_from : (typ list * string) option, (* imports *)
-attribute_decl : (binding * typ * term option) list, (* class local *)
+attribute_decl : (binding*typ*term option)list, (* class local *)
 rejectS : term list,
 rex : term list } (* monitoring regexps --- product semantics*)
@ -173,7 +172,8 @@ struct
|
|||
|
||||
type open_monitor_info = {accepted_cids : string list,
|
||||
rejected_cids : string list,
|
||||
automatas : RegExpInterface.automaton list }
|
||||
automatas : RegExpInterface.automaton list
|
||||
}
|
||||
|
||||
type monitor_tab = open_monitor_info Symtab.table
|
||||
val initial_monitor_tab:monitor_tab = Symtab.empty
|
||||
|
@ -364,9 +364,10 @@ fun check_reject_atom cid_long term =
|
|||
(n,_)::_ => error("No schematic variables allowed in monitor regexp:" ^ n)
|
||||
| _ => ()
|
||||
(* Missing: Checks on constants such as undefined, ... *)
|
||||
val _ = case term of
|
||||
(* val _ = case term of
|
||||
Const(_ ,Type(@{type_name "rexp"},[_])) => ()
|
||||
| _ => error("current restriction: only atoms allowed here!")
|
||||
*)
|
||||
in term end
|
||||
|
||||
|
||||
|
@ -385,7 +386,7 @@ fun define_doc_class_global (params', binding) parent fields rexp reject_Atoms t
|
|||
arbitrary type *)
|
||||
NONE => ()
|
||||
| SOME(_,cid_parent) =>
|
||||
if not (is_defined_cid_global cid_parent thy)
|
||||
if not (is_defined_cid_global cid_parent thy)
|
||||
then error("document class undefined : " ^ cid_parent)
|
||||
else ()
|
||||
val cid_long = name2doc_class_name thy cid
|
||||
|
@ -530,7 +531,7 @@ fun update_value_global oid upd thy =
|
|||
SOME{pos,thy_name,value,id,cid} =>
|
||||
let val tab' = Symtab.update(oid,SOME{pos=pos,thy_name=thy_name,
|
||||
value=upd value,id=id, cid=cid})
|
||||
in map_data_global (upd_docobj_tab(fn{tab,maxano}=>{tab=tab' tab,maxano=maxano})) thy end
|
||||
in map_data_global (upd_docobj_tab(fn{tab,maxano}=>{tab=tab' tab,maxano=maxano})) thy end
|
||||
| NONE => error("undefined doc object: "^oid)
|
||||
|
||||
|
||||
|
@ -622,22 +623,16 @@ fun check_doc_global (strict_checking : bool) ctxt =
|
|||
end
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command @{command_keyword print_doc_classes}
|
||||
"print document classes"
|
||||
(Parse.opt_bang >> (fn b =>
|
||||
Toplevel.keep (print_doc_classes b o Toplevel.context_of)));
|
||||
Outer_Syntax.command \<^command_keyword>\<open>print_doc_classes\<close> "print document classes"
|
||||
(Parse.opt_bang >> (fn b => Toplevel.keep (print_doc_classes b o Toplevel.context_of)));
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command @{command_keyword print_doc_items}
|
||||
"print document items"
|
||||
(Parse.opt_bang >> (fn b =>
|
||||
Toplevel.keep (print_doc_items b o Toplevel.context_of)));
|
||||
Outer_Syntax.command \<^command_keyword>\<open>print_doc_items\<close> "print document items"
|
||||
(Parse.opt_bang >> (fn b => Toplevel.keep (print_doc_items b o Toplevel.context_of)));
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command @{command_keyword check_doc_global}
|
||||
"check global document consistency"
|
||||
(Parse.opt_bang >> (fn b =>
|
||||
Toplevel.keep (check_doc_global b o Toplevel.context_of)));
|
||||
Outer_Syntax.command \<^command_keyword>\<open>check_doc_global\<close> "check global document consistency"
|
||||
(Parse.opt_bang >> (fn b => Toplevel.keep (check_doc_global b o Toplevel.context_of)));
|
||||
|
||||
|
||||
|
||||
|
@ -712,13 +707,11 @@ fun write_ontology_latex_sty_template thy =
|
|||
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command @{command_keyword gen_sty_template}
|
||||
"generate a template LaTeX style file for this ontology"
|
||||
(Parse.opt_bang >> (fn b =>
|
||||
Toplevel.keep (write_ontology_latex_sty_template o Toplevel.theory_of)));
|
||||
Outer_Syntax.command \<^command_keyword>\<open>gen_sty_template\<close> "generate a LaTeX style template"
|
||||
(Parse.opt_bang>>(fn b => Toplevel.keep(write_ontology_latex_sty_template o Toplevel.theory_of)));
|
||||
|
||||
val (strict_monitor_checking, strict_monitor_checking_setup)
|
||||
= Attrib.config_bool @{binding strict_monitor_checking} (K false);
|
||||
= Attrib.config_bool \<^binding>\<open>strict_monitor_checking\<close> (K false);
|
||||
|
||||
end (* struct *)
|
||||
|
||||
|
@ -738,7 +731,7 @@ typedecl "thm"
|
|||
typedecl "file"
|
||||
typedecl "thy"
|
||||
|
||||
-- \<open> and others in the future : file, http, thy, ... \<close>
|
||||
\<comment> \<open> and others in the future : file, http, thy, ... \<close>
|
||||
|
||||
consts ISA_typ :: "string \<Rightarrow> typ" ("@{typ _}")
|
||||
consts ISA_term :: "string \<Rightarrow> term" ("@{term _}")
|
||||
|
@ -751,71 +744,8 @@ consts ISA_docitem_attr :: "string \<Rightarrow> string \<Rightarrow> 'a" (
|
|||
|
||||
\<comment> \<open>Dynamic setup of inner syntax cartouche\<close>
|
||||
|
||||
ML \<open>
|
||||
(* Author: Frédéric Tuong, Université Paris-Saclay *)
|
||||
(* Title: HOL/ex/Cartouche_Examples.thy
|
||||
Author: Makarius
|
||||
*)
|
||||
local
|
||||
fun mk_char f_char (s, _) accu =
|
||||
fold
|
||||
(fn c => fn (accu, l) =>
|
||||
(f_char c accu,
|
||||
Syntax.const @{const_syntax Cons}
|
||||
$ (Syntax.const @{const_syntax Char} $ HOLogic.mk_numeral c)
|
||||
$ l))
|
||||
(rev (map Char.ord (String.explode s)))
|
||||
accu;
|
||||
|
||||
fun mk_string _ accu [] = (accu, Const (@{const_syntax Nil}, @{typ "char list"}))
|
||||
| mk_string f_char accu (s :: ss) = mk_char f_char s (mk_string f_char accu ss);
|
||||
|
||||
in
|
||||
fun string_tr f f_char accu content args =
|
||||
let fun err () = raise TERM ("string_tr", args) in
|
||||
(case args of
|
||||
[(c as Const (@{syntax_const "_constrain"}, _)) $ Free (s, _) $ p] =>
|
||||
(case Term_Position.decode_position p of
|
||||
SOME (pos, _) => c $ f (mk_string f_char accu (content (s, pos))) $ p
|
||||
| NONE => err ())
|
||||
| _ => err ())
|
||||
end;
|
||||
end;
|
||||
\<close>
|
||||
|
||||
syntax "_cartouche_string" :: "cartouche_position \<Rightarrow> _" ("_")
|
||||
|
||||
ML\<open>
|
||||
val cartouche_grammar =
|
||||
[ ("char list", snd)
|
||||
, ("String.literal", (fn (_, x) => Syntax.const @{const_syntax STR} $ x))]
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
fun parse_translation_cartouche binding l f_char accu =
|
||||
let val cartouche_type = Attrib.setup_config_string binding (K (fst (hd l)))
|
||||
(* if there is no type specified, by default we set the first element
|
||||
to be the default type of cartouches *) in
|
||||
fn ctxt =>
|
||||
string_tr
|
||||
let val cart_type = Config.get ctxt cartouche_type in
|
||||
case (List.find (fn (s, _) => s = cart_type)
|
||||
l) of
|
||||
NONE => error ("Unregistered return type for the cartouche: \"" ^ cart_type ^ "\"")
|
||||
| SOME (_, f) => f
|
||||
end
|
||||
f_char
|
||||
accu
|
||||
(Symbol_Pos.cartouche_content o Symbol_Pos.explode)
|
||||
end
|
||||
\<close>
|
||||
|
||||
parse_translation \<open>
|
||||
[( @{syntax_const "_cartouche_string"}
|
||||
, parse_translation_cartouche @{binding cartouche_type} cartouche_grammar (K I) ())]
|
||||
\<close>
|
||||
|
||||
(* (* PORT TO ISABELLE2018 *)
|
||||
(* PORT TO ISABELLE2018 *)
|
||||
ML \<open>
|
||||
(* Author: Frédéric Tuong, Université Paris-Saclay *)
|
||||
(* Title: HOL/ex/Cartouche_Examples.thy
|
||||
|
@ -878,9 +808,8 @@ fun parse_translation_cartouche binding l f_integer accu =
|
|||
|
||||
parse_translation \<open>
|
||||
[( @{syntax_const "_cartouche_string"}
|
||||
, parse_translation_cartouche @{binding cartouche_type} Cartouche_Grammar.default (K I) ())]
|
||||
, parse_translation_cartouche \<^binding>\<open>cartouche_type\<close> Cartouche_Grammar.default (K I) ())]
|
||||
\<close>
|
||||
*)
|
||||
|
||||
(* tests *)
|
||||
term "@{typ ''int => int''}"
|
||||
|
@ -888,18 +817,18 @@ term "@{term ''Bound 0''}"
|
|||
term "@{thm ''refl''}"
|
||||
term "@{docitem ''<doc_ref>''}"
|
||||
ML\<open> @{term "@{docitem ''<doc_ref>''}"}\<close>
|
||||
(**)
|
||||
term "@{typ \<open>int => int\<close>}"
|
||||
term "@{term \<open>Bound 0\<close>}"
|
||||
|
||||
term "@{typ \<open>int \<Rightarrow> int\<close>}"
|
||||
term "@{term \<open>\<forall>x. P x \<longrightarrow> Q\<close>}"
|
||||
term "@{thm \<open>refl\<close>}"
|
||||
term "@{docitem \<open><doc_ref>\<close>}"
|
||||
ML\<open> @{term "@{docitem \<open><doc_ref>\<close>}"}\<close>
|
||||
term "@{docitem \<open>doc_ref\<close>}"
|
||||
ML\<open> @{term "@{docitem \<open>doc_ref\<close>}"}\<close>
|
||||
(**)
|
||||
declare [[cartouche_type = "String.literal"]]
|
||||
term "\<open>Université\<close> :: String.literal"
|
||||
declare [[cartouche_type = "char list"]]
|
||||
term "\<open>Université\<close> :: char list"
|
||||
(**)
|
||||
|
||||
|
||||
|
||||
subsection\<open> Semantics \<close>
|
||||
|
@ -1010,10 +939,78 @@ setup\<open>DOF_core.update_isa_global("docitem" ,ISA_core.ML_isa_check_docitem
|
|||
|
||||
section\<open> Syntax for Annotated Documentation Commands (the '' View'' Part I) \<close>
|
||||
|
||||
ML\<open>open Thy_Output;
|
||||
|
||||
(*Pure_Syn.output_document;*)
|
||||
Pure_Syn.document_command;
|
||||
\<close>
|
||||
|
||||
text\<open>dfg\<close> (* text maps to Pure_Syn.document_command; *)
|
||||
|
||||
(*
|
||||
================== 2018 ======================================================
|
||||
(* Exported from Pure_Syn *)
|
||||
|
||||
fun output_document state markdown txt =
|
||||
let
|
||||
val ctxt = Toplevel.presentation_context state;
|
||||
val _ =
|
||||
Context_Position.report ctxt
|
||||
(Input.pos_of txt) (Markup.language_document (Input.is_delimited txt));
|
||||
in Thy_Output.output_document ctxt markdown txt end;
|
||||
|
||||
fun document_command markdown (loc, txt) =
|
||||
Toplevel.keep (fn state =>
|
||||
(case loc of
|
||||
NONE => ignore (output_document state markdown txt)
|
||||
| SOME (_, pos) =>
|
||||
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
|
||||
Toplevel.present_local_theory loc (fn state =>
|
||||
ignore (output_document state markdown txt));
|
||||
|
||||
|
||||
====================== 2017 ===================================================
|
||||
|
||||
(* Exported from Thy_Output *)
|
||||
fun document_command markdown (loc, txt) =
|
||||
Toplevel.keep (fn state =>
|
||||
(case loc of
|
||||
NONE => ignore (output_text state markdown txt)
|
||||
| SOME (_, pos) =>
|
||||
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
|
||||
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
|
||||
|
||||
|
||||
*)
|
||||
|
||||
ML\<open> Pure_Syn.document_command;
|
||||
structure Pure_Syn_Ext =
|
||||
struct
|
||||
(* This interface function has not been exported from Pure_Syn (2018);
|
||||
it should replace
|
||||
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string (2017) *)
|
||||
|
||||
|
||||
fun output_document state markdown src =
|
||||
let
|
||||
val ctxt = Toplevel.presentation_context state;
|
||||
val _ = Context_Position.report ctxt
|
||||
(Input.pos_of src)
|
||||
(Markup.language_document (Input.is_delimited src));
|
||||
in Thy_Output.output_document ctxt markdown src end;
|
||||
(* this thing converts also source to (latex) string ... *)
|
||||
|
||||
end;
|
||||
|
||||
Pure_Syn_Ext.output_document : Toplevel.state -> {markdown: bool} -> Input.source -> Latex.text list;
|
||||
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
structure ODL_Command_Parser =
|
||||
struct
|
||||
|
||||
|
||||
type meta_args_t = (((string * Position.T) *
|
||||
(string * Position.T) option)
|
||||
* ((string * Position.T) * string) list)
|
||||
|
@ -1253,6 +1250,7 @@ fun update_instance_command (((oid:string,pos),cid_pos),
|
|||
end
|
||||
|
||||
|
||||
|
||||
fun gen_enriched_document_command transform
|
||||
markdown
|
||||
(((((oid,pos),cid_pos), doc_attrs) : meta_args_t,
|
||||
|
@ -1261,7 +1259,8 @@ fun gen_enriched_document_command transform
|
|||
: theory -> theory =
|
||||
let
|
||||
(* as side-effect, generates markup *)
|
||||
fun check_text thy = (Thy_Output.output_text(Toplevel.theory_toplevel thy) markdown toks; thy)
|
||||
fun check_text thy = (Pure_Syn_Ext.output_document(Toplevel.theory_toplevel thy) markdown toks;
|
||||
thy)
|
||||
(* generating the level-attribute syntax *)
|
||||
in
|
||||
(create_and_check_docitem false oid pos cid_pos (transform doc_attrs) #> check_text)
|
||||
|
@ -1458,10 +1457,8 @@ val _ =
|
|||
|
||||
|
||||
end (* struct *)
|
||||
|
||||
\<close>
|
||||
|
||||
|
||||
ML\<open>
|
||||
structure ODL_LTX_Converter =
|
||||
struct
|
||||
|
@ -1475,14 +1472,13 @@ fun meta_args_2_string thy ((((lab, _), cid_opt), attr_list) : ODL_Command_Parse
|
|||
| SOME(cid,_) => DOF_core.name2doc_class_name thy cid
|
||||
val cid_txt = "type = " ^ (enclose "{" "}" cid_long);
|
||||
|
||||
|
||||
fun ltx_of_term _ _ (((Const ("List.list.Cons", t1) $ (Const ("String.Char", t2 ) $ t))) $ t')
|
||||
= (HOLogic.dest_string (((Const ("List.list.Cons", t1) $ (Const ("String.Char", t2 ) $ t))) $ t'))
|
||||
fun ltx_of_term _ _ (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
|
||||
= HOLogic.dest_string (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
|
||||
| ltx_of_term _ _ (Const ("List.list.Nil", _)) = ""
|
||||
| ltx_of_term _ _ (@{term "numeral :: _ \<Rightarrow> _"} $ t) = Int.toString(HOLogic.dest_numeral t)
|
||||
| ltx_of_term ctx encl ((Const ("List.list.Cons", _) $ t1) $ t2) =
|
||||
let val inner = (case t2 of
|
||||
Const ("List.list.Nil", _) => (ltx_of_term ctx true t1)
|
||||
Const ("List.list.Nil", _) => (ltx_of_term ctx true t1)
|
||||
| _ => ((ltx_of_term ctx false t1)^", " ^(ltx_of_term ctx false t2))
|
||||
)
|
||||
in if encl then enclose "{" "}" inner else inner end
|
||||
|
@ -1528,15 +1524,23 @@ fun meta_args_2_string thy ((((lab, _), cid_opt), attr_list) : ODL_Command_Parse
|
|||
end
|
||||
(* the following 2 lines set parser and converter for LaTeX generation of meta-attributes.
|
||||
Currently of *all* commands, no distinction between text* and text command.
|
||||
This code depends on a MODIFIED Isabelle2017 version resulting fro; applying the files
|
||||
This code depends on a MODIFIED Isabelle2017 version resulting from applying the files
|
||||
under Isabell_DOF/patches.
|
||||
*)
|
||||
val _ = Thy_Output.set_meta_args_parser
|
||||
(fn thy => (Scan.optional (ODL_Command_Parser.attributes >> meta_args_2_string thy) ""))
|
||||
*)
|
||||
(* REMARK PORT 2018 : transmission of meta-args to LaTeX crude and untested. Can be found in
|
||||
present_token. *)
|
||||
|
||||
|
||||
end
|
||||
\<close>
|
||||
ML\<open>
|
||||
val _ = Thy_Output.set_meta_args_parser
|
||||
(fn thy => let val _ = writeln "META_ARGS_PARSING"
|
||||
in (Scan.optional ( ODL_Command_Parser.attributes
|
||||
>> ODL_LTX_Converter.meta_args_2_string thy) "")
|
||||
end);
|
||||
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
|
@ -1581,8 +1585,56 @@ end\<close>
|
|||
|
||||
section\<open> Syntax for Ontological Antiquotations (the '' View'' Part II) \<close>
|
||||
|
||||
text\<open> @{theory Main} @{file "Isa_DOF.thy"}\<close>
|
||||
|
||||
(* Paradigm theory or paradigm file ?
|
||||
|
||||
val basic_entity = Thy_Output.antiquotation_pretty_source;
|
||||
...
|
||||
basic_entity \<^binding>\<open>theory\<close> (Scan.lift (Parse.position Args.embedded)) pretty_theory #>
|
||||
|
||||
Thy_Output.antiquotation_raw \<^binding>\<open>file\<close>
|
||||
(Scan.lift (Parse.position Parse.path)) (document_antiq check_file) #>
|
||||
|
||||
*)
|
||||
|
||||
ML\<open>
|
||||
(* 2017: used eg by ::: text\<open> @{theory Main}\<close>
|
||||
antiquotation:
|
||||
binding -> 'a context_parser ->
|
||||
({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string)
|
||||
-> theory -> theory
|
||||
*)
|
||||
|
||||
(* 2018 >>> *)
|
||||
val basic_entity' = Thy_Output.antiquotation_raw
|
||||
: binding -> 'a context_parser ->
|
||||
(Proof.context -> 'a -> Latex.text)
|
||||
-> theory -> theory;
|
||||
|
||||
val basic_entity = Thy_Output.antiquotation_pretty_source
|
||||
: binding -> 'a context_parser ->
|
||||
(Proof.context -> 'a -> Pretty.T)
|
||||
-> theory -> theory;
|
||||
|
||||
(* or ? *)
|
||||
val docref_scan = (Scan.lift (Parse.position Args.embedded))
|
||||
: (string * Position.T) context_parser;
|
||||
(*val X = Document_Antiquotation.setup \<^binding>\<open>docref\<close> docref_scan ; *)
|
||||
\<close>
|
||||
|
||||
text\<open> @{theory Main}\<close>
|
||||
|
||||
ML\<open>
|
||||
Latex.string : string -> Latex.text ;
|
||||
Latex.text: string * Position.T -> Latex.text;
|
||||
Latex.block: Latex.text list -> Latex.text;
|
||||
Latex.enclose_body: string -> string -> Latex.text list -> Latex.text list;
|
||||
Latex.enclose_block: string -> string -> Latex.text list -> Latex.text;
|
||||
Latex.output_text: Latex.text list -> string;
|
||||
\<close>
|
||||
|
||||
|
||||
ML\<open>
|
||||
structure OntoLinkParser =
|
||||
struct
|
||||
|
@ -1600,14 +1652,16 @@ fun check_and_mark ctxt cid_decl (str:{strict_checking: bool}) pos name =
|
|||
andalso not(DOF_core.is_subclass ctxt cid cid_decl)
|
||||
then error("reference ontologically inconsistent")
|
||||
else ()
|
||||
in name end
|
||||
in () end
|
||||
else if DOF_core.is_declared_oid_global name thy
|
||||
then (if #strict_checking str
|
||||
then warning("declared but undefined document reference:"^name)
|
||||
else (); name)
|
||||
else ())
|
||||
else error("undefined document reference:"^name)
|
||||
end
|
||||
|
||||
val _ = check_and_mark : Proof.context -> string -> {strict_checking: bool} ->
|
||||
Position.T -> Symtab.key -> unit
|
||||
|
||||
(* generic syntax for doc_class links. *)
|
||||
|
||||
|
@ -1620,35 +1674,29 @@ val docitem_modes = Scan.optional (Args.parens (Args.$$$ defineN || Args.$$$ unc
|
|||
else {unchecked = true, define= false}))
|
||||
{unchecked = false, define= false} (* default *);
|
||||
|
||||
|
||||
val docitem_antiquotation_parser = (Scan.lift (docitem_modes -- Args.text_input))
|
||||
: ({define: bool, unchecked: bool} * Input.source) context_parser;
|
||||
|
||||
fun docitem_antiquotation_generic cid_decl
|
||||
{context = ctxt, source = src:Token.src, state}
|
||||
({unchecked = x, define= y}, source:Input.source) =
|
||||
let fun label_latex flag = enclose (if flag then "\\label{" else "\\autoref{") "}"
|
||||
val X1 = Thy_Output.output_text state {markdown=false}
|
||||
: Input.source -> string
|
||||
val X2 = check_and_mark ctxt
|
||||
cid_decl
|
||||
({strict_checking = not x})
|
||||
(Input.pos_of source)
|
||||
: string -> string
|
||||
val X3 = label_latex y
|
||||
: string -> string
|
||||
|
||||
in
|
||||
(Thy_Output.output_text state {markdown=false} #>
|
||||
check_and_mark ctxt
|
||||
cid_decl
|
||||
({strict_checking = not x})
|
||||
(Input.pos_of source) #>
|
||||
label_latex y)
|
||||
source
|
||||
end
|
||||
|
||||
|
||||
fun docitem_antiquotation name cid_decl =
|
||||
Thy_Output.antiquotation name docitem_antiquotation_parser (docitem_antiquotation_generic cid_decl)
|
||||
|
||||
fun pretty_docitem_antiquotation_generic cid_decl ctxt ({unchecked = x, define = y}, src ) =
|
||||
let (* val _ = writeln ("ZZZ" ^ Input.source_content src ^ "::2::" ^ cid_decl) *)
|
||||
val _ = check_and_mark ctxt cid_decl
|
||||
({strict_checking = not x})
|
||||
(Input.pos_of src) (Input.source_content src)
|
||||
in
|
||||
(if y then Latex.enclose_block ("\\csname isadof.label[type={"^cid_decl^"}]{") "}\\endcsname"
|
||||
else Latex.enclose_block ("\\csname isadof.ref[type={"^cid_decl^"}]{") "}\\endcsname")
|
||||
[Latex.string (Input.source_content src)]
|
||||
end
|
||||
|
||||
|
||||
|
||||
fun docitem_antiquotation bind cid =
|
||||
Thy_Output.antiquotation_raw bind docitem_antiquotation_parser
|
||||
(pretty_docitem_antiquotation_generic cid);
|
||||
|
||||
|
||||
fun check_and_mark_term ctxt oid =
|
||||
let val thy = Context.theory_of ctxt;
|
||||
|
@ -1663,8 +1711,7 @@ fun check_and_mark_term ctxt oid =
|
|||
in value end
|
||||
else error("undefined document reference:"^oid)
|
||||
end
|
||||
|
||||
|
||||
|
||||
|
||||
fun ML_antiquotation_docitem_value (ctxt, toks) =
|
||||
(Scan.lift (Args.cartouche_input)
|
||||
|
@ -1672,17 +1719,15 @@ fun ML_antiquotation_docitem_value (ctxt, toks) =
|
|||
((check_and_mark_term ctxt o Input.source_content) inp)))
|
||||
(ctxt, toks)
|
||||
|
||||
|
||||
(* Setup for general docrefs of the global DOF_core.default_cid - class ("text")*)
|
||||
val _ = Theory.setup((docitem_antiquotation @{binding docref} DOF_core.default_cid) #>
|
||||
(* deprecated syntax ^^^^^^*)
|
||||
(docitem_antiquotation @{binding docitem_ref} DOF_core.default_cid) #>
|
||||
(* deprecated syntax ^^^^^^^^^^*)
|
||||
docitem_antiquotation @{binding docitem} DOF_core.default_cid #>
|
||||
(* Thy_Output.antiquotation @{binding docitem} docitem_antiquotation_parser
|
||||
(docitem_antiquotation_generic DOF_core.default_cid) #>
|
||||
*)
|
||||
ML_Antiquotation.inline @{binding docitem_value} ML_antiquotation_docitem_value)
|
||||
val _ = Theory.setup
|
||||
(docitem_antiquotation \<^binding>\<open>docref\<close> DOF_core.default_cid #>
|
||||
(* deprecated syntax ^^^^^^*)
|
||||
docitem_antiquotation \<^binding>\<open>docitem_ref\<close> DOF_core.default_cid #>
|
||||
(* deprecated syntax ^^^^^^^^^^^*)
|
||||
docitem_antiquotation \<^binding>\<open>docitem\<close> DOF_core.default_cid #>
|
||||
|
||||
ML_Antiquotation.inline \<^binding>\<open>docitem_value\<close> ML_antiquotation_docitem_value)
|
||||
|
||||
end (* struct *)
|
||||
\<close>
|
||||
|
@ -1757,12 +1802,6 @@ fun trace_attr_2_ML ctxt (oid:string,pos) =
|
|||
in toML (compute_trace_ML ctxt oid @{here} pos) end
|
||||
|
||||
local
|
||||
(* copied from "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML" *)
|
||||
fun basic_entities name scan pretty =
|
||||
Thy_Output.antiquotation name scan (fn {source, context = ctxt, ...} =>
|
||||
Thy_Output.output ctxt o Thy_Output.maybe_pretty_source pretty ctxt source);
|
||||
|
||||
fun basic_entity name scan = basic_entities name (scan >> single);
|
||||
|
||||
fun pretty_attr_access_style ctxt (style, ((oid,pos),(attr,pos'))) =
|
||||
Thy_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
|
||||
|
@ -1772,19 +1811,20 @@ fun pretty_trace_style ctxt (style, (oid,pos)) =
|
|||
"trace" oid pos pos));
|
||||
in
|
||||
val _ = Theory.setup
|
||||
(ML_Antiquotation.inline @{binding docitem_attribute}
|
||||
(ML_Antiquotation.inline \<^binding>\<open>docitem_attribute\<close>
|
||||
(fn (ctxt,toks) => (parse_attribute_access >> attr_2_ML ctxt) (ctxt, toks)) #>
|
||||
ML_Antiquotation.inline @{binding trace_attribute}
|
||||
ML_Antiquotation.inline \<^binding>\<open>trace_attribute\<close>
|
||||
(fn (ctxt,toks) => (parse_oid >> trace_attr_2_ML ctxt) (ctxt, toks)) #>
|
||||
basic_entity @{binding trace_attribute} parse_oid' pretty_trace_style #>
|
||||
basic_entity @{binding docitem_attribute} parse_attribute_access' pretty_attr_access_style
|
||||
basic_entity \<^binding>\<open>trace_attribute\<close> parse_oid' pretty_trace_style #>
|
||||
basic_entity \<^binding>\<open>docitem_attribute\<close> parse_attribute_access' pretty_attr_access_style
|
||||
)
|
||||
end
|
||||
end
|
||||
\<close>
|
||||
|
||||
text\<open> Note that the functions \<^verbatim>\<open>basic_entities\<close> and \<^verbatim>\<open>basic_entity\<close> in @{ML_structure AttributeAccess}
|
||||
are copied from @{file "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"} \<close>
|
||||
text\<open> Note that the functions \<^verbatim>\<open>basic_entities\<close> and \<^verbatim>\<open>basic_entity\<close> in
|
||||
@{ML_structure AttributeAccess} are copied from
|
||||
@{file "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"} \<close>
|
||||
|
||||
|
||||
section\<open> Syntax for Ontologies (the '' View'' Part III) \<close>
|
||||
|
@ -1877,21 +1917,45 @@ fun add_doc_class_cmd overloaded (raw_params, binding)
|
|||
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command @{command_keyword doc_class} "define document class"
|
||||
Outer_Syntax.command \<^command_keyword>\<open>doc_class\<close> "define document class"
|
||||
((Parse_Spec.overloaded
|
||||
-- (Parse.type_args_constrained -- Parse.binding)
|
||||
-- (@{keyword "="}
|
||||
|-- Scan.option (Parse.typ --| @{keyword "+"})
|
||||
-- Scan.repeat1
|
||||
(Parse.const_binding -- Scan.option (@{keyword "<="} |-- Parse.term)))
|
||||
-- ( Scan.optional (@{keyword "rejects"} |-- Parse.enum1 "," Parse.term) []
|
||||
-- Scan.repeat (@{keyword "accepts"} |-- Parse.term)))
|
||||
-- (Parse.type_args_constrained -- Parse.binding)
|
||||
-- (\<^keyword>\<open>=\<close>
|
||||
|-- Scan.option (Parse.typ --| \<^keyword>\<open>+\<close>)
|
||||
-- Scan.repeat1 (Parse.const_binding -- Scan.option (\<^keyword>\<open><=\<close> |-- Parse.term))
|
||||
)
|
||||
-- ( Scan.optional (\<^keyword>\<open>rejects\<close> |-- Parse.enum1 "," Parse.term) []
|
||||
-- Scan.repeat (\<^keyword>\<open>accepts\<close> |-- Parse.term))
|
||||
)
|
||||
>> (fn (((overloaded, x), (y, z)),(rejectS,accept_rex)) =>
|
||||
Toplevel.theory (add_doc_class_cmd {overloaded = overloaded} x y z rejectS accept_rex)));
|
||||
|
||||
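(* Editor's illustration, not part of the commit (class and attribute names are hypothetical):
   the parser above accepts declarations of roughly this shape, reading left to right:
   type args and binding, an optional parent class after "=" ... "+", attribute bindings
   with optional "<=" defaults, then optional "rejects" and "accepts" clauses:

     doc_class requirement = text_element +
        long_name    :: "string option"
        is_concerned :: "role set" <= "UNIV"
        rejects      "meta_arg"
        accepts      "\<lbrace>sub_requirement\<rbrace>\<^sup>*"
*)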
end (* struct *)
\<close>

ML\<open>Thy_Output.document_command; Thy_Output.output_text\<close>
(*
ML\<open>
Pretty.text;
Pretty.str;
Pretty.block_enclose;
\<close>

ML\<open>Pretty.text_fold; Pretty.unformatted_string_of\<close>
ML\<open> (String.concatWith ","); Token.content_of\<close>


ML\<open>
Document.state;
Session.get_keywords();
Parse.command;
Parse.tags;
\<close>
ML\<open>
Outer_Syntax.print_commands @{theory};
Outer_Syntax.parse_spans;
Parse.!!!;

\<close>
*)

end
5 LICENSE
@ -1,5 +1,6 @@
|
|||
Copyright (C) 2018 The University of Sheffield
|
||||
2018 The University of Paris-Sud
|
||||
Copyright (C) 2018-2019 The University of Sheffield
|
||||
2019-2019 The University of Exeter
|
||||
2018-2019 The University of Paris-Sud
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
|
|
16 README.md
@ -6,9 +6,9 @@ well as formal development.
|
|||
|
||||
## Prerequisites
|
||||
|
||||
Isabelle/DOF requires [Isabelle 2017](http://isabelle.in.tum.de/website-Isabelle2017/).
|
||||
Please download the Isabelle 2017 distribution for your operating
|
||||
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2017/).
|
||||
Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
|
||||
Please download the Isabelle 2018 distribution for your operating
|
||||
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
|
||||
|
||||
## Installation
|
||||
|
||||
|
@ -22,7 +22,7 @@ If a specific Isabelle version should be used (i.e., not the default
|
|||
one), the full path to the ``isabelle`` command needs to be passed as
|
||||
using the ``-i`` command line argument of the ``install`` script:
|
||||
```console
|
||||
foo@bar:~$ ./install -i /usr/local/Isabelle2017/bin/isabelle
|
||||
foo@bar:~$ ./install -i /usr/local/Isabelle2018/bin/isabelle
|
||||
```
|
||||
|
||||
For further command line options of the installer, please use the
|
||||
|
@ -36,22 +36,22 @@ foo@bar:~$ ./install -h
|
|||
The installer will
|
||||
* apply a patch to Isabelle that is necessary to use Isabelle/DOF.
|
||||
If this patch installations fails, you need to manually replace
|
||||
the file ``Isabelle2017/src/Pure/Thy/thy_output.ML`` in the Isabelle
|
||||
the file ``Isabelle2018/src/Pure/Thy/thy_output.ML`` in the Isabelle
|
||||
distribution with the file ``patches/thy_output.ML`` from the
|
||||
Isabelle/DOF distribution:
|
||||
```console
|
||||
cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
|
||||
```
|
||||
* install required entries from the [AFP](https://www.isa-afp.org). If this
|
||||
installations fails, you need to manually install the AFP for Isabelle 2017 as follows:
|
||||
Download the [AFP for Isabelle 2017](https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz")
|
||||
installations fails, you need to manually install the AFP for Isabelle 2018 as follows:
|
||||
Download the [AFP for Isabelle 2018](https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz)
|
||||
and follow the [instructions for installing the AFP as Isabelle
|
||||
component](https://www.isa-afp.org/using.html). If you have extracted
|
||||
the AFP archive into the directory to `/home/myself/afp`, you should
|
||||
run the following command to make the AFP session `ROOTS` available to
|
||||
Isabelle:
|
||||
```console
|
||||
echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2017/ROOTS
|
||||
echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2018/ROOTS
|
||||
```
|
||||
* install the Isabelle/DOF-plugin into the Isabelle user directory
|
||||
(the exact location depends on the Isabelle version).
|
||||
|
|
3 ROOT
@ -4,10 +4,9 @@ session "Isabelle_DOF" = "Functional-Automata" +
|
|||
"Regular-Sets"
|
||||
theories
|
||||
Isa_DOF
|
||||
"ontologies/CENELEC_50128"
|
||||
"ontologies/Conceptual"
|
||||
"ontologies/CENELEC_50128"
|
||||
"ontologies/scholarly_paper"
|
||||
"ontologies/technical_report"
|
||||
"ontologies/mathex_onto"
|
||||
|
||||
|
||||
|
|
@@ -4,9 +4,10 @@ theory RegExpInterface
imports "Functional-Automata.Execute"
begin

text\<open> The implementation of the monitoring concept rests on these design decisions:
-\<^enum> We re-use generated code from the AFP submissions @{theory Regular_Set} and
-@{theory Automata}, converted by the code-generator into executable SML code
+\<^enum> We re-use generated code from the AFP submissions @{theory "Regular-Sets.Regular_Set"} and
+@{theory "Functional-Automata.Automata"}, converted by the code-generator into executable SML code
(ports to future Isabelle versions should just reuse future versions of these)
\<^enum> Monitor-Expressions are regular expressions (in some adapted syntax)
over Document Class identifiers; they denote the language of all possible document object
@ -40,7 +41,8 @@ text{* or better equivalently: *}
|
|||
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
|
||||
|
||||
section{* Some Standard and Derived Semantics *}
|
||||
text\<open> This is just a reminder - already defined in @{theory Regular_Exp} as @{term lang}.\<close>
|
||||
text\<open> This is just a reminder - already defined in @{theory "Regular-Sets.Regular_Exp"}
|
||||
as @{term lang}.\<close>
|
||||
|
||||
text{* In the following, we give a semantics for our regular expressions, which so far have
|
||||
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',
|
||||
|
|
|
@@ -0,0 +1,375 @@
section "Derivatives of regular expressions"

(* Author: Christian Urban *)

theory Derivatives
imports Regular_Exp
begin

text\<open>This theory is based on work by Brzozowski \cite{Brzozowski64} and Antimirov \cite{Antimirov95}.\<close>

subsection \<open>Brzozowski's derivatives of regular expressions\<close>

primrec
deriv :: "'a \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp"
where
"deriv c (Zero) = Zero"
| "deriv c (One) = Zero"
| "deriv c (Atom c') = (if c = c' then One else Zero)"
| "deriv c (Plus r1 r2) = Plus (deriv c r1) (deriv c r2)"
| "deriv c (Times r1 r2) =
(if nullable r1 then Plus (Times (deriv c r1) r2) (deriv c r2) else Times (deriv c r1) r2)"
| "deriv c (Star r) = Times (deriv c r) (Star r)"

primrec
derivs :: "'a list \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp"
where
"derivs [] r = r"
| "derivs (c # s) r = derivs s (deriv c r)"
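For orientation, a worked derivative (an editor's sketch, not part of the committed file; both goals should follow by simp from the equations above):

lemma "deriv a (Times (Atom a) (Atom b)) = Times One (Atom b)"
  by simp
lemma "derivs [a, b] (Times (Atom a) (Atom b)) = Plus (Times Zero (Atom b)) One"
  by simp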

lemma atoms_deriv_subset: "atoms (deriv x r) \<subseteq> atoms r"
by (induction r) (auto)

lemma atoms_derivs_subset: "atoms (derivs w r) \<subseteq> atoms r"
by (induction w arbitrary: r) (auto dest: atoms_deriv_subset[THEN subsetD])

lemma lang_deriv: "lang (deriv c r) = Deriv c (lang r)"
by (induct r) (simp_all add: nullable_iff)

lemma lang_derivs: "lang (derivs s r) = Derivs s (lang r)"
by (induct s arbitrary: r) (simp_all add: lang_deriv)

text \<open>A regular expression matcher:\<close>

definition matcher :: "'a rexp \<Rightarrow> 'a list \<Rightarrow> bool" where
"matcher r s = nullable (derivs s r)"

lemma matcher_correctness: "matcher r s \<longleftrightarrow> s \<in> lang r"
by (induct s arbitrary: r)
(simp_all add: nullable_iff lang_deriv matcher_def Deriv_def)
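Since all functions involved are executable, the matcher can be exercised directly (an editor's illustration, not part of the committed file; it assumes the Regular-Sets constructors Zero/One/Atom/Plus/Times/Star):

value "matcher (Times (Atom (0::nat)) (Star (Atom 1))) [0, 1, 1]"  (* True: a 0 followed by 1s *)
value "matcher (Times (Atom (0::nat)) (Star (Atom 1))) [1, 0]"     (* False *)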

subsection \<open>Antimirov's partial derivatives\<close>

abbreviation
"Timess rs r \<equiv> (\<Union>r' \<in> rs. {Times r' r})"

lemma Timess_eq_image:
"Timess rs r = (\<lambda>r'. Times r' r) ` rs"
by auto

primrec
pderiv :: "'a \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp set"
where
"pderiv c Zero = {}"
| "pderiv c One = {}"
| "pderiv c (Atom c') = (if c = c' then {One} else {})"
| "pderiv c (Plus r1 r2) = (pderiv c r1) \<union> (pderiv c r2)"
| "pderiv c (Times r1 r2) =
(if nullable r1 then Timess (pderiv c r1) r2 \<union> pderiv c r2 else Timess (pderiv c r1) r2)"
| "pderiv c (Star r) = Timess (pderiv c r) (Star r)"

primrec
pderivs :: "'a list \<Rightarrow> 'a rexp \<Rightarrow> ('a rexp) set"
where
"pderivs [] r = {r}"
| "pderivs (c # s) r = \<Union> (pderivs s ` pderiv c r)"
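A small worked case (an editor's sketch, not part of the committed file): unlike deriv, pderiv returns a set of alternatives, so the Plus above collapses into a set union; the goal should be provable by simp:

lemma "pderiv a (Plus (Atom a) (Times (Atom a) (Atom b))) = {One, Times One (Atom b)}"
  by simp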
abbreviation
pderiv_set :: "'a \<Rightarrow> 'a rexp set \<Rightarrow> 'a rexp set"
where
"pderiv_set c rs \<equiv> \<Union> (pderiv c ` rs)"

abbreviation
pderivs_set :: "'a list \<Rightarrow> 'a rexp set \<Rightarrow> 'a rexp set"
where
"pderivs_set s rs \<equiv> \<Union> (pderivs s ` rs)"

lemma pderivs_append:
"pderivs (s1 @ s2) r = \<Union> (pderivs s2 ` pderivs s1 r)"
by (induct s1 arbitrary: r) (simp_all)

lemma pderivs_snoc:
shows "pderivs (s @ [c]) r = pderiv_set c (pderivs s r)"
by (simp add: pderivs_append)

lemma pderivs_simps [simp]:
shows "pderivs s Zero = (if s = [] then {Zero} else {})"
and "pderivs s One = (if s = [] then {One} else {})"
and "pderivs s (Plus r1 r2) = (if s = [] then {Plus r1 r2} else (pderivs s r1) \<union> (pderivs s r2))"
by (induct s) (simp_all)

lemma pderivs_Atom:
shows "pderivs s (Atom c) \<subseteq> {Atom c, One}"
by (induct s) (simp_all)

subsection \<open>Relating left-quotients and partial derivatives\<close>

lemma Deriv_pderiv:
shows "Deriv c (lang r) = \<Union> (lang ` pderiv c r)"
by (induct r) (auto simp add: nullable_iff conc_UNION_distrib)

lemma Derivs_pderivs:
shows "Derivs s (lang r) = \<Union> (lang ` pderivs s r)"
proof (induct s arbitrary: r)
case (Cons c s)
have ih: "\<And>r. Derivs s (lang r) = \<Union> (lang ` pderivs s r)" by fact
have "Derivs (c # s) (lang r) = Derivs s (Deriv c (lang r))" by simp
also have "\<dots> = Derivs s (\<Union> (lang ` pderiv c r))" by (simp add: Deriv_pderiv)
also have "\<dots> = Derivss s (lang ` (pderiv c r))"
by (auto simp add: Derivs_def)
also have "\<dots> = \<Union> (lang ` (pderivs_set s (pderiv c r)))"
using ih by auto
also have "\<dots> = \<Union> (lang ` (pderivs (c # s) r))" by simp
finally show "Derivs (c # s) (lang r) = \<Union> (lang ` pderivs (c # s) r)" .
qed (simp add: Derivs_def)

subsection \<open>Relating derivatives and partial derivatives\<close>

lemma deriv_pderiv:
shows "\<Union> (lang ` (pderiv c r)) = lang (deriv c r)"
unfolding lang_deriv Deriv_pderiv by simp

lemma derivs_pderivs:
shows "\<Union> (lang ` (pderivs s r)) = lang (derivs s r)"
unfolding lang_derivs Derivs_pderivs by simp


subsection \<open>Finiteness property of partial derivatives\<close>

definition
pderivs_lang :: "'a lang \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp set"
where
"pderivs_lang A r \<equiv> \<Union>x \<in> A. pderivs x r"

lemma pderivs_lang_subsetI:
assumes "\<And>s. s \<in> A \<Longrightarrow> pderivs s r \<subseteq> C"
shows "pderivs_lang A r \<subseteq> C"
using assms unfolding pderivs_lang_def by (rule UN_least)

lemma pderivs_lang_union:
shows "pderivs_lang (A \<union> B) r = (pderivs_lang A r \<union> pderivs_lang B r)"
by (simp add: pderivs_lang_def)

lemma pderivs_lang_subset:
shows "A \<subseteq> B \<Longrightarrow> pderivs_lang A r \<subseteq> pderivs_lang B r"
by (auto simp add: pderivs_lang_def)

definition
"UNIV1 \<equiv> UNIV - {[]}"

lemma pderivs_lang_Zero [simp]:
shows "pderivs_lang UNIV1 Zero = {}"
unfolding UNIV1_def pderivs_lang_def by auto

lemma pderivs_lang_One [simp]:
shows "pderivs_lang UNIV1 One = {}"
unfolding UNIV1_def pderivs_lang_def by (auto split: if_splits)

lemma pderivs_lang_Atom [simp]:
shows "pderivs_lang UNIV1 (Atom c) = {One}"
unfolding UNIV1_def pderivs_lang_def
apply(auto)
apply(frule rev_subsetD)
apply(rule pderivs_Atom)
apply(simp)
apply(case_tac xa)
apply(auto split: if_splits)
done

lemma pderivs_lang_Plus [simp]:
shows "pderivs_lang UNIV1 (Plus r1 r2) = pderivs_lang UNIV1 r1 \<union> pderivs_lang UNIV1 r2"
unfolding UNIV1_def pderivs_lang_def by auto


text \<open>Non-empty suffixes of a string (needed for the cases of @{const Times} and @{const Star} below)\<close>

definition
"PSuf s \<equiv> {v. v \<noteq> [] \<and> (\<exists>u. u @ v = s)}"

lemma PSuf_snoc:
shows "PSuf (s @ [c]) = (PSuf s) @@ {[c]} \<union> {[c]}"
unfolding PSuf_def conc_def
by (auto simp add: append_eq_append_conv2 append_eq_Cons_conv)

lemma PSuf_Union:
shows "(\<Union>v \<in> PSuf s @@ {[c]}. f v) = (\<Union>v \<in> PSuf s. f (v @ [c]))"
by (auto simp add: conc_def)

lemma pderivs_lang_snoc:
shows "pderivs_lang (PSuf s @@ {[c]}) r = (pderiv_set c (pderivs_lang (PSuf s) r))"
unfolding pderivs_lang_def
by (simp add: PSuf_Union pderivs_snoc)

lemma pderivs_Times:
shows "pderivs s (Times r1 r2) \<subseteq> Timess (pderivs s r1) r2 \<union> (pderivs_lang (PSuf s) r2)"
proof (induct s rule: rev_induct)
case (snoc c s)
have ih: "pderivs s (Times r1 r2) \<subseteq> Timess (pderivs s r1) r2 \<union> (pderivs_lang (PSuf s) r2)"
by fact
have "pderivs (s @ [c]) (Times r1 r2) = pderiv_set c (pderivs s (Times r1 r2))"
by (simp add: pderivs_snoc)
also have "\<dots> \<subseteq> pderiv_set c (Timess (pderivs s r1) r2 \<union> (pderivs_lang (PSuf s) r2))"
using ih by fastforce
also have "\<dots> = pderiv_set c (Timess (pderivs s r1) r2) \<union> pderiv_set c (pderivs_lang (PSuf s) r2)"
by (simp)
also have "\<dots> = pderiv_set c (Timess (pderivs s r1) r2) \<union> pderivs_lang (PSuf s @@ {[c]}) r2"
by (simp add: pderivs_lang_snoc)
also
have "\<dots> \<subseteq> pderiv_set c (Timess (pderivs s r1) r2) \<union> pderiv c r2 \<union> pderivs_lang (PSuf s @@ {[c]}) r2"
by auto
also
have "\<dots> \<subseteq> Timess (pderiv_set c (pderivs s r1)) r2 \<union> pderiv c r2 \<union> pderivs_lang (PSuf s @@ {[c]}) r2"
by (auto simp add: if_splits)
also have "\<dots> = Timess (pderivs (s @ [c]) r1) r2 \<union> pderiv c r2 \<union> pderivs_lang (PSuf s @@ {[c]}) r2"
by (simp add: pderivs_snoc)
also have "\<dots> \<subseteq> Timess (pderivs (s @ [c]) r1) r2 \<union> pderivs_lang (PSuf (s @ [c])) r2"
unfolding pderivs_lang_def by (auto simp add: PSuf_snoc)
finally show ?case .
qed (simp)

lemma pderivs_lang_Times_aux1:
assumes a: "s \<in> UNIV1"
shows "pderivs_lang (PSuf s) r \<subseteq> pderivs_lang UNIV1 r"
using a unfolding UNIV1_def PSuf_def pderivs_lang_def by auto

lemma pderivs_lang_Times_aux2:
assumes a: "s \<in> UNIV1"
shows "Timess (pderivs s r1) r2 \<subseteq> Timess (pderivs_lang UNIV1 r1) r2"
using a unfolding pderivs_lang_def by auto

lemma pderivs_lang_Times:
shows "pderivs_lang UNIV1 (Times r1 r2) \<subseteq> Timess (pderivs_lang UNIV1 r1) r2 \<union> pderivs_lang UNIV1 r2"
apply(rule pderivs_lang_subsetI)
apply(rule subset_trans)
apply(rule pderivs_Times)
using pderivs_lang_Times_aux1 pderivs_lang_Times_aux2
apply auto
apply blast
done

lemma pderivs_Star:
assumes a: "s \<noteq> []"
shows "pderivs s (Star r) \<subseteq> Timess (pderivs_lang (PSuf s) r) (Star r)"
using a
proof (induct s rule: rev_induct)
case (snoc c s)
have ih: "s \<noteq> [] \<Longrightarrow> pderivs s (Star r) \<subseteq> Timess (pderivs_lang (PSuf s) r) (Star r)" by fact
{ assume asm: "s \<noteq> []"
have "pderivs (s @ [c]) (Star r) = pderiv_set c (pderivs s (Star r))" by (simp add: pderivs_snoc)
also have "\<dots> \<subseteq> pderiv_set c (Timess (pderivs_lang (PSuf s) r) (Star r))"
using ih[OF asm] by fast
also have "\<dots> \<subseteq> Timess (pderiv_set c (pderivs_lang (PSuf s) r)) (Star r) \<union> pderiv c (Star r)"
by (auto split: if_splits)
also have "\<dots> \<subseteq> Timess (pderivs_lang (PSuf (s @ [c])) r) (Star r) \<union> (Timess (pderiv c r) (Star r))"
by (simp only: PSuf_snoc pderivs_lang_snoc pderivs_lang_union)
(auto simp add: pderivs_lang_def)
also have "\<dots> = Timess (pderivs_lang (PSuf (s @ [c])) r) (Star r)"
by (auto simp add: PSuf_snoc PSuf_Union pderivs_snoc pderivs_lang_def)
finally have ?case .
}
moreover
{ assume asm: "s = []"
then have ?case by (auto simp add: pderivs_lang_def pderivs_snoc PSuf_def)
}
ultimately show ?case by blast
qed (simp)

lemma pderivs_lang_Star:
shows "pderivs_lang UNIV1 (Star r) \<subseteq> Timess (pderivs_lang UNIV1 r) (Star r)"
apply(rule pderivs_lang_subsetI)
apply(rule subset_trans)
apply(rule pderivs_Star)
apply(simp add: UNIV1_def)
apply(simp add: UNIV1_def PSuf_def)
apply(auto simp add: pderivs_lang_def)
done

lemma finite_Timess [simp]:
assumes a: "finite A"
shows "finite (Timess A r)"
using a by auto

lemma finite_pderivs_lang_UNIV1:
shows "finite (pderivs_lang UNIV1 r)"
apply(induct r)
apply(simp_all add:
finite_subset[OF pderivs_lang_Times]
finite_subset[OF pderivs_lang_Star])
done

lemma pderivs_lang_UNIV:
shows "pderivs_lang UNIV r = pderivs [] r \<union> pderivs_lang UNIV1 r"
unfolding UNIV1_def pderivs_lang_def
by blast

lemma finite_pderivs_lang_UNIV:
shows "finite (pderivs_lang UNIV r)"
unfolding pderivs_lang_UNIV
by (simp add: finite_pderivs_lang_UNIV1)

lemma finite_pderivs_lang:
shows "finite (pderivs_lang A r)"
by (metis finite_pderivs_lang_UNIV pderivs_lang_subset rev_finite_subset subset_UNIV)


text\<open>The following relationship between the alphabetic width of regular expressions
(called @{text awidth} below) and the number of partial derivatives was proved
by Antimirov~\cite{Antimirov95} and formalized by Max Haslbeck.\<close>

fun awidth :: "'a rexp \<Rightarrow> nat" where
"awidth Zero = 0" |
"awidth One = 0" |
"awidth (Atom a) = 1" |
"awidth (Plus r1 r2) = awidth r1 + awidth r2" |
"awidth (Times r1 r2) = awidth r1 + awidth r2" |
"awidth (Star r1) = awidth r1"
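For example (an editor's note, not part of the committed file), awidth simply counts the Atom occurrences:

value "awidth (Times (Plus (Atom (0::nat)) (Atom 1)) (Star (Atom 0)))"  (* = 3 *)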
lemma card_Timess_pderivs_lang_le:
"card (Timess (pderivs_lang A r) s) \<le> card (pderivs_lang A r)"
using finite_pderivs_lang unfolding Timess_eq_image by (rule card_image_le)

lemma card_pderivs_lang_UNIV1_le_awidth: "card (pderivs_lang UNIV1 r) \<le> awidth r"
proof (induction r)
case (Plus r1 r2)
have "card (pderivs_lang UNIV1 (Plus r1 r2)) = card (pderivs_lang UNIV1 r1 \<union> pderivs_lang UNIV1 r2)" by simp
also have "\<dots> \<le> card (pderivs_lang UNIV1 r1) + card (pderivs_lang UNIV1 r2)"
by(simp add: card_Un_le)
also have "\<dots> \<le> awidth (Plus r1 r2)" using Plus.IH by simp
finally show ?case .
next
case (Times r1 r2)
have "card (pderivs_lang UNIV1 (Times r1 r2)) \<le> card (Timess (pderivs_lang UNIV1 r1) r2 \<union> pderivs_lang UNIV1 r2)"
by (simp add: card_mono finite_pderivs_lang pderivs_lang_Times)
also have "\<dots> \<le> card (Timess (pderivs_lang UNIV1 r1) r2) + card (pderivs_lang UNIV1 r2)"
by (simp add: card_Un_le)
also have "\<dots> \<le> card (pderivs_lang UNIV1 r1) + card (pderivs_lang UNIV1 r2)"
by (simp add: card_Timess_pderivs_lang_le)
also have "\<dots> \<le> awidth (Times r1 r2)" using Times.IH by simp
finally show ?case .
next
case (Star r)
have "card (pderivs_lang UNIV1 (Star r)) \<le> card (Timess (pderivs_lang UNIV1 r) (Star r))"
by (simp add: card_mono finite_pderivs_lang pderivs_lang_Star)
also have "\<dots> \<le> card (pderivs_lang UNIV1 r)" by (rule card_Timess_pderivs_lang_le)
also have "\<dots> \<le> awidth (Star r)" by (simp add: Star.IH)
finally show ?case .
qed (auto)

text\<open>Antimirov's Theorem 3.4:\<close>
theorem card_pderivs_lang_UNIV_le_awidth: "card (pderivs_lang UNIV r) \<le> awidth r + 1"
proof -
have "card (insert r (pderivs_lang UNIV1 r)) \<le> Suc (card (pderivs_lang UNIV1 r))"
by(auto simp: card_insert_if[OF finite_pderivs_lang_UNIV1])
also have "\<dots> \<le> Suc (awidth r)" by(simp add: card_pderivs_lang_UNIV1_le_awidth)
finally show ?thesis by(simp add: pderivs_lang_UNIV)
qed

text\<open>Antimirov's Corollary 3.5:\<close>
corollary card_pderivs_lang_le_awidth: "card (pderivs_lang A r) \<le> awidth r + 1"
by(rule order_trans[OF
card_mono[OF finite_pderivs_lang_UNIV pderivs_lang_subset[OF subset_UNIV]]
card_pderivs_lang_UNIV_le_awidth])

end
@ -0,0 +1,230 @@
|
|||
section \<open>Deciding Regular Expression Equivalence\<close>
|
||||
|
||||
theory Equivalence_Checking
|
||||
imports
|
||||
NDerivative
|
||||
"HOL-Library.While_Combinator"
|
||||
begin
|
||||
|
||||
|
||||
subsection \<open>Bisimulation between languages and regular expressions\<close>
|
||||
|
||||
coinductive bisimilar :: "'a lang \<Rightarrow> 'a lang \<Rightarrow> bool" where
|
||||
"([] \<in> K \<longleftrightarrow> [] \<in> L)
|
||||
\<Longrightarrow> (\<And>x. bisimilar (Deriv x K) (Deriv x L))
|
||||
\<Longrightarrow> bisimilar K L"
|
||||
|
||||
lemma equal_if_bisimilar:
|
||||
assumes "bisimilar K L" shows "K = L"
|
||||
proof (rule set_eqI)
|
||||
fix w
|
||||
from \<open>bisimilar K L\<close> show "w \<in> K \<longleftrightarrow> w \<in> L"
|
||||
proof (induct w arbitrary: K L)
|
||||
case Nil thus ?case by (auto elim: bisimilar.cases)
|
||||
next
|
||||
case (Cons a w K L)
|
||||
from \<open>bisimilar K L\<close> have "bisimilar (Deriv a K) (Deriv a L)"
|
||||
by (auto elim: bisimilar.cases)
|
||||
then have "w \<in> Deriv a K \<longleftrightarrow> w \<in> Deriv a L" by (rule Cons(1))
|
||||
thus ?case by (auto simp: Deriv_def)
|
||||
qed
|
||||
qed
|
||||
|
||||
lemma language_coinduct:
|
||||
fixes R (infixl "\<sim>" 50)
|
||||
assumes "K \<sim> L"
|
||||
assumes "\<And>K L. K \<sim> L \<Longrightarrow> ([] \<in> K \<longleftrightarrow> [] \<in> L)"
|
||||
assumes "\<And>K L x. K \<sim> L \<Longrightarrow> Deriv x K \<sim> Deriv x L"
|
||||
shows "K = L"
|
||||
apply (rule equal_if_bisimilar)
|
||||
apply (rule bisimilar.coinduct[of R, OF \<open>K \<sim> L\<close>])
|
||||
apply (auto simp: assms)
|
||||
done
|
||||
|
||||
type_synonym 'a rexp_pair = "'a rexp * 'a rexp"
|
||||
type_synonym 'a rexp_pairs = "'a rexp_pair list"
|
||||
|
||||
definition is_bisimulation :: "'a::order list \<Rightarrow> 'a rexp_pair set \<Rightarrow> bool"
|
||||
where
|
||||
"is_bisimulation as R =
|
||||
(\<forall>(r,s)\<in> R. (atoms r \<union> atoms s \<subseteq> set as) \<and> (nullable r \<longleftrightarrow> nullable s) \<and>
|
||||
(\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> R))"
|
||||
|
||||
lemma bisim_lang_eq:
|
||||
assumes bisim: "is_bisimulation as ps"
|
||||
assumes "(r, s) \<in> ps"
|
||||
shows "lang r = lang s"
|
||||
proof -
|
||||
define ps' where "ps' = insert (Zero, Zero) ps"
|
||||
from bisim have bisim': "is_bisimulation as ps'"
|
||||
by (auto simp: ps'_def is_bisimulation_def)
|
||||
let ?R = "\<lambda>K L. (\<exists>(r,s)\<in>ps'. K = lang r \<and> L = lang s)"
|
||||
show ?thesis
|
||||
proof (rule language_coinduct[where R="?R"])
|
||||
from \<open>(r, s) \<in> ps\<close>
|
||||
have "(r, s) \<in> ps'" by (auto simp: ps'_def)
|
||||
thus "?R (lang r) (lang s)" by auto
|
||||
next
|
||||
fix K L assume "?R K L"
|
||||
then obtain r s where rs: "(r, s) \<in> ps'"
|
||||
and KL: "K = lang r" "L = lang s" by auto
|
||||
with bisim' have "nullable r \<longleftrightarrow> nullable s"
|
||||
by (auto simp: is_bisimulation_def)
|
||||
thus "[] \<in> K \<longleftrightarrow> [] \<in> L" by (auto simp: nullable_iff KL)
|
||||
fix a
|
||||
show "?R (Deriv a K) (Deriv a L)"
|
||||
proof cases
|
||||
assume "a \<in> set as"
|
||||
with rs bisim'
|
||||
have "(nderiv a r, nderiv a s) \<in> ps'"
|
||||
by (auto simp: is_bisimulation_def)
|
||||
thus ?thesis by (force simp: KL lang_nderiv)
|
||||
next
|
||||
assume "a \<notin> set as"
|
||||
with bisim' rs
|
||||
have "a \<notin> atoms r" "a \<notin> atoms s" by (auto simp: is_bisimulation_def)
|
||||
then have "nderiv a r = Zero" "nderiv a s = Zero"
|
||||
by (auto intro: deriv_no_occurrence)
|
||||
then have "Deriv a K = lang Zero"
|
||||
"Deriv a L = lang Zero"
|
||||
unfolding KL lang_nderiv[symmetric] by auto
|
||||
thus ?thesis by (auto simp: ps'_def)
|
||||
qed
|
||||
qed
|
||||
qed
|
||||
|
||||
subsection \<open>Closure computation\<close>

definition closure ::
  "'a::order list \<Rightarrow> 'a rexp_pair \<Rightarrow> ('a rexp_pairs * 'a rexp_pair set) option"
where
  "closure as = rtrancl_while (%(r,s). nullable r = nullable s)
    (%(r,s). map (\<lambda>a. (nderiv a r, nderiv a s)) as)"

definition pre_bisim :: "'a::order list \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp \<Rightarrow>
  'a rexp_pairs * 'a rexp_pair set \<Rightarrow> bool"
where
  "pre_bisim as r s = (\<lambda>(ws,R).
    (r,s) \<in> R \<and> set ws \<subseteq> R \<and>
    (\<forall>(r,s)\<in> R. atoms r \<union> atoms s \<subseteq> set as) \<and>
    (\<forall>(r,s)\<in> R - set ws. (nullable r \<longleftrightarrow> nullable s) \<and>
      (\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> R)))"

theorem closure_sound:
  assumes result: "closure as (r,s) = Some([],R)"
  and atoms: "atoms r \<union> atoms s \<subseteq> set as"
  shows "lang r = lang s"
proof-
  let ?test = "While_Combinator.rtrancl_while_test (%(r,s). nullable r = nullable s)"
  let ?step = "While_Combinator.rtrancl_while_step (%(r,s). map (\<lambda>a. (nderiv a r, nderiv a s)) as)"
  { fix st assume inv: "pre_bisim as r s st" and test: "?test st"
    have "pre_bisim as r s (?step st)"
    proof (cases st)
      fix ws R assume "st = (ws, R)"
      with test obtain r s t where st: "st = ((r, s) # t, R)" and "nullable r = nullable s"
        by (cases ws) auto
      with inv show ?thesis using atoms_nderiv[of _ r] atoms_nderiv[of _ s]
        unfolding st rtrancl_while_test.simps rtrancl_while_step.simps pre_bisim_def Ball_def
        by simp_all blast+
    qed
  }
  moreover
  from atoms
  have "pre_bisim as r s ([(r,s)],{(r,s)})" by (simp add: pre_bisim_def)
  ultimately have pre_bisim_ps: "pre_bisim as r s ([],R)"
    by (rule while_option_rule[OF _ result[unfolded closure_def rtrancl_while_def]])
  then have "is_bisimulation as R" "(r, s) \<in> R"
    by (auto simp: pre_bisim_def is_bisimulation_def)
  thus "lang r = lang s" by (rule bisim_lang_eq)
qed


subsection \<open>Bisimulation-free proof of closure computation\<close>

text\<open>The equivalence check can be viewed as a product construction on two
automata: the reachable state space is the image of the initial pair of
regular expressions under the reflexive transitive closure of the paired
next-state function, i.e.\ of taking derivatives.\<close>

lemma rtrancl_nderiv_nderivs: defines "nderivs == foldl (%r a. nderiv a r)"
  shows "{((r,s),(nderiv a r,nderiv a s))| r s a. a : A}^* =
         {((r,s),(nderivs r w,nderivs s w))| r s w. w : lists A}" (is "?L = ?R")
proof-
  note [simp] = nderivs_def
  { fix r s r' s'
    have "((r,s),(r',s')) : ?L \<Longrightarrow> ((r,s),(r',s')) : ?R"
    proof(induction rule: converse_rtrancl_induct2)
      case refl show ?case by (force intro!: foldl.simps(1)[symmetric])
    next
      case step thus ?case by(force intro!: foldl.simps(2)[symmetric])
    qed
  } moreover
  { fix r s r' s'
    { fix w have "\<forall>x\<in>set w. x \<in> A \<Longrightarrow> ((r, s), nderivs r w, nderivs s w) :?L"
      proof(induction w rule: rev_induct)
        case Nil show ?case by simp
      next
        case snoc thus ?case by (auto elim!: rtrancl_into_rtrancl)
      qed
    }
    hence "((r,s),(r',s')) : ?R \<Longrightarrow> ((r,s),(r',s')) : ?L" by auto
  } ultimately show ?thesis by (auto simp: in_lists_conv_set) blast
qed

lemma nullable_nderivs:
  "nullable (foldl (%r a. nderiv a r) r w) = (w : lang r)"
by (induct w arbitrary: r) (simp_all add: nullable_iff lang_nderiv Deriv_def)

theorem closure_sound_complete:
  assumes result: "closure as (r,s) = Some(ws,R)"
  and atoms: "set as = atoms r \<union> atoms s"
  shows "ws = [] \<longleftrightarrow> lang r = lang s"
proof -
  have leq: "(lang r = lang s) =
    (\<forall>(r',s') \<in> {((r0,s0),(nderiv a r0,nderiv a s0))| r0 s0 a. a : set as}^* `` {(r,s)}.
      nullable r' = nullable s')"
    by(simp add: atoms rtrancl_nderiv_nderivs Ball_def lang_eq_ext imp_ex nullable_nderivs
      del:Un_iff)
  have "{(x,y). y \<in> set ((\<lambda>(p,q). map (\<lambda>a. (nderiv a p, nderiv a q)) as) x)} =
    {((r,s), nderiv a r, nderiv a s) |r s a. a \<in> set as}"
    by auto
  with atoms rtrancl_while_Some[OF result[unfolded closure_def]]
  show ?thesis by (auto simp add: leq Ball_def split: if_splits)
qed

subsection \<open>The overall procedure\<close>

primrec add_atoms :: "'a rexp \<Rightarrow> 'a list \<Rightarrow> 'a list"
where
  "add_atoms Zero = id"
| "add_atoms One = id"
| "add_atoms (Atom a) = List.insert a"
| "add_atoms (Plus r s) = add_atoms s o add_atoms r"
| "add_atoms (Times r s) = add_atoms s o add_atoms r"
| "add_atoms (Star r) = add_atoms r"

lemma set_add_atoms: "set (add_atoms r as) = atoms r \<union> set as"
by (induct r arbitrary: as) auto


definition check_eqv :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> bool" where
"check_eqv r s =
  (let nr = norm r; ns = norm s; as = add_atoms nr (add_atoms ns [])
   in case closure as (nr, ns) of
     Some([],_) \<Rightarrow> True | _ \<Rightarrow> False)"

lemma soundness:
  assumes "check_eqv r s" shows "lang r = lang s"
proof -
  let ?nr = "norm r" let ?ns = "norm s"
  let ?as = "add_atoms ?nr (add_atoms ?ns [])"
  obtain R where 1: "closure ?as (?nr,?ns) = Some([],R)"
    using assms by (auto simp: check_eqv_def Let_def split:option.splits list.splits)
  then have "lang (norm r) = lang (norm s)"
    by (rule closure_sound) (auto simp: set_add_atoms dest!: subsetD[OF atoms_norm])
  thus "lang r = lang s" by simp
qed

text\<open>Test:\<close>
lemma "check_eqv (Plus One (Times (Atom 0) (Star(Atom 0)))) (Star(Atom 0))"
by eval
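
text\<open>A further test in the same spirit: commutativity of \<open>Plus\<close> is resolved
by \<open>norm\<close> alone, which orders summands via \<open>le_rexp\<close>, so both sides normalize
to the same term.\<close>
lemma "check_eqv (Plus (Atom 0) (Atom 1)) (Plus (Atom 1) (Atom 0))"
by eval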

end

@@ -0,0 +1,318 @@

section \<open>Deciding Equivalence of Extended Regular Expressions\<close>

theory Equivalence_Checking2
imports Regular_Exp2 "HOL-Library.While_Combinator"
begin

subsection \<open>Term ordering\<close>

fun le_rexp :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> bool"
where
  "le_rexp Zero _ = True"
| "le_rexp _ Zero = False"
| "le_rexp One _ = True"
| "le_rexp _ One = False"
| "le_rexp (Atom a) (Atom b) = (a <= b)"
| "le_rexp (Atom _) _ = True"
| "le_rexp _ (Atom _) = False"
| "le_rexp (Star r) (Star s) = le_rexp r s"
| "le_rexp (Star _) _ = True"
| "le_rexp _ (Star _) = False"
| "le_rexp (Not r) (Not s) = le_rexp r s"
| "le_rexp (Not _) _ = True"
| "le_rexp _ (Not _) = False"
| "le_rexp (Plus r r') (Plus s s') =
    (if r = s then le_rexp r' s' else le_rexp r s)"
| "le_rexp (Plus _ _) _ = True"
| "le_rexp _ (Plus _ _) = False"
| "le_rexp (Times r r') (Times s s') =
    (if r = s then le_rexp r' s' else le_rexp r s)"
| "le_rexp (Times _ _) _ = True"
| "le_rexp _ (Times _ _) = False"
| "le_rexp (Inter r r') (Inter s s') =
    (if r = s then le_rexp r' s' else le_rexp r s)"

subsection \<open>Normalizing operations\<close>

text \<open>associativity, commutativity, idempotence, zero\<close>

fun nPlus :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> nat rexp"
where
  "nPlus Zero r = r"
| "nPlus r Zero = r"
| "nPlus (Plus r s) t = nPlus r (nPlus s t)"
| "nPlus r (Plus s t) =
    (if r = s then (Plus s t)
     else if le_rexp r s then Plus r (Plus s t)
     else Plus s (nPlus r t))"
| "nPlus r s =
    (if r = s then r
     else if le_rexp r s then Plus r s
     else Plus s r)"

lemma lang_nPlus[simp]: "lang S (nPlus r s) = lang S (Plus r s)"
by (induct r s rule: nPlus.induct) auto

text \<open>associativity, zero, one\<close>

fun nTimes :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> nat rexp"
where
  "nTimes Zero _ = Zero"
| "nTimes _ Zero = Zero"
| "nTimes One r = r"
| "nTimes r One = r"
| "nTimes (Times r s) t = Times r (nTimes s t)"
| "nTimes r s = Times r s"

lemma lang_nTimes[simp]: "lang S (nTimes r s) = lang S (Times r s)"
by (induct r s rule: nTimes.induct) (auto simp: conc_assoc)

text \<open>more optimisations:\<close>

fun nInter :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> nat rexp"
where
  "nInter Zero _ = Zero"
| "nInter _ Zero = Zero"
| "nInter r s = Inter r s"

lemma lang_nInter[simp]: "lang S (nInter r s) = lang S (Inter r s)"
by (induct r s rule: nInter.induct) (auto)

primrec norm :: "nat rexp \<Rightarrow> nat rexp"
where
  "norm Zero = Zero"
| "norm One = One"
| "norm (Atom a) = Atom a"
| "norm (Plus r s) = nPlus (norm r) (norm s)"
| "norm (Times r s) = nTimes (norm r) (norm s)"
| "norm (Star r) = Star (norm r)"
| "norm (Not r) = Not (norm r)"
| "norm (Inter r1 r2) = nInter (norm r1) (norm r2)"

lemma lang_norm[simp]: "lang S (norm r) = lang S r"
by (induct r) auto

subsection \<open>Derivative\<close>

primrec nderiv :: "nat \<Rightarrow> nat rexp \<Rightarrow> nat rexp"
where
  "nderiv _ Zero = Zero"
| "nderiv _ One = Zero"
| "nderiv a (Atom b) = (if a = b then One else Zero)"
| "nderiv a (Plus r s) = nPlus (nderiv a r) (nderiv a s)"
| "nderiv a (Times r s) =
    (let r's = nTimes (nderiv a r) s
     in if nullable r then nPlus r's (nderiv a s) else r's)"
| "nderiv a (Star r) = nTimes (nderiv a r) (Star r)"
| "nderiv a (Not r) = Not (nderiv a r)"
| "nderiv a (Inter r1 r2) = nInter (nderiv a r1) (nderiv a r2)"

lemma lang_nderiv: "a:S \<Longrightarrow> lang S (nderiv a r) = Deriv a (lang S r)"
by (induct r) (auto simp: Let_def nullable_iff[where S=S])

lemma atoms_nPlus[simp]: "atoms (nPlus r s) = atoms r \<union> atoms s"
by (induct r s rule: nPlus.induct) auto

lemma atoms_nTimes: "atoms (nTimes r s) \<subseteq> atoms r \<union> atoms s"
by (induct r s rule: nTimes.induct) auto

lemma atoms_nInter: "atoms (nInter r s) \<subseteq> atoms r \<union> atoms s"
by (induct r s rule: nInter.induct) auto

lemma atoms_norm: "atoms (norm r) \<subseteq> atoms r"
by (induct r) (auto dest!: subsetD[OF atoms_nTimes] subsetD[OF atoms_nInter])

lemma atoms_nderiv: "atoms (nderiv a r) \<subseteq> atoms r"
by (induct r) (auto simp: Let_def dest!: subsetD[OF atoms_nTimes] subsetD[OF atoms_nInter])

subsection \<open>Bisimulation between languages and regular expressions\<close>

context
  fixes S :: "'a set"
begin

coinductive bisimilar :: "'a lang \<Rightarrow> 'a lang \<Rightarrow> bool" where
  "K \<subseteq> lists S \<Longrightarrow> L \<subseteq> lists S
   \<Longrightarrow> ([] \<in> K \<longleftrightarrow> [] \<in> L)
   \<Longrightarrow> (\<And>x. x:S \<Longrightarrow> bisimilar (Deriv x K) (Deriv x L))
   \<Longrightarrow> bisimilar K L"

lemma equal_if_bisimilar:
  assumes "K \<subseteq> lists S" "L \<subseteq> lists S" "bisimilar K L" shows "K = L"
proof (rule set_eqI)
  fix w
  from assms show "w \<in> K \<longleftrightarrow> w \<in> L"
  proof (induction w arbitrary: K L)
    case Nil thus ?case by (auto elim: bisimilar.cases)
  next
    case (Cons a w K L)
    show ?case
    proof cases
      assume "a : S"
      with \<open>bisimilar K L\<close> have "bisimilar (Deriv a K) (Deriv a L)"
        by (auto elim: bisimilar.cases)
      then have "w \<in> Deriv a K \<longleftrightarrow> w \<in> Deriv a L"
        by (metis Cons.IH bisimilar.cases)
      thus ?case by (auto simp: Deriv_def)
    next
      assume "a \<notin> S"
      thus ?case using Cons.prems by auto
    qed
  qed
qed

lemma language_coinduct:
  fixes R (infixl "\<sim>" 50)
  assumes "\<And>K L. K \<sim> L \<Longrightarrow> K \<subseteq> lists S \<and> L \<subseteq> lists S"
  assumes "K \<sim> L"
  assumes "\<And>K L. K \<sim> L \<Longrightarrow> ([] \<in> K \<longleftrightarrow> [] \<in> L)"
  assumes "\<And>K L x. K \<sim> L \<Longrightarrow> x : S \<Longrightarrow> Deriv x K \<sim> Deriv x L"
  shows "K = L"
apply (rule equal_if_bisimilar)
apply (metis assms(1) assms(2))
apply (metis assms(1) assms(2))
apply (rule bisimilar.coinduct[of R, OF \<open>K \<sim> L\<close>])
apply (auto simp: assms)
done

end

type_synonym rexp_pair = "nat rexp * nat rexp"
type_synonym rexp_pairs = "rexp_pair list"

definition is_bisimulation :: "nat list \<Rightarrow> rexp_pairs \<Rightarrow> bool"
where
"is_bisimulation as ps =
  (\<forall>(r,s)\<in> set ps. (atoms r \<union> atoms s \<subseteq> set as) \<and> (nullable r \<longleftrightarrow> nullable s) \<and>
    (\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> set ps))"


lemma bisim_lang_eq:
  assumes bisim: "is_bisimulation as ps"
  assumes "(r, s) \<in> set ps"
  shows "lang (set as) r = lang (set as) s"
proof -
  let ?R = "\<lambda>K L. (\<exists>(r,s)\<in>set ps. K = lang (set as) r \<and> L = lang (set as) s)"
  show ?thesis
  proof (rule language_coinduct[where R="?R" and S = "set as"])
    from \<open>(r, s) \<in> set ps\<close> show "?R (lang (set as) r) (lang (set as) s)"
      by auto
  next
    fix K L assume "?R K L"
    then obtain r s where rs: "(r, s) \<in> set ps"
      and KL: "K = lang (set as) r" "L = lang (set as) s" by auto
    with bisim have "nullable r \<longleftrightarrow> nullable s"
      by (auto simp: is_bisimulation_def)
    thus "[] \<in> K \<longleftrightarrow> [] \<in> L" by (auto simp: nullable_iff[where S="set as"] KL)
    txt\<open>next case, but shared context\<close>
    from bisim rs KL lang_subset_lists[of _ "set as"]
    show "K \<subseteq> lists (set as) \<and> L \<subseteq> lists (set as)"
      unfolding is_bisimulation_def by blast
    txt\<open>next case, but shared context\<close>
    fix a assume "a \<in> set as"
    with rs bisim
    have "(nderiv a r, nderiv a s) \<in> set ps"
      by (auto simp: is_bisimulation_def)
    thus "?R (Deriv a K) (Deriv a L)" using \<open>a \<in> set as\<close>
      by (force simp: KL lang_nderiv)
  qed
qed

subsection \<open>Closure computation\<close>

fun test :: "rexp_pairs * rexp_pairs \<Rightarrow> bool"
where "test (ws, ps) = (case ws of [] \<Rightarrow> False | (p,q)#_ \<Rightarrow> nullable p = nullable q)"

fun step :: "nat list \<Rightarrow> rexp_pairs * rexp_pairs \<Rightarrow> rexp_pairs * rexp_pairs"
where "step as (ws,ps) =
  (let
     (r, s) = hd ws;
     ps' = (r, s) # ps;
     succs = map (\<lambda>a. (nderiv a r, nderiv a s)) as;
     new = filter (\<lambda>p. p \<notin> set ps' \<union> set ws) succs
   in (new @ tl ws, ps'))"

definition closure ::
  "nat list \<Rightarrow> rexp_pairs * rexp_pairs
   \<Rightarrow> (rexp_pairs * rexp_pairs) option" where
"closure as = while_option test (step as)"

definition pre_bisim :: "nat list \<Rightarrow> nat rexp \<Rightarrow> nat rexp \<Rightarrow>
  rexp_pairs * rexp_pairs \<Rightarrow> bool"
where
"pre_bisim as r s = (\<lambda>(ws,ps).
  ((r, s) \<in> set ws \<union> set ps) \<and>
  (\<forall>(r,s)\<in> set ws \<union> set ps. atoms r \<union> atoms s \<subseteq> set as) \<and>
  (\<forall>(r,s)\<in> set ps. (nullable r \<longleftrightarrow> nullable s) \<and>
    (\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> set ps \<union> set ws)))"

theorem closure_sound:
  assumes result: "closure as ([(r,s)],[]) = Some([],ps)"
  and atoms: "atoms r \<union> atoms s \<subseteq> set as"
  shows "lang (set as) r = lang (set as) s"
proof-
  { fix st have "pre_bisim as r s st \<Longrightarrow> test st \<Longrightarrow> pre_bisim as r s (step as st)"
      unfolding pre_bisim_def
      by (cases st) (auto simp: split_def split: list.splits prod.splits
        dest!: subsetD[OF atoms_nderiv]) }
  moreover
  from atoms
  have "pre_bisim as r s ([(r,s)],[])" by (simp add: pre_bisim_def)
  ultimately have pre_bisim_ps: "pre_bisim as r s ([],ps)"
    by (rule while_option_rule[OF _ result[unfolded closure_def]])
  then have "is_bisimulation as ps" "(r, s) \<in> set ps"
    by (auto simp: pre_bisim_def is_bisimulation_def)
  thus "lang (set as) r = lang (set as) s" by (rule bisim_lang_eq)
qed


subsection \<open>The overall procedure\<close>

primrec add_atoms :: "nat rexp \<Rightarrow> nat list \<Rightarrow> nat list"
where
  "add_atoms Zero = id"
| "add_atoms One = id"
| "add_atoms (Atom a) = List.insert a"
| "add_atoms (Plus r s) = add_atoms s o add_atoms r"
| "add_atoms (Times r s) = add_atoms s o add_atoms r"
| "add_atoms (Not r) = add_atoms r"
| "add_atoms (Inter r s) = add_atoms s o add_atoms r"
| "add_atoms (Star r) = add_atoms r"

lemma set_add_atoms: "set (add_atoms r as) = atoms r \<union> set as"
by (induct r arbitrary: as) auto

definition check_eqv :: "nat list \<Rightarrow> nat rexp \<Rightarrow> nat rexp \<Rightarrow> bool"
where
"check_eqv as r s \<longleftrightarrow> set(add_atoms r (add_atoms s [])) \<subseteq> set as \<and>
  (case closure as ([(norm r, norm s)], []) of
     Some([],_) \<Rightarrow> True | _ \<Rightarrow> False)"

lemma soundness:
  assumes "check_eqv as r s" shows "lang (set as) r = lang (set as) s"
proof -
  obtain ps where cl: "closure as ([(norm r,norm s)],[]) = Some([],ps)"
    and at: "atoms r \<union> atoms s \<subseteq> set as"
    using assms
    by (auto simp: check_eqv_def set_add_atoms split:option.splits list.splits)
  hence "atoms(norm r) \<union> atoms(norm s) \<subseteq> set as"
    using atoms_norm by blast
  hence "lang (set as) (norm r) = lang (set as) (norm s)"
    by (rule closure_sound[OF cl])
  thus "lang (set as) r = lang (set as) s" by simp
qed

lemma "check_eqv [0] (Plus One (Times (Atom 0) (Star(Atom 0)))) (Star(Atom 0))"
by eval

lemma "check_eqv [0,1] (Not(Atom 0))
  (Plus One (Times (Plus (Atom 1) (Times (Atom 0) (Plus (Atom 0) (Atom 1))))
    (Star(Plus (Atom 0) (Atom 1)))))"
by eval

lemma "check_eqv [0] (Atom 0) (Inter (Star (Atom 0)) (Atom 0))"
by eval
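
text\<open>A further test: over the atoms \<open>[0,1]\<close>, the intersection of \<open>Star (Atom 0)\<close>
and \<open>Star (Atom 1)\<close> contains exactly the empty word.\<close>
lemma "check_eqv [0,1] (Inter (Star (Atom 0)) (Star (Atom 1))) One"
by eval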

end

@@ -0,0 +1,85 @@

section \<open>Normalizing Derivative\<close>

theory NDerivative
imports
  Regular_Exp
begin

subsection \<open>Normalizing operations\<close>

text \<open>associativity, commutativity, idempotence, zero\<close>

fun nPlus :: "'a::order rexp \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp"
where
  "nPlus Zero r = r"
| "nPlus r Zero = r"
| "nPlus (Plus r s) t = nPlus r (nPlus s t)"
| "nPlus r (Plus s t) =
    (if r = s then (Plus s t)
     else if le_rexp r s then Plus r (Plus s t)
     else Plus s (nPlus r t))"
| "nPlus r s =
    (if r = s then r
     else if le_rexp r s then Plus r s
     else Plus s r)"

lemma lang_nPlus[simp]: "lang (nPlus r s) = lang (Plus r s)"
by (induction r s rule: nPlus.induct) auto

text \<open>associativity, zero, one\<close>

fun nTimes :: "'a::order rexp \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp"
where
  "nTimes Zero _ = Zero"
| "nTimes _ Zero = Zero"
| "nTimes One r = r"
| "nTimes r One = r"
| "nTimes (Times r s) t = Times r (nTimes s t)"
| "nTimes r s = Times r s"

lemma lang_nTimes[simp]: "lang (nTimes r s) = lang (Times r s)"
by (induction r s rule: nTimes.induct) (auto simp: conc_assoc)

primrec norm :: "'a::order rexp \<Rightarrow> 'a rexp"
where
  "norm Zero = Zero"
| "norm One = One"
| "norm (Atom a) = Atom a"
| "norm (Plus r s) = nPlus (norm r) (norm s)"
| "norm (Times r s) = nTimes (norm r) (norm s)"
| "norm (Star r) = Star (norm r)"

lemma lang_norm[simp]: "lang (norm r) = lang r"
by (induct r) auto

primrec nderiv :: "'a::order \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp"
where
  "nderiv _ Zero = Zero"
| "nderiv _ One = Zero"
| "nderiv a (Atom b) = (if a = b then One else Zero)"
| "nderiv a (Plus r s) = nPlus (nderiv a r) (nderiv a s)"
| "nderiv a (Times r s) =
    (let r's = nTimes (nderiv a r) s
     in if nullable r then nPlus r's (nderiv a s) else r's)"
| "nderiv a (Star r) = nTimes (nderiv a r) (Star r)"
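
text\<open>A one-step illustration: deriving by \<open>0\<close> consumes the first atom; since
\<open>Atom 0\<close> is not nullable only the left summand remains, and \<open>nTimes\<close> collapses
the resulting \<open>Times One (Atom 1)\<close>:\<close>
lemma "nderiv (0::nat) (Times (Atom 0) (Atom 1)) = Atom 1"
by eval
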
lemma lang_nderiv: "lang (nderiv a r) = Deriv a (lang r)"
by (induction r) (auto simp: Let_def nullable_iff)

lemma deriv_no_occurrence:
  "x \<notin> atoms r \<Longrightarrow> nderiv x r = Zero"
by (induction r) auto

lemma atoms_nPlus[simp]: "atoms (nPlus r s) = atoms r \<union> atoms s"
by (induction r s rule: nPlus.induct) auto

lemma atoms_nTimes: "atoms (nTimes r s) \<subseteq> atoms r \<union> atoms s"
by (induction r s rule: nTimes.induct) auto

lemma atoms_norm: "atoms (norm r) \<subseteq> atoms r"
by (induction r) (auto dest!: subsetD[OF atoms_nTimes])

lemma atoms_nderiv: "atoms (nderiv a r) \<subseteq> atoms r"
by (induction r) (auto simp: Let_def dest!: subsetD[OF atoms_nTimes])

end

@@ -0,0 +1,12 @@

chapter AFP

session "Regular-Sets" (AFP) = "HOL-Library" +
  options [timeout = 600]
  theories
    Regexp_Method
    Regexp_Constructions
    pEquivalence_Checking
    Equivalence_Checking2
  document_files
    "root.bib"
    "root.tex"

@@ -0,0 +1,395 @@

(*
  File:   Regexp_Constructions.thy
  Author: Manuel Eberl <eberlm@in.tum.de>

  Some simple constructions on regular expressions to illustrate closure properties of regular
  languages: reversal, substitution, prefixes, suffixes, subwords ("fragments")
*)
section \<open>Basic constructions on regular expressions\<close>
theory Regexp_Constructions
imports
  Main
  "HOL-Library.Sublist"
  Regular_Exp
begin

subsection \<open>Reverse language\<close>

lemma rev_conc [simp]: "rev ` (A @@ B) = rev ` B @@ rev ` A"
unfolding conc_def image_def by force

lemma rev_compower [simp]: "rev ` (A ^^ n) = (rev ` A) ^^ n"
by (induction n) (simp_all add: conc_pow_comm)

lemma rev_star [simp]: "rev ` star A = star (rev ` A)"
by (simp add: star_def image_UN)


subsection \<open>Substituting characters in a language\<close>

definition subst_word :: "('a \<Rightarrow> 'b list) \<Rightarrow> 'a list \<Rightarrow> 'b list" where
"subst_word f xs = concat (map f xs)"
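
text\<open>For example, substituting a doubled letter for every letter:\<close>
lemma "subst_word (\<lambda>n::nat. [n, n]) [1, 2] = [1, 1, 2, 2]"
by (simp add: subst_word_def)
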
lemma subst_word_Nil [simp]: "subst_word f [] = []"
by (simp add: subst_word_def)

lemma subst_word_singleton [simp]: "subst_word f [x] = f x"
by (simp add: subst_word_def)

lemma subst_word_append [simp]: "subst_word f (xs @ ys) = subst_word f xs @ subst_word f ys"
by (simp add: subst_word_def)

lemma subst_word_Cons [simp]: "subst_word f (x # xs) = f x @ subst_word f xs"
by (simp add: subst_word_def)

lemma subst_word_conc [simp]: "subst_word f ` (A @@ B) = subst_word f ` A @@ subst_word f ` B"
unfolding conc_def image_def by force

lemma subst_word_compower [simp]: "subst_word f ` (A ^^ n) = (subst_word f ` A) ^^ n"
by (induction n) simp_all

lemma subst_word_star [simp]: "subst_word f ` (star A) = star (subst_word f ` A)"
by (simp add: star_def image_UN)


text \<open>Suffix language\<close>

definition Suffixes :: "'a list set \<Rightarrow> 'a list set" where
"Suffixes A = {w. \<exists>q. q @ w \<in> A}"

lemma Suffixes_altdef [code]: "Suffixes A = (\<Union>w\<in>A. set (suffixes w))"
unfolding Suffixes_def set_suffixes_eq suffix_def by blast
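
text\<open>For instance:\<close>
lemma "Suffixes {[1::nat, 2]} = {[], [2], [1, 2]}"
by (auto simp: Suffixes_altdef)
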
lemma Nil_in_Suffixes_iff [simp]: "[] \<in> Suffixes A \<longleftrightarrow> A \<noteq> {}"
by (auto simp: Suffixes_def)

lemma Suffixes_empty [simp]: "Suffixes {} = {}"
by (auto simp: Suffixes_def)

lemma Suffixes_empty_iff [simp]: "Suffixes A = {} \<longleftrightarrow> A = {}"
by (auto simp: Suffixes_altdef)

lemma Suffixes_singleton [simp]: "Suffixes {xs} = set (suffixes xs)"
by (auto simp: Suffixes_altdef)

lemma Suffixes_insert: "Suffixes (insert xs A) = set (suffixes xs) \<union> Suffixes A"
by (simp add: Suffixes_altdef)

lemma Suffixes_conc [simp]: "A \<noteq> {} \<Longrightarrow> Suffixes (A @@ B) = Suffixes B \<union> (Suffixes A @@ B)"
unfolding Suffixes_altdef conc_def by (force simp: suffix_append)

lemma Suffixes_union [simp]: "Suffixes (A \<union> B) = Suffixes A \<union> Suffixes B"
by (auto simp: Suffixes_def)

lemma Suffixes_UNION [simp]: "Suffixes (UNION A f) = UNION A (\<lambda>x. Suffixes (f x))"
by (auto simp: Suffixes_def)

lemma Suffixes_compower:
  assumes "A \<noteq> {}"
  shows "Suffixes (A ^^ n) = insert [] (Suffixes A @@ (\<Union>k<n. A ^^ k))"
proof (induction n)
  case (Suc n)
  from Suc have "Suffixes (A ^^ Suc n) =
      insert [] (Suffixes A @@ ((\<Union>k<n. A ^^ k) \<union> A ^^ n))"
    by (simp_all add: assms conc_Un_distrib)
  also have "(\<Union>k<n. A ^^ k) \<union> A ^^ n = (\<Union>k\<in>insert n {..<n}. A ^^ k)" by blast
  also have "insert n {..<n} = {..<Suc n}" by auto
  finally show ?case .
qed simp_all

lemma Suffixes_star [simp]:
  assumes "A \<noteq> {}"
  shows "Suffixes (star A) = Suffixes A @@ star A"
proof -
  have "star A = (\<Union>n. A ^^ n)" unfolding star_def ..
  also have "Suffixes \<dots> = (\<Union>x. Suffixes (A ^^ x))" by simp
  also have "\<dots> = (\<Union>n. insert [] (Suffixes A @@ (\<Union>k<n. A ^^ k)))"
    using assms by (subst Suffixes_compower) auto
  also have "\<dots> = insert [] (Suffixes A @@ (\<Union>n. (\<Union>k<n. A ^^ k)))"
    by (simp_all add: conc_UNION_distrib)
  also have "(\<Union>n. (\<Union>k<n. A ^^ k)) = (\<Union>n. A ^^ n)" by auto
  also have "\<dots> = star A" unfolding star_def ..
  also have "insert [] (Suffixes A @@ star A) = Suffixes A @@ star A"
    using assms by auto
  finally show ?thesis .
qed

text \<open>Prefix language\<close>

definition Prefixes :: "'a list set \<Rightarrow> 'a list set" where
"Prefixes A = {w. \<exists>q. w @ q \<in> A}"

lemma Prefixes_altdef [code]: "Prefixes A = (\<Union>w\<in>A. set (prefixes w))"
unfolding Prefixes_def set_prefixes_eq prefix_def by blast

lemma Nil_in_Prefixes_iff [simp]: "[] \<in> Prefixes A \<longleftrightarrow> A \<noteq> {}"
by (auto simp: Prefixes_def)

lemma Prefixes_empty [simp]: "Prefixes {} = {}"
by (auto simp: Prefixes_def)

lemma Prefixes_empty_iff [simp]: "Prefixes A = {} \<longleftrightarrow> A = {}"
by (auto simp: Prefixes_altdef)

lemma Prefixes_singleton [simp]: "Prefixes {xs} = set (prefixes xs)"
by (auto simp: Prefixes_altdef)

lemma Prefixes_insert: "Prefixes (insert xs A) = set (prefixes xs) \<union> Prefixes A"
by (simp add: Prefixes_altdef)

lemma Prefixes_conc [simp]: "B \<noteq> {} \<Longrightarrow> Prefixes (A @@ B) = Prefixes A \<union> (A @@ Prefixes B)"
unfolding Prefixes_altdef conc_def by (force simp: prefix_append)

lemma Prefixes_union [simp]: "Prefixes (A \<union> B) = Prefixes A \<union> Prefixes B"
by (auto simp: Prefixes_def)

lemma Prefixes_UNION [simp]: "Prefixes (UNION A f) = UNION A (\<lambda>x. Prefixes (f x))"
by (auto simp: Prefixes_def)


lemma Prefixes_rev: "Prefixes (rev ` A) = rev ` Suffixes A"
by (auto simp: Prefixes_altdef prefixes_rev Suffixes_altdef)

lemma Suffixes_rev: "Suffixes (rev ` A) = rev ` Prefixes A"
by (auto simp: Prefixes_altdef suffixes_rev Suffixes_altdef)


lemma Prefixes_compower:
  assumes "A \<noteq> {}"
  shows "Prefixes (A ^^ n) = insert [] ((\<Union>k<n. A ^^ k) @@ Prefixes A)"
proof -
  have "A ^^ n = rev ` ((rev ` A) ^^ n)" by (simp add: image_image)
  also have "Prefixes \<dots> = insert [] ((\<Union>k<n. A ^^ k) @@ Prefixes A)"
    unfolding Prefixes_rev
    by (subst Suffixes_compower) (simp_all add: image_UN image_image Suffixes_rev assms)
  finally show ?thesis .
qed

lemma Prefixes_star [simp]:
  assumes "A \<noteq> {}"
  shows "Prefixes (star A) = star A @@ Prefixes A"
proof -
  have "star A = rev ` star (rev ` A)" by (simp add: image_image)
  also have "Prefixes \<dots> = star A @@ Prefixes A"
    unfolding Prefixes_rev
    by (subst Suffixes_star) (simp_all add: assms image_image Suffixes_rev)
  finally show ?thesis .
qed


subsection \<open>Subword language\<close>

text \<open>
  The language of all subwords, i.e.\ of all words that occur as a contiguous sublist
  of some word in the original language.
\<close>
definition Sublists :: "'a list set \<Rightarrow> 'a list set" where
"Sublists A = {w. \<exists>q\<in>A. sublist w q}"

lemma Sublists_altdef [code]: "Sublists A = (\<Union>w\<in>A. set (sublists w))"
by (auto simp: Sublists_def)

lemma Sublists_empty [simp]: "Sublists {} = {}"
by (auto simp: Sublists_def)

lemma Sublists_singleton [simp]: "Sublists {w} = set (sublists w)"
by (auto simp: Sublists_altdef)

lemma Sublists_insert: "Sublists (insert w A) = set (sublists w) \<union> Sublists A"
by (auto simp: Sublists_altdef)

lemma Sublists_Un [simp]: "Sublists (A \<union> B) = Sublists A \<union> Sublists B"
by (auto simp: Sublists_altdef)

lemma Sublists_UN [simp]: "Sublists (UNION A f) = UNION A (\<lambda>x. Sublists (f x))"
by (auto simp: Sublists_altdef)

lemma Sublists_conv_Prefixes: "Sublists A = Prefixes (Suffixes A)"
by (auto simp: Sublists_def Prefixes_def Suffixes_def sublist_def)

lemma Sublists_conv_Suffixes: "Sublists A = Suffixes (Prefixes A)"
by (auto simp: Sublists_def Prefixes_def Suffixes_def sublist_def)

lemma Sublists_conc [simp]:
  assumes "A \<noteq> {}" "B \<noteq> {}"
  shows "Sublists (A @@ B) = Sublists A \<union> Sublists B \<union> Suffixes A @@ Prefixes B"
using assms unfolding Sublists_conv_Suffixes by auto

lemma star_not_empty [simp]: "star A \<noteq> {}"
by auto

lemma Sublists_star:
  "A \<noteq> {} \<Longrightarrow> Sublists (star A) = Sublists A \<union> Suffixes A @@ star A @@ Prefixes A"
by (simp add: Sublists_conv_Prefixes)

lemma Prefixes_subset_Sublists: "Prefixes A \<subseteq> Sublists A"
unfolding Prefixes_def Sublists_def by auto

lemma Suffixes_subset_Sublists: "Suffixes A \<subseteq> Sublists A"
unfolding Suffixes_def Sublists_def by auto

subsection \<open>Fragment language\<close>

text \<open>
  The following is the fragment language of a given language, i.e. the set of all words that
  are (not necessarily contiguous) subsequences of a word in the original language.
\<close>
definition Subseqs where "Subseqs A = (\<Union>w\<in>A. set (subseqs w))"

lemma Subseqs_empty [simp]: "Subseqs {} = {}"
by (simp add: Subseqs_def)

lemma Subseqs_insert [simp]: "Subseqs (insert xs A) = set (subseqs xs) \<union> Subseqs A"
by (simp add: Subseqs_def)

lemma Subseqs_singleton [simp]: "Subseqs {xs} = set (subseqs xs)"
by simp

lemma Subseqs_Un [simp]: "Subseqs (A \<union> B) = Subseqs A \<union> Subseqs B"
by (simp add: Subseqs_def)

lemma Subseqs_UNION [simp]: "Subseqs (UNION A f) = UNION A (\<lambda>x. Subseqs (f x))"
by (simp add: Subseqs_def)

lemma Subseqs_conc [simp]: "Subseqs (A @@ B) = Subseqs A @@ Subseqs B"
proof safe
  fix xs assume "xs \<in> Subseqs (A @@ B)"
  then obtain ys zs where *: "ys \<in> A" "zs \<in> B" "subseq xs (ys @ zs)"
    by (auto simp: Subseqs_def conc_def)
  from *(3) obtain xs1 xs2 where "xs = xs1 @ xs2" "subseq xs1 ys" "subseq xs2 zs"
    by (rule subseq_appendE)
  with *(1,2) show "xs \<in> Subseqs A @@ Subseqs B" by (auto simp: Subseqs_def set_subseqs_eq)
next
  fix xs assume "xs \<in> Subseqs A @@ Subseqs B"
  then obtain xs1 xs2 ys zs
    where "xs = xs1 @ xs2" "subseq xs1 ys" "subseq xs2 zs" "ys \<in> A" "zs \<in> B"
    by (auto simp: conc_def Subseqs_def)
  thus "xs \<in> Subseqs (A @@ B)" by (force simp: Subseqs_def conc_def intro: list_emb_append_mono)
qed

lemma Subseqs_compower [simp]: "Subseqs (A ^^ n) = Subseqs A ^^ n"
by (induction n) simp_all

lemma Subseqs_star [simp]: "Subseqs (star A) = star (Subseqs A)"
by (simp add: star_def)

lemma Sublists_subset_Subseqs: "Sublists A \<subseteq> Subseqs A"
by (auto simp: Sublists_def Subseqs_def dest!: sublist_imp_subseq)


subsection \<open>Various regular expression constructions\<close>

text \<open>A construction for language reversal of a regular expression:\<close>

primrec rexp_rev where
  "rexp_rev Zero = Zero"
| "rexp_rev One = One"
| "rexp_rev (Atom x) = Atom x"
| "rexp_rev (Plus r s) = Plus (rexp_rev r) (rexp_rev s)"
| "rexp_rev (Times r s) = Times (rexp_rev s) (rexp_rev r)"
| "rexp_rev (Star r) = Star (rexp_rev r)"

lemma lang_rexp_rev [simp]: "lang (rexp_rev r) = rev ` lang r"
by (induction r) (simp_all add: image_Un)
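
text\<open>For example, reversal swaps the arguments of \<open>Times\<close>:\<close>
lemma "rexp_rev (Times (Atom a) (Star (Atom b))) = Times (Star (Atom b)) (Atom a)"
by simp
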
text \<open>The obvious construction for a singleton-language regular expression:\<close>

fun rexp_of_word where
  "rexp_of_word [] = One"
| "rexp_of_word [x] = Atom x"
| "rexp_of_word (x#xs) = Times (Atom x) (rexp_of_word xs)"

lemma lang_rexp_of_word [simp]: "lang (rexp_of_word xs) = {xs}"
by (induction xs rule: rexp_of_word.induct) (simp_all add: conc_def)

lemma size_rexp_of_word [simp]: "size (rexp_of_word xs) = Suc (2 * (length xs - 1))"
by (induction xs rule: rexp_of_word.induct) auto
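
text\<open>For instance:\<close>
lemma "rexp_of_word [0::nat, 1] = Times (Atom 0) (Atom 1)"
by simp
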
text \<open>Character substitution in a regular expression:\<close>

primrec rexp_subst where
  "rexp_subst f Zero = Zero"
| "rexp_subst f One = One"
| "rexp_subst f (Atom x) = rexp_of_word (f x)"
| "rexp_subst f (Plus r s) = Plus (rexp_subst f r) (rexp_subst f s)"
| "rexp_subst f (Times r s) = Times (rexp_subst f r) (rexp_subst f s)"
| "rexp_subst f (Star r) = Star (rexp_subst f r)"

lemma lang_rexp_subst: "lang (rexp_subst f r) = subst_word f ` lang r"
by (induction r) (simp_all add: image_Un)


text \<open>Suffix language of a regular expression:\<close>

primrec suffix_rexp :: "'a rexp \<Rightarrow> 'a rexp" where
  "suffix_rexp Zero = Zero"
| "suffix_rexp One = One"
| "suffix_rexp (Atom a) = Plus (Atom a) One"
| "suffix_rexp (Plus r s) = Plus (suffix_rexp r) (suffix_rexp s)"
| "suffix_rexp (Times r s) =
    (if rexp_empty r then Zero else Plus (Times (suffix_rexp r) s) (suffix_rexp s))"
| "suffix_rexp (Star r) =
    (if rexp_empty r then One else Times (suffix_rexp r) (Star r))"

theorem lang_suffix_rexp [simp]:
  "lang (suffix_rexp r) = Suffixes (lang r)"
by (induction r) (auto simp: rexp_empty_iff)
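
text\<open>For example, the language of \<open>Times (Atom 0) (Atom 1)\<close> is \<open>{[0, 1]}\<close>,
whose suffixes are \<open>[0, 1]\<close>, \<open>[1]\<close> and \<open>[]\<close>:\<close>
lemma "suffix_rexp (Times (Atom (0::nat)) (Atom 1)) =
  Plus (Times (Plus (Atom 0) One) (Atom 1)) (Plus (Atom 1) One)"
by eval
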
text \<open>Prefix language of a regular expression:\<close>

primrec prefix_rexp :: "'a rexp \<Rightarrow> 'a rexp" where
  "prefix_rexp Zero = Zero"
| "prefix_rexp One = One"
| "prefix_rexp (Atom a) = Plus (Atom a) One"
| "prefix_rexp (Plus r s) = Plus (prefix_rexp r) (prefix_rexp s)"
| "prefix_rexp (Times r s) =
    (if rexp_empty s then Zero else Plus (Times r (prefix_rexp s)) (prefix_rexp r))"
| "prefix_rexp (Star r) =
    (if rexp_empty r then One else Times (Star r) (prefix_rexp r))"

theorem lang_prefix_rexp [simp]:
  "lang (prefix_rexp r) = Prefixes (lang r)"
by (induction r) (auto simp: rexp_empty_iff)


text \<open>Sub-word language of a regular expression\<close>

primrec sublist_rexp :: "'a rexp \<Rightarrow> 'a rexp" where
  "sublist_rexp Zero = Zero"
| "sublist_rexp One = One"
| "sublist_rexp (Atom a) = Plus (Atom a) One"
| "sublist_rexp (Plus r s) = Plus (sublist_rexp r) (sublist_rexp s)"
| "sublist_rexp (Times r s) =
    (if rexp_empty r \<or> rexp_empty s then Zero else
       Plus (sublist_rexp r) (Plus (sublist_rexp s) (Times (suffix_rexp r) (prefix_rexp s))))"
| "sublist_rexp (Star r) =
    (if rexp_empty r then One else
       Plus (sublist_rexp r) (Times (suffix_rexp r) (Times (Star r) (prefix_rexp r))))"

theorem lang_sublist_rexp [simp]:
  "lang (sublist_rexp r) = Sublists (lang r)"
by (induction r) (auto simp: rexp_empty_iff Sublists_star)

text \<open>Fragment language of a regular expression:\<close>

primrec subseqs_rexp :: "'a rexp \<Rightarrow> 'a rexp" where
  "subseqs_rexp Zero = Zero"
| "subseqs_rexp One = One"
| "subseqs_rexp (Atom x) = Plus (Atom x) One"
| "subseqs_rexp (Plus r s) = Plus (subseqs_rexp r) (subseqs_rexp s)"
| "subseqs_rexp (Times r s) = Times (subseqs_rexp r) (subseqs_rexp s)"
| "subseqs_rexp (Star r) = Star (subseqs_rexp r)"

lemma lang_subseqs_rexp [simp]: "lang (subseqs_rexp r) = Subseqs (lang r)"
by (induction r) auto

end

@@ -0,0 +1,67 @@

section \<open>Proving Relation (In)equalities via Regular Expressions\<close>

theory Regexp_Method
imports Equivalence_Checking Relation_Interpretation
begin

primrec rel_of_regexp :: "('a * 'a) set list \<Rightarrow> nat rexp \<Rightarrow> ('a * 'a) set" where
  "rel_of_regexp vs Zero = {}" |
  "rel_of_regexp vs One = Id" |
  "rel_of_regexp vs (Atom i) = vs ! i" |
  "rel_of_regexp vs (Plus r s) = rel_of_regexp vs r \<union> rel_of_regexp vs s" |
  "rel_of_regexp vs (Times r s) = rel_of_regexp vs r O rel_of_regexp vs s" |
  "rel_of_regexp vs (Star r) = (rel_of_regexp vs r)^*"

lemma rel_of_regexp_rel: "rel_of_regexp vs r = rel (\<lambda>i. vs ! i) r"
by (induct r) auto

primrec rel_eq where
  "rel_eq (r, s) vs = (rel_of_regexp vs r = rel_of_regexp vs s)"

lemma rel_eqI: "check_eqv r s \<Longrightarrow> rel_eq (r, s) vs"
unfolding rel_eq.simps rel_of_regexp_rel
by (rule Relation_Interpretation.soundness)
   (rule Equivalence_Checking.soundness)

lemmas regexp_reify = rel_of_regexp.simps rel_eq.simps
lemmas regexp_unfold = trancl_unfold_left subset_Un_eq

ML \<open>
local

fun check_eqv (ct, b) = Thm.mk_binop @{cterm "Pure.eq :: bool \<Rightarrow> bool \<Rightarrow> prop"}
  ct (if b then @{cterm True} else @{cterm False});

val (_, check_eqv_oracle) = Context.>>> (Context.map_theory_result
  (Thm.add_oracle (@{binding check_eqv}, check_eqv)));

in

val regexp_conv =
  @{computation_conv bool terms: check_eqv datatypes: "nat rexp"}
    (fn _ => fn b => fn ct => check_eqv_oracle (ct, b))

end
\<close>

method_setup regexp = \<open>
Scan.succeed (fn ctxt =>
  SIMPLE_METHOD' (
    (TRY o eresolve_tac ctxt @{thms rev_subsetD})
    THEN' (Subgoal.FOCUS_PARAMS (fn {context = ctxt', ...} =>
      TRY (Local_Defs.unfold_tac ctxt' @{thms regexp_unfold})
      THEN Reification.tac ctxt' @{thms regexp_reify} NONE 1
      THEN resolve_tac ctxt' @{thms rel_eqI} 1
      THEN CONVERSION (HOLogic.Trueprop_conv (regexp_conv ctxt')) 1
      THEN resolve_tac ctxt' [TrueI] 1) ctxt)))
\<close> \<open>decide relation equalities via regular expressions\<close>

hide_const (open) le_rexp nPlus nTimes norm nullable bisimilar is_bisimulation closure
  pre_bisim add_atoms check_eqv rel word_rel rel_eq

text \<open>Example:\<close>

lemma "(r \<union> s^+)^* = (r \<union> s)^*"
by regexp
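
text\<open>Inclusions can be decided as well, since \<open>subset_Un_eq\<close> reduces them to equalities:\<close>
lemma "r O r^* \<subseteq> r^*"
by regexp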

end

@@ -0,0 +1,176 @@

(* Author: Tobias Nipkow *)

section "Regular expressions"

theory Regular_Exp
imports Regular_Set
begin

datatype (atoms: 'a) rexp =
  is_Zero: Zero |
  is_One: One |
  Atom 'a |
  Plus "('a rexp)" "('a rexp)" |
  Times "('a rexp)" "('a rexp)" |
  Star "('a rexp)"

primrec lang :: "'a rexp => 'a lang" where
  "lang Zero = {}" |
  "lang One = {[]}" |
  "lang (Atom a) = {[a]}" |
  "lang (Plus r s) = (lang r) Un (lang s)" |
  "lang (Times r s) = conc (lang r) (lang s)" |
  "lang (Star r) = star(lang r)"

abbreviation (input) regular_lang where "regular_lang A \<equiv> (\<exists>r. lang r = A)"

primrec nullable :: "'a rexp \<Rightarrow> bool" where
  "nullable Zero = False" |
  "nullable One = True" |
  "nullable (Atom c) = False" |
  "nullable (Plus r1 r2) = (nullable r1 \<or> nullable r2)" |
  "nullable (Times r1 r2) = (nullable r1 \<and> nullable r2)" |
  "nullable (Star r) = True"

lemma nullable_iff [code_abbrev]: "nullable r \<longleftrightarrow> [] \<in> lang r"
by (induct r) (auto simp add: conc_def split: if_splits)
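
text\<open>For instance, nullability is decided without computing the language:\<close>
lemma "nullable (Times (Star (Atom a)) (Plus One (Atom b)))"
by simp
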
primrec rexp_empty where
  "rexp_empty Zero \<longleftrightarrow> True"
| "rexp_empty One \<longleftrightarrow> False"
| "rexp_empty (Atom a) \<longleftrightarrow> False"
| "rexp_empty (Plus r s) \<longleftrightarrow> rexp_empty r \<and> rexp_empty s"
| "rexp_empty (Times r s) \<longleftrightarrow> rexp_empty r \<or> rexp_empty s"
| "rexp_empty (Star r) \<longleftrightarrow> False"

(* TODO Fixme: This code_abbrev rule does not work. Why? *)
lemma rexp_empty_iff [code_abbrev]: "rexp_empty r \<longleftrightarrow> lang r = {}"
by (induction r) auto


text\<open>Composition on rhs usually complicates matters:\<close>
lemma map_map_rexp:
  "map_rexp f (map_rexp g r) = map_rexp (\<lambda>r. f (g r)) r"
unfolding rexp.map_comp o_def ..

lemma map_rexp_ident[simp]: "map_rexp (\<lambda>x. x) = (\<lambda>r. r)"
unfolding id_def[symmetric] fun_eq_iff rexp.map_id id_apply by (intro allI refl)

lemma atoms_lang: "w : lang r \<Longrightarrow> set w \<subseteq> atoms r"
proof(induction r arbitrary: w)
  case Times thus ?case by fastforce
next
  case Star thus ?case by (fastforce simp add: star_conv_concat)
qed auto

lemma lang_eq_ext: "(lang r = lang s) =
  (\<forall>w \<in> lists(atoms r \<union> atoms s). w \<in> lang r \<longleftrightarrow> w \<in> lang s)"
by (auto simp: atoms_lang[unfolded subset_iff])

lemma lang_eq_ext_Nil_fold_Deriv:
  fixes r s
  defines "\<BB> \<equiv> {(fold Deriv w (lang r), fold Deriv w (lang s))| w. w\<in>lists (atoms r \<union> atoms s)}"
  shows "lang r = lang s \<longleftrightarrow> (\<forall>(K, L) \<in> \<BB>. [] \<in> K \<longleftrightarrow> [] \<in> L)"
  unfolding lang_eq_ext \<BB>_def by (subst (1 2) in_fold_Deriv[of "[]", simplified, symmetric]) auto


subsection \<open>Term ordering\<close>

instantiation rexp :: (order) "{order}"
begin

fun le_rexp :: "('a::order) rexp \<Rightarrow> ('a::order) rexp \<Rightarrow> bool"
where
  "le_rexp Zero _ = True"
| "le_rexp _ Zero = False"
| "le_rexp One _ = True"
| "le_rexp _ One = False"
| "le_rexp (Atom a) (Atom b) = (a <= b)"
| "le_rexp (Atom _) _ = True"
| "le_rexp _ (Atom _) = False"
| "le_rexp (Star r) (Star s) = le_rexp r s"
| "le_rexp (Star _) _ = True"
| "le_rexp _ (Star _) = False"
| "le_rexp (Plus r r') (Plus s s') =
    (if r = s then le_rexp r' s' else le_rexp r s)"
| "le_rexp (Plus _ _) _ = True"
| "le_rexp _ (Plus _ _) = False"
| "le_rexp (Times r r') (Times s s') =
    (if r = s then le_rexp r' s' else le_rexp r s)"

(* The class instance stuff is by Dmitriy Traytel *)

definition less_eq_rexp where "r \<le> s \<equiv> le_rexp r s"
definition less_rexp where "r < s \<equiv> le_rexp r s \<and> r \<noteq> s"

lemma le_rexp_Zero: "le_rexp r Zero \<Longrightarrow> r = Zero"
by (induction r) auto

lemma le_rexp_refl: "le_rexp r r"
by (induction r) auto

lemma le_rexp_antisym: "\<lbrakk>le_rexp r s; le_rexp s r\<rbrakk> \<Longrightarrow> r = s"
by (induction r s rule: le_rexp.induct) (auto dest: le_rexp_Zero)

lemma le_rexp_trans: "\<lbrakk>le_rexp r s; le_rexp s t\<rbrakk> \<Longrightarrow> le_rexp r t"
proof (induction r s arbitrary: t rule: le_rexp.induct)
  fix v t assume "le_rexp (Atom v) t" thus "le_rexp One t" by (cases t) auto
next
  fix s1 s2 t assume "le_rexp (Plus s1 s2) t" thus "le_rexp One t" by (cases t) auto
next
  fix s1 s2 t assume "le_rexp (Times s1 s2) t" thus "le_rexp One t" by (cases t) auto
next
  fix s t assume "le_rexp (Star s) t" thus "le_rexp One t" by (cases t) auto
next
  fix v u t assume "le_rexp (Atom v) (Atom u)" "le_rexp (Atom u) t"
  thus "le_rexp (Atom v) t" by (cases t) auto
next
  fix v s1 s2 t assume "le_rexp (Plus s1 s2) t" thus "le_rexp (Atom v) t" by (cases t) auto
next
  fix v s1 s2 t assume "le_rexp (Times s1 s2) t" thus "le_rexp (Atom v) t" by (cases t) auto
next
  fix v s t assume "le_rexp (Star s) t" thus "le_rexp (Atom v) t" by (cases t) auto
next
  fix r s t
  assume IH: "\<And>t. le_rexp r s \<Longrightarrow> le_rexp s t \<Longrightarrow> le_rexp r t"
    and "le_rexp (Star r) (Star s)" "le_rexp (Star s) t"
  thus "le_rexp (Star r) t" by (cases t) auto
next
  fix r s1 s2 t assume "le_rexp (Plus s1 s2) t" thus "le_rexp (Star r) t" by (cases t) auto
next
  fix r s1 s2 t assume "le_rexp (Times s1 s2) t" thus "le_rexp (Star r) t" by (cases t) auto
next
  fix r1 r2 s1 s2 t
  assume "\<And>t. r1 = s1 \<Longrightarrow> le_rexp r2 s2 \<Longrightarrow> le_rexp s2 t \<Longrightarrow> le_rexp r2 t"
         "\<And>t. r1 \<noteq> s1 \<Longrightarrow> le_rexp r1 s1 \<Longrightarrow> le_rexp s1 t \<Longrightarrow> le_rexp r1 t"
         "le_rexp (Plus r1 r2) (Plus s1 s2)" "le_rexp (Plus s1 s2) t"
  thus "le_rexp (Plus r1 r2) t" by (cases t) (auto split: if_split_asm intro: le_rexp_antisym)
next
  fix r1 r2 s1 s2 t assume "le_rexp (Times s1 s2) t" thus "le_rexp (Plus r1 r2) t" by (cases t) auto
next
  fix r1 r2 s1 s2 t
  assume "\<And>t. r1 = s1 \<Longrightarrow> le_rexp r2 s2 \<Longrightarrow> le_rexp s2 t \<Longrightarrow> le_rexp r2 t"
         "\<And>t. r1 \<noteq> s1 \<Longrightarrow> le_rexp r1 s1 \<Longrightarrow> le_rexp s1 t \<Longrightarrow> le_rexp r1 t"
         "le_rexp (Times r1 r2) (Times s1 s2)" "le_rexp (Times s1 s2) t"
  thus "le_rexp (Times r1 r2) t" by (cases t) (auto split: if_split_asm intro: le_rexp_antisym)
qed auto

instance proof
qed (auto simp add: less_eq_rexp_def less_rexp_def
       intro: le_rexp_refl le_rexp_antisym le_rexp_trans)

end

instantiation rexp :: (linorder) "{linorder}"
begin

lemma le_rexp_total: "le_rexp (r :: 'a :: linorder rexp) s \<or> le_rexp s r"
by (induction r s rule: le_rexp.induct) auto

instance proof
qed (unfold less_eq_rexp_def less_rexp_def, rule le_rexp_total)

end

end

@@ -0,0 +1,51 @@

(* Author: Tobias Nipkow *)

section "Extended Regular Expressions"

theory Regular_Exp2
imports Regular_Set
begin

datatype (atoms: 'a) rexp =
  is_Zero: Zero |
  is_One: One |
  Atom 'a |
  Plus "('a rexp)" "('a rexp)" |
  Times "('a rexp)" "('a rexp)" |
  Star "('a rexp)" |
  Not "('a rexp)" |
  Inter "('a rexp)" "('a rexp)"

context
  fixes S :: "'a set"
begin

primrec lang :: "'a rexp => 'a lang" where
  "lang Zero = {}" |
  "lang One = {[]}" |
  "lang (Atom a) = {[a]}" |
  "lang (Plus r s) = (lang r) Un (lang s)" |
  "lang (Times r s) = conc (lang r) (lang s)" |
  "lang (Star r) = star(lang r)" |
  "lang (Not r) = lists S - lang r" |
  "lang (Inter r s) = (lang r Int lang s)"

end

lemma lang_subset_lists: "atoms r \<subseteq> S \<Longrightarrow> lang S r \<subseteq> lists S"
by(induction r)(auto simp: conc_subset_lists star_subset_lists)

primrec nullable :: "'a rexp \<Rightarrow> bool" where
  "nullable Zero = False" |
  "nullable One = True" |
  "nullable (Atom c) = False" |
  "nullable (Plus r1 r2) = (nullable r1 \<or> nullable r2)" |
  "nullable (Times r1 r2) = (nullable r1 \<and> nullable r2)" |
  "nullable (Star r) = True" |
  "nullable (Not r) = (\<not> (nullable r))" |
  "nullable (Inter r s) = (nullable r \<and> nullable s)"

lemma nullable_iff: "nullable r \<longleftrightarrow> [] \<in> lang S r"
by (induct r) (auto simp add: conc_def split: if_splits)

end

@@ -0,0 +1,456 @@

(* Author: Tobias Nipkow, Alex Krauss, Christian Urban *)

section "Regular sets"

theory Regular_Set
imports Main
begin

type_synonym 'a lang = "'a list set"

definition conc :: "'a lang \<Rightarrow> 'a lang \<Rightarrow> 'a lang" (infixr "@@" 75) where
"A @@ B = {xs@ys | xs ys. xs:A & ys:B}"
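
text\<open>For example:\<close>
lemma "{[0::nat]} @@ {[1], [2]} = {[0, 1], [0, 2]}"
by (auto simp: conc_def)
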
text \<open>checks the code preprocessor for set comprehensions\<close>
export_code conc checking SML

overloading lang_pow == "compow :: nat \<Rightarrow> 'a lang \<Rightarrow> 'a lang"
begin
primrec lang_pow :: "nat \<Rightarrow> 'a lang \<Rightarrow> 'a lang" where
  "lang_pow 0 A = {[]}" |
  "lang_pow (Suc n) A = A @@ (lang_pow n A)"
end

text \<open>for code generation\<close>

definition lang_pow :: "nat \<Rightarrow> 'a lang \<Rightarrow> 'a lang" where
  lang_pow_code_def [code_abbrev]: "lang_pow = compow"

lemma [code]:
  "lang_pow (Suc n) A = A @@ (lang_pow n A)"
  "lang_pow 0 A = {[]}"
by (simp_all add: lang_pow_code_def)

hide_const (open) lang_pow

definition star :: "'a lang \<Rightarrow> 'a lang" where
"star A = (\<Union>n. A ^^ n)"


subsection\<open>@{term "(@@)"}\<close>

lemma concI[simp,intro]: "u : A \<Longrightarrow> v : B \<Longrightarrow> u@v : A @@ B"
by (auto simp add: conc_def)

lemma concE[elim]:
  assumes "w \<in> A @@ B"
  obtains u v where "u \<in> A" "v \<in> B" "w = u@v"
using assms by (auto simp: conc_def)

lemma conc_mono: "A \<subseteq> C \<Longrightarrow> B \<subseteq> D \<Longrightarrow> A @@ B \<subseteq> C @@ D"
by (auto simp: conc_def)

lemma conc_empty[simp]: shows "{} @@ A = {}" and "A @@ {} = {}"
by auto

lemma conc_epsilon[simp]: shows "{[]} @@ A = A" and "A @@ {[]} = A"
by (simp_all add:conc_def)

lemma conc_assoc: "(A @@ B) @@ C = A @@ (B @@ C)"
by (auto elim!: concE) (simp only: append_assoc[symmetric] concI)

lemma conc_Un_distrib:
  shows "A @@ (B \<union> C) = A @@ B \<union> A @@ C"
  and "(A \<union> B) @@ C = A @@ C \<union> B @@ C"
by auto

lemma conc_UNION_distrib:
  shows "A @@ UNION I M = UNION I (%i. A @@ M i)"
  and "UNION I M @@ A = UNION I (%i. M i @@ A)"
by auto

lemma conc_subset_lists: "A \<subseteq> lists S \<Longrightarrow> B \<subseteq> lists S \<Longrightarrow> A @@ B \<subseteq> lists S"
by(fastforce simp: conc_def in_lists_conv_set)

lemma Nil_in_conc[simp]: "[] \<in> A @@ B \<longleftrightarrow> [] \<in> A \<and> [] \<in> B"
by (metis append_is_Nil_conv concE concI)

lemma concI_if_Nil1: "[] \<in> A \<Longrightarrow> xs : B \<Longrightarrow> xs \<in> A @@ B"
by (metis append_Nil concI)

lemma conc_Diff_if_Nil1: "[] \<in> A \<Longrightarrow> A @@ B = (A - {[]}) @@ B \<union> B"
by (fastforce elim: concI_if_Nil1)

lemma concI_if_Nil2: "[] \<in> B \<Longrightarrow> xs : A \<Longrightarrow> xs \<in> A @@ B"
by (metis append_Nil2 concI)

lemma conc_Diff_if_Nil2: "[] \<in> B \<Longrightarrow> A @@ B = A @@ (B - {[]}) \<union> A"
by (fastforce elim: concI_if_Nil2)

lemma singleton_in_conc:
  "[x] : A @@ B \<longleftrightarrow> [x] : A \<and> [] : B \<or> [] : A \<and> [x] : B"
by (fastforce simp: Cons_eq_append_conv append_eq_Cons_conv
       conc_Diff_if_Nil1 conc_Diff_if_Nil2)


subsection\<open>@{term "A ^^ n"}\<close>

lemma lang_pow_add: "A ^^ (n + m) = A ^^ n @@ A ^^ m"
by (induct n) (auto simp: conc_assoc)

lemma lang_pow_empty: "{} ^^ n = (if n = 0 then {[]} else {})"
by (induct n) auto

lemma lang_pow_empty_Suc[simp]: "({}::'a lang) ^^ Suc n = {}"
by (simp add: lang_pow_empty)

lemma conc_pow_comm:
  shows "A @@ (A ^^ n) = (A ^^ n) @@ A"
by (induct n) (simp_all add: conc_assoc[symmetric])

lemma length_lang_pow_ub:
  "\<forall>w \<in> A. length w \<le> k \<Longrightarrow> w : A^^n \<Longrightarrow> length w \<le> k*n"
by(induct n arbitrary: w) (fastforce simp: conc_def)+

lemma length_lang_pow_lb:
  "\<forall>w \<in> A. length w \<ge> k \<Longrightarrow> w : A^^n \<Longrightarrow> length w \<ge> k*n"
by(induct n arbitrary: w) (fastforce simp: conc_def)+

lemma lang_pow_subset_lists: "A \<subseteq> lists S \<Longrightarrow> A ^^ n \<subseteq> lists S"
by(induct n)(auto simp: conc_subset_lists)


subsection\<open>@{const star}\<close>

lemma star_subset_lists: "A \<subseteq> lists S \<Longrightarrow> star A \<subseteq> lists S"
unfolding star_def by(blast dest: lang_pow_subset_lists)

lemma star_if_lang_pow[simp]: "w : A ^^ n \<Longrightarrow> w : star A"
by (auto simp: star_def)

lemma Nil_in_star[iff]: "[] : star A"
proof (rule star_if_lang_pow)
  show "[] : A ^^ 0" by simp
qed

lemma star_if_lang[simp]: assumes "w : A" shows "w : star A"
proof (rule star_if_lang_pow)
  show "w : A ^^ 1" using \<open>w : A\<close> by simp
qed

lemma append_in_starI[simp]:
  assumes "u : star A" and "v : star A" shows "u@v : star A"
proof -
  from \<open>u : star A\<close> obtain m where "u : A ^^ m" by (auto simp: star_def)
  moreover
  from \<open>v : star A\<close> obtain n where "v : A ^^ n" by (auto simp: star_def)
  ultimately have "u@v : A ^^ (m+n)" by (simp add: lang_pow_add)
  thus ?thesis by simp
qed
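
text\<open>A small consequence:\<close>
lemma "[0::nat, 0] \<in> star {[0]}"
proof -
  have "[0::nat] @ [0] \<in> star {[0]}" by simp
  thus ?thesis by simp
qed
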
lemma conc_star_star: "star A @@ star A = star A"
|
||||
by (auto simp: conc_def)
|
||||
|
||||
lemma conc_star_comm:
|
||||
shows "A @@ star A = star A @@ A"
|
||||
unfolding star_def conc_pow_comm conc_UNION_distrib
|
||||
by simp
|
||||
|
||||
lemma star_induct[consumes 1, case_names Nil append, induct set: star]:
|
||||
assumes "w : star A"
|
||||
and "P []"
|
||||
and step: "!!u v. u : A \<Longrightarrow> v : star A \<Longrightarrow> P v \<Longrightarrow> P (u@v)"
|
||||
shows "P w"
|
||||
proof -
|
||||
{ fix n have "w : A ^^ n \<Longrightarrow> P w"
|
||||
by (induct n arbitrary: w) (auto intro: \<open>P []\<close> step star_if_lang_pow) }
|
||||
with \<open>w : star A\<close> show "P w" by (auto simp: star_def)
|
||||
qed
|
||||
|
||||
lemma star_empty[simp]: "star {} = {[]}"
|
||||
by (auto elim: star_induct)
|
||||
|
||||
lemma star_epsilon[simp]: "star {[]} = {[]}"
|
||||
by (auto elim: star_induct)
|
||||
|
||||
lemma star_idemp[simp]: "star (star A) = star A"
|
||||
by (auto elim: star_induct)
|
||||
|
||||
lemma star_unfold_left: "star A = A @@ star A \<union> {[]}" (is "?L = ?R")
|
||||
proof
|
||||
show "?L \<subseteq> ?R" by (rule, erule star_induct) auto
|
||||
qed auto
|
||||
|
||||
lemma concat_in_star: "set ws \<subseteq> A \<Longrightarrow> concat ws : star A"
|
||||
by (induct ws) simp_all
|
||||
|
||||
lemma in_star_iff_concat:
|
||||
"w \<in> star A = (\<exists>ws. set ws \<subseteq> A \<and> w = concat ws)"
|
||||
(is "_ = (\<exists>ws. ?R w ws)")
|
||||
proof
|
||||
assume "w : star A" thus "\<exists>ws. ?R w ws"
|
||||
proof induct
|
||||
case Nil have "?R [] []" by simp
|
||||
thus ?case ..
|
||||
next
|
||||
case (append u v)
|
||||
then obtain ws where "set ws \<subseteq> A \<and> v = concat ws" by blast
|
||||
with append have "?R (u@v) (u#ws)" by auto
|
||||
thus ?case ..
|
||||
qed
|
||||
next
|
||||
assume "\<exists>us. ?R w us" thus "w : star A"
|
||||
by (auto simp: concat_in_star)
|
||||
qed
|
||||
|
||||
lemma star_conv_concat: "star A = {concat ws|ws. set ws \<subseteq> A}"
|
||||
by (fastforce simp: in_star_iff_concat)
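
text\<open>As a quick illustrative addition (not in the original text): a concrete word lies
in a star language when it can be chopped into factors from the base language, and
@{thm [source] concat_in_star} makes this directly usable.\<close>

lemma "[0,1,0] \<in> star {[0], [1::nat]}"
  using concat_in_star[of "[[0],[1],[0]]" "{[0], [1::nat]}"] by simp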

lemma star_insert_eps[simp]: "star (insert [] A) = star(A)"
proof-
  { fix us
    have "set us \<subseteq> insert [] A \<Longrightarrow> \<exists>vs. concat us = concat vs \<and> set vs \<subseteq> A"
      (is "?P \<Longrightarrow> \<exists>vs. ?Q vs")
    proof
      let ?vs = "filter (%u. u \<noteq> []) us"
      show "?P \<Longrightarrow> ?Q ?vs" by (induct us) auto
    qed
  } thus ?thesis by (auto simp: star_conv_concat)
qed

lemma star_unfold_left_Nil: "star A = (A - {[]}) @@ (star A) \<union> {[]}"
  by (metis insert_Diff_single star_insert_eps star_unfold_left)

lemma star_Diff_Nil_fold: "(A - {[]}) @@ star A = star A - {[]}"
proof -
  have "[] \<notin> (A - {[]}) @@ star A" by simp
  thus ?thesis using star_unfold_left_Nil by blast
qed

lemma star_decom:
  assumes a: "x \<in> star A" "x \<noteq> []"
  shows "\<exists>a b. x = a @ b \<and> a \<noteq> [] \<and> a \<in> A \<and> b \<in> star A"
  using a by (induct rule: star_induct) (blast)+


subsection \<open>Left-Quotients of languages\<close>

definition Deriv :: "'a \<Rightarrow> 'a lang \<Rightarrow> 'a lang"
  where "Deriv x A = { xs. x#xs \<in> A }"

definition Derivs :: "'a list \<Rightarrow> 'a lang \<Rightarrow> 'a lang"
  where "Derivs xs A = { ys. xs @ ys \<in> A }"

abbreviation
  Derivss :: "'a list \<Rightarrow> 'a lang set \<Rightarrow> 'a lang"
where
  "Derivss s As \<equiv> \<Union> (Derivs s ` As)"


lemma Deriv_empty[simp]: "Deriv a {} = {}"
  and Deriv_epsilon[simp]: "Deriv a {[]} = {}"
  and Deriv_char[simp]: "Deriv a {[b]} = (if a = b then {[]} else {})"
  and Deriv_union[simp]: "Deriv a (A \<union> B) = Deriv a A \<union> Deriv a B"
  and Deriv_inter[simp]: "Deriv a (A \<inter> B) = Deriv a A \<inter> Deriv a B"
  and Deriv_compl[simp]: "Deriv a (-A) = - Deriv a A"
  and Deriv_Union[simp]: "Deriv a (Union M) = Union(Deriv a ` M)"
  and Deriv_UN[simp]: "Deriv a (UN x:I. S x) = (UN x:I. Deriv a (S x))"
  by (auto simp: Deriv_def)
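
text\<open>A worked example, added for illustration only: the left-quotient of a concrete
two-word language by its first letter follows by unfolding the definition.\<close>

lemma "Deriv (0::nat) {[0,1], [1,0]} = {[1]}"
  by (auto simp: Deriv_def)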

lemma Der_conc [simp]:
  shows "Deriv c (A @@ B) = (Deriv c A) @@ B \<union> (if [] \<in> A then Deriv c B else {})"
  unfolding Deriv_def conc_def
  by (auto simp add: Cons_eq_append_conv)

lemma Deriv_star [simp]:
  shows "Deriv c (star A) = (Deriv c A) @@ star A"
proof -
  have "Deriv c (star A) = Deriv c ({[]} \<union> A @@ star A)"
    by (metis star_unfold_left sup.commute)
  also have "... = Deriv c (A @@ star A)"
    unfolding Deriv_union by (simp)
  also have "... = (Deriv c A) @@ (star A) \<union> (if [] \<in> A then Deriv c (star A) else {})"
    by simp
  also have "... = (Deriv c A) @@ star A"
    unfolding conc_def Deriv_def
    using star_decom by (force simp add: Cons_eq_append_conv)
  finally show "Deriv c (star A) = (Deriv c A) @@ star A" .
qed

lemma Deriv_diff[simp]:
  shows "Deriv c (A - B) = Deriv c A - Deriv c B"
  by (auto simp add: Deriv_def)

lemma Deriv_lists[simp]: "c : S \<Longrightarrow> Deriv c (lists S) = lists S"
  by (auto simp add: Deriv_def)

lemma Derivs_simps [simp]:
  shows "Derivs [] A = A"
    and "Derivs (c # s) A = Derivs s (Deriv c A)"
    and "Derivs (s1 @ s2) A = Derivs s2 (Derivs s1 A)"
  unfolding Derivs_def Deriv_def by auto
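
text\<open>Analogously for word derivatives (again an illustrative addition): quotienting by
the prefix \<open>[0,1]\<close> leaves exactly the matching suffixes.\<close>

lemma "Derivs [0,1] {[0,1,0], [1,1]} = {[0::nat]}"
  by (auto simp: Derivs_def)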

lemma in_fold_Deriv: "v \<in> fold Deriv w L \<longleftrightarrow> w @ v \<in> L"
  by (induct w arbitrary: L) (simp_all add: Deriv_def)

lemma Derivs_alt_def [code]: "Derivs w L = fold Deriv w L"
  by (induct w arbitrary: L) simp_all

lemma Deriv_code [code]:
  "Deriv x A = tl ` Set.filter (\<lambda>xs. case xs of x' # _ \<Rightarrow> x = x' | _ \<Rightarrow> False) A"
  by (auto simp: Deriv_def Set.filter_def image_iff tl_def split: list.splits)
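
text\<open>Because the two equations above are installed as code equations, derivatives of
finite language literals can be evaluated directly; a quick sanity check (an added
example, assuming the default code setup for finite sets):\<close>

value "Deriv (0::nat) {[0,1], [1]}"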


subsection \<open>Shuffle product\<close>

definition Shuffle (infixr "\<parallel>" 80) where
  "Shuffle A B = \<Union>{shuffle xs ys | xs ys. xs \<in> A \<and> ys \<in> B}"

lemma Deriv_Shuffle[simp]:
  "Deriv a (A \<parallel> B) = Deriv a A \<parallel> B \<union> A \<parallel> Deriv a B"
  unfolding Shuffle_def Deriv_def by (fastforce simp: Cons_in_shuffle_iff neq_Nil_conv)

lemma shuffle_subset_lists:
  assumes "A \<subseteq> lists S" "B \<subseteq> lists S"
  shows "A \<parallel> B \<subseteq> lists S"
unfolding Shuffle_def proof safe
  fix x and zs xs ys :: "'a list"
  assume zs: "zs \<in> shuffle xs ys" "x \<in> set zs" and "xs \<in> A" "ys \<in> B"
  with assms have "xs \<in> lists S" "ys \<in> lists S" by auto
  with zs show "x \<in> S" by (induct xs ys arbitrary: zs rule: shuffle.induct) auto
qed

lemma Nil_in_Shuffle[simp]: "[] \<in> A \<parallel> B \<longleftrightarrow> [] \<in> A \<and> [] \<in> B"
  unfolding Shuffle_def by force

lemma shuffle_Un_distrib:
  shows "A \<parallel> (B \<union> C) = A \<parallel> B \<union> A \<parallel> C"
    and "(B \<union> C) \<parallel> A = B \<parallel> A \<union> C \<parallel> A"
  unfolding Shuffle_def by fast+

lemma shuffle_UNION_distrib:
  shows "A \<parallel> UNION I M = UNION I (%i. A \<parallel> M i)"
    and "UNION I M \<parallel> A = UNION I (%i. M i \<parallel> A)"
  unfolding Shuffle_def by fast+

lemma Shuffle_empty[simp]:
  "A \<parallel> {} = {}"
  "{} \<parallel> B = {}"
  unfolding Shuffle_def by auto

lemma Shuffle_eps[simp]:
  "A \<parallel> {[]} = A"
  "{[]} \<parallel> B = B"
  unfolding Shuffle_def by auto
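
text\<open>A simple membership check for the shuffle product, included only as an
illustration; the exact automation needed may vary with the simp setup for the
underlying @{const shuffle} operation on words.\<close>

lemma "[0,1] \<in> {[0::nat]} \<parallel> {[1]}"
  by (force simp: Shuffle_def)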


subsection \<open>Arden's Lemma\<close>

lemma arden_helper:
  assumes eq: "X = A @@ X \<union> B"
  shows "X = (A ^^ Suc n) @@ X \<union> (\<Union>m\<le>n. (A ^^ m) @@ B)"
proof (induct n)
  case 0
  show "X = (A ^^ Suc 0) @@ X \<union> (\<Union>m\<le>0. (A ^^ m) @@ B)"
    using eq by simp
next
  case (Suc n)
  have ih: "X = (A ^^ Suc n) @@ X \<union> (\<Union>m\<le>n. (A ^^ m) @@ B)" by fact
  also have "\<dots> = (A ^^ Suc n) @@ (A @@ X \<union> B) \<union> (\<Union>m\<le>n. (A ^^ m) @@ B)" using eq by simp
  also have "\<dots> = (A ^^ Suc (Suc n)) @@ X \<union> ((A ^^ Suc n) @@ B) \<union> (\<Union>m\<le>n. (A ^^ m) @@ B)"
    by (simp add: conc_Un_distrib conc_assoc[symmetric] conc_pow_comm)
  also have "\<dots> = (A ^^ Suc (Suc n)) @@ X \<union> (\<Union>m\<le>Suc n. (A ^^ m) @@ B)"
    by (auto simp add: atMost_Suc)
  finally show "X = (A ^^ Suc (Suc n)) @@ X \<union> (\<Union>m\<le>Suc n. (A ^^ m) @@ B)" .
qed

lemma Arden:
  assumes "[] \<notin> A"
  shows "X = A @@ X \<union> B \<longleftrightarrow> X = star A @@ B"
proof
  assume eq: "X = A @@ X \<union> B"
  { fix w assume "w : X"
    let ?n = "size w"
    from \<open>[] \<notin> A\<close> have "\<forall>u \<in> A. length u \<ge> 1"
      by (metis Suc_eq_plus1 add_leD2 le_0_eq length_0_conv not_less_eq_eq)
    hence "\<forall>u \<in> A^^(?n+1). length u \<ge> ?n+1"
      by (metis length_lang_pow_lb nat_mult_1)
    hence "\<forall>u \<in> A^^(?n+1)@@X. length u \<ge> ?n+1"
      by (auto simp only: conc_def length_append)
    hence "w \<notin> A^^(?n+1)@@X" by auto
    hence "w : star A @@ B" using \<open>w : X\<close> using arden_helper[OF eq, where n="?n"]
      by (auto simp add: star_def conc_UNION_distrib)
  } moreover
  { fix w assume "w : star A @@ B"
    hence "\<exists>n. w \<in> A^^n @@ B" by (auto simp: conc_def star_def)
    hence "w : X" using arden_helper[OF eq] by blast
  } ultimately show "X = star A @@ B" by blast
next
  assume eq: "X = star A @@ B"
  have "star A = A @@ star A \<union> {[]}"
    by (rule star_unfold_left)
  then have "star A @@ B = (A @@ star A \<union> {[]}) @@ B"
    by metis
  also have "\<dots> = (A @@ star A) @@ B \<union> B"
    unfolding conc_Un_distrib by simp
  also have "\<dots> = A @@ (star A @@ B) \<union> B"
    by (simp only: conc_assoc)
  finally show "X = A @@ X \<union> B"
    using eq by blast
qed
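
text\<open>A typical use of Arden's lemma, added as an illustrative aside: it solves the
language equation \<open>X = A @@ X \<union> B\<close> uniquely whenever \<open>A\<close> does not contain the empty
word, here for \<open>A = {[0]}\<close> and \<open>B = {[]}\<close>.\<close>

lemma "X = {[0::nat]} @@ X \<union> {[]} \<longleftrightarrow> X = star {[0]}"
  using Arden[of "{[0::nat]}" X "{[]}"] by simp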


lemma reversed_arden_helper:
  assumes eq: "X = X @@ A \<union> B"
  shows "X = X @@ (A ^^ Suc n) \<union> (\<Union>m\<le>n. B @@ (A ^^ m))"
proof (induct n)
  case 0
  show "X = X @@ (A ^^ Suc 0) \<union> (\<Union>m\<le>0. B @@ (A ^^ m))"
    using eq by simp
next
  case (Suc n)
  have ih: "X = X @@ (A ^^ Suc n) \<union> (\<Union>m\<le>n. B @@ (A ^^ m))" by fact
  also have "\<dots> = (X @@ A \<union> B) @@ (A ^^ Suc n) \<union> (\<Union>m\<le>n. B @@ (A ^^ m))" using eq by simp
  also have "\<dots> = X @@ (A ^^ Suc (Suc n)) \<union> (B @@ (A ^^ Suc n)) \<union> (\<Union>m\<le>n. B @@ (A ^^ m))"
    by (simp add: conc_Un_distrib conc_assoc)
  also have "\<dots> = X @@ (A ^^ Suc (Suc n)) \<union> (\<Union>m\<le>Suc n. B @@ (A ^^ m))"
    by (auto simp add: atMost_Suc)
  finally show "X = X @@ (A ^^ Suc (Suc n)) \<union> (\<Union>m\<le>Suc n. B @@ (A ^^ m))" .
qed

theorem reversed_Arden:
  assumes nemp: "[] \<notin> A"
  shows "X = X @@ A \<union> B \<longleftrightarrow> X = B @@ star A"
proof
  assume eq: "X = X @@ A \<union> B"
  { fix w assume "w : X"
    let ?n = "size w"
    from \<open>[] \<notin> A\<close> have "\<forall>u \<in> A. length u \<ge> 1"
      by (metis Suc_eq_plus1 add_leD2 le_0_eq length_0_conv not_less_eq_eq)
    hence "\<forall>u \<in> A^^(?n+1). length u \<ge> ?n+1"
      by (metis length_lang_pow_lb nat_mult_1)
    hence "\<forall>u \<in> X @@ A^^(?n+1). length u \<ge> ?n+1"
      by (auto simp only: conc_def length_append)
    hence "w \<notin> X @@ A^^(?n+1)" by auto
    hence "w : B @@ star A" using \<open>w : X\<close> using reversed_arden_helper[OF eq, where n="?n"]
      by (auto simp add: star_def conc_UNION_distrib)
  } moreover
  { fix w assume "w : B @@ star A"
    hence "\<exists>n. w \<in> B @@ A^^n" by (auto simp: conc_def star_def)
    hence "w : X" using reversed_arden_helper[OF eq] by blast
  } ultimately show "X = B @@ star A" by blast
next
  assume eq: "X = B @@ star A"
  have "star A = {[]} \<union> star A @@ A"
    unfolding conc_star_comm[symmetric]
    by (metis Un_commute star_unfold_left)
  then have "B @@ star A = B @@ ({[]} \<union> star A @@ A)"
    by metis
  also have "\<dots> = B \<union> B @@ (star A @@ A)"
    unfolding conc_Un_distrib by simp
  also have "\<dots> = B \<union> (B @@ star A) @@ A"
    by (simp only: conc_assoc)
  finally show "X = X @@ A \<union> B"
    using eq by blast
qed

end

@@ -0,0 +1,53 @@
section \<open>Regular Expressions as Homogeneous Binary Relations\<close>

theory Relation_Interpretation
imports Regular_Exp
begin

primrec rel :: "('a \<Rightarrow> ('b * 'b) set) \<Rightarrow> 'a rexp \<Rightarrow> ('b * 'b) set"
where
  "rel v Zero = {}" |
  "rel v One = Id" |
  "rel v (Atom a) = v a" |
  "rel v (Plus r s) = rel v r \<union> rel v s" |
  "rel v (Times r s) = rel v r O rel v s" |
  "rel v (Star r) = (rel v r)^*"
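
text\<open>For illustration (an addition, not in the original theory): the interpretation
simply unfolds along the structure of the regular expression.\<close>

lemma "rel v (Times (Atom a) (Star (Atom a))) = v a O (v a)^*"
  by simp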

primrec word_rel :: "('a \<Rightarrow> ('b * 'b) set) \<Rightarrow> 'a list \<Rightarrow> ('b * 'b) set"
where
  "word_rel v [] = Id"
| "word_rel v (a#as) = v a O word_rel v as"

lemma word_rel_append:
  "word_rel v w O word_rel v w' = word_rel v (w @ w')"
  by (rule sym) (induct w, auto)

lemma rel_word_rel: "rel v r = (\<Union>w\<in>lang r. word_rel v w)"
proof (induct r)
  case Times thus ?case
    by (auto simp: rel_def word_rel_append conc_def relcomp_UNION_distrib relcomp_UNION_distrib2)
next
  case (Star r)
  { fix n
    have "(rel v r) ^^ n = (\<Union>w \<in> lang r ^^ n. word_rel v w)"
    proof (induct n)
      case 0 show ?case by simp
    next
      case (Suc n) thus ?case
        unfolding relpow.simps relpow_commute[symmetric]
        by (auto simp add: Star conc_def word_rel_append
            relcomp_UNION_distrib relcomp_UNION_distrib2)
    qed }

  thus ?case unfolding rel.simps
    by (force simp: rtrancl_power star_def)
qed auto


text \<open>Soundness:\<close>

lemma soundness:
  "lang r = lang s \<Longrightarrow> rel v r = rel v s"
  unfolding rel_word_rel by auto
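
text\<open>This soundness result is the basis of the relation-algebra proof method described
by Krauss and Nipkow. As an illustrative addition, a classical identity of
reflexive-transitive closure follows by comparing the two regular expressions that
denote it over a single letter:\<close>

lemma "r O r^* = r^* O r"
  using soundness[of "Times (Atom ()) (Star (Atom ()))"
                     "Times (Star (Atom ())) (Atom ())" "\<lambda>_. r"]
  by (simp add: conc_star_comm)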

end

@@ -0,0 +1,36 @@
@article{KraussN-JAR,
  author  = {Alexander Krauss and Tobias Nipkow},
  title   = {Proof Pearl: Regular Expression Equivalence and Relation Algebra},
  journal = {J. Automated Reasoning},
  volume  = {49},
  pages   = {95--106},
  year    = {2012},
  note    = {published online March 2011}
}

@inproceedings{Rutten98,
  author    = {Jan J. M. M. Rutten},
  title     = {Automata and Coinduction (An Exercise in Coalgebra)},
  editor    = {Davide Sangiorgi and Robert de Simone},
  booktitle = {Concurrency Theory (CONCUR'98)},
  pages     = {194--218},
  publisher = {Springer},
  series    = {Lecture Notes in Computer Science},
  volume    = {1466},
  year      = {1998}
}

@article{Brzozowski64,
  author    = {J.~A.~Brzozowski},
  title     = {{D}erivatives of {R}egular {E}xpressions},
  journal   = {Journal of the ACM},
  volume    = {11},
  number    = {4},
  year      = {1964},
  pages     = {481--494},
  publisher = {ACM}
}

@article{Antimirov95,
  author  = {V.~Antimirov},
  title   = {{P}artial {D}erivatives of {R}egular {E}xpressions and
             {F}inite {A}utomata {C}onstructions},
  journal = {Theoretical Computer Science},
  year    = {1995},
  volume  = {155},
  pages   = {291--319}
}

@@ -0,0 +1,47 @@
\documentclass[11pt,a4paper]{article}
\usepackage{isabelle,isabellesym}
\usepackage{eufrak}

% this should be the last package used
\usepackage{pdfsetup}

% urls in roman style, theory text in math-similar italics
\urlstyle{rm}
\isabellestyle{it}


\begin{document}

\title{Regular Sets, Expressions, Derivatives and Relation Algebra}
\author{Alexander Krauss, Tobias Nipkow,\\
Chunhan Wu, Xingyuan Zhang and Christian Urban}
\maketitle

\begin{abstract}
This is a library of constructions on regular expressions and languages. It
provides the operations of concatenation, Kleene star and left-quotients of
languages. A theory of derivatives and partial derivatives is
provided. Arden's lemma and the finiteness of partial derivatives are
established. A simple regular expression matcher based on Brzozowski's
derivatives is proved to be correct. An executable equivalence checker for
regular expressions is verified; it does not need automata but works directly
on regular expressions. By mapping regular expressions to binary relations, an
automatic and complete proof method for (in)equalities of binary relations
over union, concatenation and (reflexive) transitive closure is obtained.

For an exposition of the equivalence checker for regular and relation
algebraic expressions see the paper by Krauss and Nipkow~\cite{KraussN-JAR}.

Extended regular expressions with complement and intersection
are also defined and an equivalence checker is provided.
\end{abstract}

\tableofcontents

% include generated text of all theories
\input{session}

\bibliographystyle{abbrv}
\bibliography{root}

\end{document}

@@ -0,0 +1,306 @@
section \<open>Deciding Regular Expression Equivalence (2)\<close>

theory pEquivalence_Checking
imports Equivalence_Checking Derivatives
begin

text\<open>\noindent Similar to theory @{theory "Regular-Sets.Equivalence_Checking"}, but
with partial derivatives instead of derivatives.\<close>

text\<open>Lifting many notions to sets:\<close>

definition "Lang R == UN r:R. lang r"
definition "Nullable R == EX r:R. nullable r"
definition "Pderiv a R == UN r:R. pderiv a r"
definition "Atoms R == UN r:R. atoms r"
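
text\<open>For instance (an added sanity check, not from the original theory): the language
of a finite set of regular expressions is the union of the member languages.\<close>

lemma "Lang {One, Atom a} = {[]} \<union> {[a]}"
  by (auto simp: Lang_def)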

(* FIXME: rename pderiv_set in Derivatives to Pderiv and drop the one above *)

lemma Atoms_pderiv: "Atoms(pderiv a r) \<subseteq> atoms r"
  apply (induct r)
  apply (auto simp: Atoms_def UN_subset_iff)
  apply (fastforce)+
  done

lemma Atoms_Pderiv: "Atoms(Pderiv a R) \<subseteq> Atoms R"
  using Atoms_pderiv by (fastforce simp: Atoms_def Pderiv_def)

lemma pderiv_no_occurrence:
  "x \<notin> atoms r \<Longrightarrow> pderiv x r = {}"
  by (induct r) auto

lemma Pderiv_no_occurrence:
  "x \<notin> Atoms R \<Longrightarrow> Pderiv x R = {}"
  by (auto simp: pderiv_no_occurrence Atoms_def Pderiv_def)

lemma Deriv_Lang: "Deriv c (Lang R) = Lang (Pderiv c R)"
  by (auto simp: Deriv_pderiv Pderiv_def Lang_def)

lemma Nullable_pderiv[simp]: "Nullable(pderivs w r) = (w : lang r)"
  apply (induction w arbitrary: r)
  apply (simp add: Nullable_def nullable_iff singleton_iff)
  using eqset_imp_iff[OF Deriv_pderiv[where 'a = 'a]]
  apply (simp add: Nullable_def Deriv_def)
  done


type_synonym 'a Rexp_pair = "'a rexp set * 'a rexp set"
type_synonym 'a Rexp_pairs = "'a Rexp_pair list"

definition is_Bisimulation :: "'a list \<Rightarrow> 'a Rexp_pairs \<Rightarrow> bool"
where
  "is_Bisimulation as ps =
    (\<forall>(R,S)\<in> set ps. Atoms R \<union> Atoms S \<subseteq> set as \<and>
      (Nullable R \<longleftrightarrow> Nullable S) \<and>
      (\<forall>a\<in>set as. (Pderiv a R, Pderiv a S) \<in> set ps))"

lemma Bisim_Lang_eq:
  assumes Bisim: "is_Bisimulation as ps"
  assumes "(R, S) \<in> set ps"
  shows "Lang R = Lang S"
proof -
  define ps' where "ps' = ({}, {}) # ps"
  from Bisim have Bisim': "is_Bisimulation as ps'"
    by (fastforce simp: ps'_def is_Bisimulation_def UN_subset_iff Pderiv_def Atoms_def)
  let ?R = "\<lambda>K L. (\<exists>(R,S)\<in>set ps'. K = Lang R \<and> L = Lang S)"
  show ?thesis
  proof (rule language_coinduct[where R="?R"])
    from \<open>(R,S) \<in> set ps\<close>
    have "(R,S) \<in> set ps'" by (auto simp: ps'_def)
    thus "?R (Lang R) (Lang S)" by auto
  next
    fix K L assume "?R K L"
    then obtain R S where rs: "(R, S) \<in> set ps'"
      and KL: "K = Lang R" "L = Lang S" by auto
    with Bisim' have "Nullable R \<longleftrightarrow> Nullable S"
      by (auto simp: is_Bisimulation_def)
    thus "[] \<in> K \<longleftrightarrow> [] \<in> L"
      by (auto simp: nullable_iff KL Nullable_def Lang_def)
    fix a
    show "?R (Deriv a K) (Deriv a L)"
    proof cases
      assume "a \<in> set as"
      with rs Bisim'
      have "(Pderiv a R, Pderiv a S) \<in> set ps'"
        by (auto simp: is_Bisimulation_def)
      thus ?thesis by (fastforce simp: KL Deriv_Lang)
    next
      assume "a \<notin> set as"
      with Bisim' rs
      have "a \<notin> Atoms R \<union> Atoms S"
        by (fastforce simp: is_Bisimulation_def UN_subset_iff)
      then have "Pderiv a R = {}" "Pderiv a S = {}"
        by (metis Pderiv_no_occurrence Un_iff)+
      then have "Deriv a K = Lang {}" "Deriv a L = Lang {}"
        unfolding KL Deriv_Lang by auto
      thus ?thesis by (auto simp: ps'_def)
    qed
  qed
qed


subsection \<open>Closure computation\<close>

fun test :: "'a Rexp_pairs * 'a Rexp_pairs \<Rightarrow> bool" where
  "test (ws, ps) = (case ws of [] \<Rightarrow> False | (R,S)#_ \<Rightarrow> Nullable R = Nullable S)"

fun step :: "'a list \<Rightarrow>
    'a Rexp_pairs * 'a Rexp_pairs \<Rightarrow> 'a Rexp_pairs * 'a Rexp_pairs"
  where "step as (ws,ps) =
    (let
      (R,S) = hd ws;
      ps' = (R,S) # ps;
      succs = map (\<lambda>a. (Pderiv a R, Pderiv a S)) as;
      new = filter (\<lambda>p. p \<notin> set ps \<union> set ws) succs
    in (remdups new @ tl ws, ps'))"

definition closure ::
  "'a list \<Rightarrow> 'a Rexp_pairs * 'a Rexp_pairs
   \<Rightarrow> ('a Rexp_pairs * 'a Rexp_pairs) option" where
  "closure as = while_option test (step as)"

definition pre_Bisim :: "'a list \<Rightarrow> 'a rexp set \<Rightarrow> 'a rexp set \<Rightarrow>
    'a Rexp_pairs * 'a Rexp_pairs \<Rightarrow> bool"
where
  "pre_Bisim as R S = (\<lambda>(ws,ps).
    ((R,S) \<in> set ws \<union> set ps) \<and>
    (\<forall>(R,S)\<in> set ws \<union> set ps. Atoms R \<union> Atoms S \<subseteq> set as) \<and>
    (\<forall>(R,S)\<in> set ps. (Nullable R \<longleftrightarrow> Nullable S) \<and>
      (\<forall>a\<in>set as. (Pderiv a R, Pderiv a S) \<in> set ps \<union> set ws)))"

lemma step_set_eq: "\<lbrakk> test (ws,ps); step as (ws,ps) = (ws',ps') \<rbrakk>
  \<Longrightarrow> set ws' \<union> set ps' =
     set ws \<union> set ps
     \<union> (\<Union>a\<in>set as. {(Pderiv a (fst(hd ws)), Pderiv a (snd(hd ws)))})"
  by (auto split: list.splits)

theorem closure_sound:
  assumes result: "closure as ([(R,S)],[]) = Some([],ps)"
    and atoms: "Atoms R \<union> Atoms S \<subseteq> set as"
  shows "Lang R = Lang S"
proof-
  { fix st
    have "pre_Bisim as R S st \<Longrightarrow> test st \<Longrightarrow> pre_Bisim as R S (step as st)"
      unfolding pre_Bisim_def
    proof (split prod.splits, elim case_prodE conjE, intro allI impI conjI, goal_cases)
      case 1 thus ?case by (auto split: list.splits)
    next
      case prems: (2 ws ps ws' ps')
      note prems(2)[simp]
      from \<open>test st\<close> obtain wstl R S where [simp]: "ws = (R,S)#wstl"
        by (auto split: list.splits)
      from \<open>step as st = (ws',ps')\<close> obtain P where [simp]: "ps' = (R,S) # ps"
        and [simp]: "ws' = remdups(filter P (map (\<lambda>a. (Pderiv a R, Pderiv a S)) as)) @ wstl"
        by auto
      have "\<forall>(R',S')\<in>set wstl \<union> set ps'. Atoms R' \<union> Atoms S' \<subseteq> set as"
        using prems(4) by auto
      moreover
      have "\<forall>a\<in>set as. Atoms(Pderiv a R) \<union> Atoms(Pderiv a S) \<subseteq> set as"
        using prems(4) by simp (metis (lifting) Atoms_Pderiv order_trans)
      ultimately show ?case by simp blast
    next
      case 3 thus ?case
        apply (clarsimp simp: image_iff split: prod.splits list.splits)
        by hypsubst_thin metis
    qed
  }
  moreover
  from atoms
  have "pre_Bisim as R S ([(R,S)],[])" by (simp add: pre_Bisim_def)
  ultimately have pre_Bisim_ps: "pre_Bisim as R S ([],ps)"
    by (rule while_option_rule[OF _ result[unfolded closure_def]])
  then have "is_Bisimulation as ps" "(R,S) \<in> set ps"
    by (auto simp: pre_Bisim_def is_Bisimulation_def)
  thus "Lang R = Lang S" by (rule Bisim_Lang_eq)
qed


subsection \<open>The overall procedure\<close>

definition check_eqv :: "'a rexp \<Rightarrow> 'a rexp \<Rightarrow> bool"
where
  "check_eqv r s =
    (case closure (add_atoms r (add_atoms s [])) ([({r}, {s})], []) of
       Some([],_) \<Rightarrow> True | _ \<Rightarrow> False)"

lemma soundness: assumes "check_eqv r s" shows "lang r = lang s"
proof -
  let ?as = "add_atoms r (add_atoms s [])"
  obtain ps where 1: "closure ?as ([({r},{s})],[]) = Some([],ps)"
    using assms by (auto simp: check_eqv_def split: option.splits list.splits)
  then have "lang r = lang s"
    by (rule closure_sound[of _ "{r}" "{s}", simplified Lang_def, simplified])
       (auto simp: set_add_atoms Atoms_def)
  thus "lang r = lang s" by simp
qed

text\<open>Test:\<close>
lemma "check_eqv
  (Plus One (Times (Atom 0) (Star(Atom 0))))
  (Star(Atom(0::nat)))"
  by eval
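
text\<open>Further checks of the same kind go through by evaluation; this added example
confirms that a letter commutes with its own star:\<close>

lemma "check_eqv (Times (Atom (0::nat)) (Star (Atom 0)))
                 (Times (Star (Atom 0)) (Atom 0))"
  by eval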


subsection "Termination and Completeness"

definition PDERIVS :: "'a rexp set => 'a rexp set" where
  "PDERIVS R = (UN r:R. pderivs_lang UNIV r)"

lemma PDERIVS_incr[simp]: "R \<subseteq> PDERIVS R"
  apply (auto simp add: PDERIVS_def pderivs_lang_def)
  by (metis pderivs.simps(1) insertI1)

lemma Pderiv_PDERIVS: assumes "R' \<subseteq> PDERIVS R" shows "Pderiv a R' \<subseteq> PDERIVS R"
proof
  fix r assume "r : Pderiv a R'"
  then obtain r' where "r' : R'" "r : pderiv a r'" by (auto simp: Pderiv_def)
  from \<open>r' : R'\<close> \<open>R' \<subseteq> PDERIVS R\<close> obtain s w where "s : R" "r' : pderivs w s"
    by (auto simp: PDERIVS_def pderivs_lang_def)
  hence "r \<in> pderivs (w @ [a]) s" using \<open>r : pderiv a r'\<close> by (auto simp add: pderivs_snoc)
  thus "r : PDERIVS R" using \<open>s : R\<close> by (auto simp: PDERIVS_def pderivs_lang_def)
qed

lemma finite_PDERIVS: "finite R \<Longrightarrow> finite(PDERIVS R)"
  by (simp add: PDERIVS_def finite_pderivs_lang_UNIV)

lemma closure_Some: assumes "finite R0" "finite S0" shows "\<exists>p. closure as ([(R0,S0)],[]) = Some p"
proof(unfold closure_def)
  let ?Inv = "%(ws,bs). distinct ws \<and> (ALL (R,S) : set ws. R \<subseteq> PDERIVS R0 \<and> S \<subseteq> PDERIVS S0 \<and> (R,S) \<notin> set bs)"
  let ?m1 = "%bs. Pow(PDERIVS R0) \<times> Pow(PDERIVS S0) - set bs"
  let ?m2 = "%(ws,bs). card(?m1 bs)"
  have Inv0: "?Inv ([(R0, S0)], [])" by simp
  { fix s assume "test s" "?Inv s"
    obtain ws bs where [simp]: "s = (ws,bs)" by fastforce
    from \<open>test s\<close> obtain R S ws' where [simp]: "ws = (R,S)#ws'"
      by (auto split: prod.splits list.splits)
    let ?bs' = "(R,S) # bs"
    let ?succs = "map (\<lambda>a. (Pderiv a R, Pderiv a S)) as"
    let ?new = "filter (\<lambda>p. p \<notin> set bs \<union> set ws) ?succs"
    let ?ws' = "remdups ?new @ ws'"
    have *: "?Inv (step as s)"
    proof -
      from \<open>?Inv s\<close> have "distinct ?ws'" by auto
      have "ALL (R,S) : set ?ws'. R \<subseteq> PDERIVS R0 \<and> S \<subseteq> PDERIVS S0 \<and> (R,S) \<notin> set ?bs'" using \<open>?Inv s\<close>
        by (simp add: Ball_def image_iff) (metis Pderiv_PDERIVS)
      with \<open>distinct ?ws'\<close> show ?thesis by (simp)
    qed
    have "?m2(step as s) < ?m2 s"
    proof -
      have "finite(?m1 bs)"
        by (metis assms finite_Diff finite_PDERIVS finite_cartesian_product finite_Pow_iff)
      moreover have "?m2(step as s) < ?m2 s" using \<open>?Inv s\<close>
        by (auto intro: psubset_card_mono[OF \<open>finite(?m1 bs)\<close>])
      then show ?thesis using \<open>?Inv s\<close> by simp
    qed
    note * and this
  } note step = this
  show "\<exists>p. while_option test (step as) ([(R0, S0)], []) = Some p"
    by (rule measure_while_option_Some [where P = ?Inv and f = ?m2, OF _ Inv0]) (simp add: step)
qed

theorem closure_Some_Inv: assumes "closure as ([({r},{s})],[]) = Some p"
  shows "\<forall>(R,S)\<in>set(fst p). \<exists>w. R = pderivs w r \<and> S = pderivs w s" (is "?Inv p")
proof-
  from assms have 1: "while_option test (step as) ([({r},{s})],[]) = Some p"
    by (simp add: closure_def)
  have Inv0: "?Inv ([({r},{s})],[])" by simp (metis pderivs.simps(1))
  { fix p assume "?Inv p" "test p"
    obtain ws bs where [simp]: "p = (ws,bs)" by fastforce
    from \<open>test p\<close> obtain R S ws' where [simp]: "ws = (R,S)#ws'"
      by (auto split: prod.splits list.splits)
    let ?succs = "map (\<lambda>a. (Pderiv a R, Pderiv a S)) as"
    let ?new = "filter (\<lambda>p. p \<notin> set bs \<union> set ws) ?succs"
    let ?ws' = "remdups ?new @ ws'"
    from \<open>?Inv p\<close> obtain w where [simp]: "R = pderivs w r" "S = pderivs w s"
      by auto
    { fix x assume "x : set as"
      have "EX w. Pderiv x R = pderivs w r \<and> Pderiv x S = pderivs w s"
        by (rule_tac x="w@[x]" in exI)(simp add: pderivs_append Pderiv_def)
    }
    with \<open>?Inv p\<close> have "?Inv (step as p)" by auto
  } note Inv_step = this
  show ?thesis
    apply (rule while_option_rule[OF _ 1])
    apply (erule (1) Inv_step)
    apply (rule Inv0)
    done
qed

lemma closure_complete: assumes "lang r = lang s"
  shows "EX bs. closure as ([({r},{s})],[]) = Some([],bs)" (is ?C)
proof (rule ccontr)
  assume "\<not> ?C"
  then obtain R S ws bs
    where cl: "closure as ([({r},{s})],[]) = Some((R,S)#ws,bs)"
    using closure_Some[of "{r}" "{s}", simplified]
    by (metis (hide_lams, no_types) list.exhaust prod.exhaust)
  from assms closure_Some_Inv[OF this]
    while_option_stop[OF cl[unfolded closure_def]]
  show "False" by auto
qed

corollary completeness: "lang r = lang s \<Longrightarrow> check_eqv r s"
  by (auto simp add: check_eqv_def dest!: closure_complete
      split: option.split list.split)

end

@@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%%               2018 The University of Paris-Sud
%%               2019 The University of Exeter
%%
%% License:
%%   This program can be redistributed and/or modified under the terms

@@ -50,7 +51,7 @@

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% begin: side_by_side_figure*
\NewEnviron{isamarkupside_by_side_figure*}[1][]{\isaDof[env={side_by_side_figure},#1]{\BODY}}
\NewEnviron{isamarkupsideUNDERSCOREbyUNDERSCOREsideUNDERSCOREfigure*}[1][]{\isaDof[env={side_by_side_figure},#1]{\BODY}}
\newisadof{side_by_side_figure.Isa_COL.side_by_side_figure}%
[label=,type=%
,Isa_COL.figure.relative_width=%

@@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%%               2018 The University of Paris-Sud
%%               2019 The University of Exeter
%%
%% License:
%%   This program can be redistributed and/or modified under the terms

@@ -126,3 +127,9 @@
% end: chapter/section default implementations
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% begin: label and ref
\newisadof{label}[label=,type=][1]{\label{#1}}
\newisadof{ref}[label=,type=][1]{\autoref{#1}}
% end: label and ref
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

@@ -8,13 +8,14 @@ begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
(* some show-off of standard anti-quotations: *)
text \<open>THIS IS A TEXT\<close>
term "[]"

text\<open> @{thm refl} of name @{thm [source] refl}
      @{thm[mode=Rule] conjI}
      @{file "../../Isa_DOF.thy"}
      @{value "3+4::int"}
      @{const hd}
      @{theory List}}
      @{theory HOL.List}}
      @{term "3"}
      @{type bool}
      @{term [show_types] "f x = a + x"} \<close>

@@ -28,12 +29,13 @@ text\<open>An "anonymous" text-item, automatically coerced into the top-class "t
text*[tralala] \<open> Brexit means Brexit \<close>

text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "CENELEC_50128"}. \<close>
concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>

text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the @{theory "CENELEC_50128"}
"safety-related application condition", a concept defined in the
@{theory "Draft.CENELEC_50128"}
ontology:\<close>

text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>

@@ -80,7 +82,7 @@ text\<open> @{docitem \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docitem (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>

text\<open> @{docitem \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>


text\<open> @{EC \<open>ass122\<close>} -- global reference to an exported constraint in another file.
      Note that the link is actually a srac, which, according to
      the ontology, happens to be an "ec". \<close>

@@ -1,8 +1,5 @@
session "mini_odo" = "Isabelle_DOF" +
  options [document = pdf, document_output = "output"]
  theories [document = false]
    (* Foo *)
    (* Bar *)
  theories
    "mini_odo"
  document_files

@@ -2,6 +2,7 @@
theory mini_odo
  imports "Isabelle_DOF.CENELEC_50128"
          "Isabelle_DOF.scholarly_paper"

begin

section\<open> Some examples of Isabelle's standard antiquotations. \<close>

@@ -13,7 +14,7 @@ text\<open> @{thm refl} of name @{thm [source] refl}
      @{file "mini_odo.thy"}
      @{value "3+4::int"}
      @{const hd}
      @{theory List}
      @{theory HOL.List}
      @{term "3"}
      @{type bool}
      @{term [show_types] "f x = a + x"} \<close>

@@ -27,13 +28,13 @@ text\<open>An "anonymous" text-item, automatically coerced into the top-class "t
text*[tralala] \<open> Brexit means Brexit \<close>

text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "CENELEC_50128"}. \<close>
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>

text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the @{theory "CENELEC_50128"}
ontology:\<close>
"safety-related application condition", a concept defined in the
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>

text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>

@@ -13,7 +13,8 @@ print_doc_items
section\<open>Definitions, Lemmas, Theorems, Assertions\<close>


text*[aa::F, property = "[@{term ''True''}]"]\<open>Our definition of the HOL-Logic has the following properties:\<close>
text*[aa::F, property = "[@{term ''True''}]"]
\<open>Our definition of the HOL-Logic has the following properties:\<close>
assert*[aa::F] "True"



@@ -16,7 +16,7 @@ val {docobj_tab={tab = docitem_tab, ...},docclass_tab, ISA_transformer_tab, moni
Symtab.dest docitem_tab;
Symtab.dest docclass_tab;
\<close>


ML\<open>
fun fac x = if x = 0 then 1 else x * (fac(x -1));
fac 3;

@@ -4,7 +4,7 @@ theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin

text\<open>@{theory Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
text\<open>@{theory Draft.Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
Here, we say: From now on, these structural rules are respected wrt. all doc\_classes M is
enabled for.\<close>
open_monitor*[struct::M]

@@ -14,14 +14,18 @@ section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>

text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
... @{docitem c1} @{thm "refl"}\<close>
... @{docitem "c1"} @{thm "refl"} \<close>


update_instance*[d::D, a1 := X2]

text\<open> ... in ut tortor ... @{docitem \<open>a\<close>} ... @{A \<open>a\<close>}\<close>

text*[c2::C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
text*[c2::C, x = "\<open>delta\<close>"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>

text\<open>Note that both the notations @{term "''beta''"} and @{term "\<open>beta\<close>"} are possible;
the former is a more ancient format only supporting pure ascii, while the latter also supports
fancy unicode such as: @{term "\<open>\<beta>\<^sub>i''\<close>"} \<close>

text*[f::F] \<open> Lectus accumsan velit ultrices, ... }\<close>

@@ -33,8 +37,8 @@ update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]
text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>

text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...\<close>
update_instance*[f::F,b:="{(@{docitem ''a''}::A,@{docitem ''c1''}::C),
                           (@{docitem ''a''}, @{docitem ''c2''})}"]
update_instance*[f::F,b:="{(@{docitem \<open>a\<close>}::A,@{docitem \<open>c1\<close>}::C),
                           (@{docitem \<open>a\<close>}, @{docitem \<open>c2\<close>})}"]

close_monitor*[struct]


@@ -43,6 +43,9 @@ subsection*[d::A, x = "4"] \<open> Lorem ipsum dolor sit amet, ... \<close>
(* test : update should not fail, invariant still valid *)
update_instance*[d::A, x += "1"]

(* test : with the next step it should fail :
update_instance*[d::A, x += "1"]
*)

section\<open>Example: Monitor Class Invariant\<close>

@@ -14,7 +14,7 @@ They are the key-mechanism to denote
\<^item> Ontological Links, i.e. attributes referring to document classes defined by the ontology
\<^item> Ontological F-Links, i.e. attributes referring to formal entities inside Isabelle (such as thm's)

This file contains a number of examples resulting from the @{theory "Conceptual"} - ontology;
This file contains a number of examples resulting from the @{theory "Draft.Conceptual"} - ontology;
the emphasis of this presentation is to present the expressivity of ODL on a paradigmatical example.
\<close>

@@ -1,5 +1,8 @@
theory Noodles
  imports "small_math"
theory Noodles
  imports "../../ontologies/small_math"
          "../../ontologies/technical_report"
begin

title*[t::title]\<open>On Noodles\<close>
title*[t::title]\<open>On Noodles\<close>

end

@@ -1,5 +1,6 @@
theory "On_Noodle"
  imports small_math
theory "On_Noodles"
  imports "../../ontologies/small_math"
          "../../ontologies/technical_report"
begin

open_monitor*[this::article]

@@ -7,7 +8,7 @@ open_monitor*[this::article]
title*[t1::title]\<open>On Noodles\<close>

text*[simon::author]\<open>Simon Foster\<close>
text*[a::abstract, keyword_list = "[topology]"]
text*[a::abstract, keywordlist = "[''topology'']"]
\<open>We present the first fundamental results on the groundbreaking theory of noodles...\<close>
section*[intro::introduction]\<open>Introduction\<close>


@@ -28,3 +29,4 @@ update_instance*[def1, formal_results:="[@{thm ''noodle_def''}]"]

close_monitor*[this::article]

end

@@ -20,8 +20,8 @@ text*[paolo::author,
      email = "''paolo.crisafulli@irt-systemx.fr''",
      affiliation= "''IRT-SystemX, Paris, France''"]\<open>Paolo Crisafulli\<close>
text*[bu::author,
      email = "''wolff@lri.fr''",
      affiliation = "''Universit\\'e Paris-Sud, Paris, France''"]\<open>Burkhart Wolff\<close>
      email = "\<open>wolff@lri.fr\<close>",
      affiliation = "\<open>Université Paris-Sud, Paris, France\<close>"]\<open>Burkhart Wolff\<close>


text*[abs::abstract,

@@ -4,17 +4,17 @@ theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin

text\<open>@{theory Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
Here, we say: From now on, these structural rules are respected wrt. all doc\_classes M is
enabled for.\<close>
text\<open>@{theory \<open>Draft.Conceptual\<close>} provides a monitor @{typ M} enforcing a particular document
structure. Here, we say: From now on, these structural rules are respected wrt. all
\<^theory_text>\<open>doc_class\<close>es @{typ M} is enabled for.\<close>
open_monitor*[struct::M]

section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>

text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>


text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
... @{C \<open>c1\<close>} @{thm "refl"}\<close>
... @{C c1} @{thm "refl"}\<close>


update_instance*[d::D, a1 := X2]

@@ -32,9 +32,13 @@ update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]

text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>

text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...\<close>
update_instance*[f::F,b:="{(@{docitem ''a''}::A,@{docitem ''c1''}::C),
                           (@{docitem ''a''}, @{docitem ''c2''})}"]
text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...
The type annotations with @{typ A} and @{typ C} are optional and may help to get
additional information at the HOL level; the arguments of the inner-syntax antiquotation
are strings that can be denoted in two different syntactic variants; the former is
more robust than the traditional latter.\<close>
update_instance*[f::F,b:="{(@{docitem \<open>a\<close>}::A,@{docitem \<open>c1\<close>}::C),
                           (@{docitem ''a''}, @{docitem ''c2''})}"]

close_monitor*[struct]

@@ -1,6 +1,7 @@
(*<*)
theory "00_Frontmatter"
  imports "Isabelle_DOF.technical_report"
(* imports "Isabelle_DOF.technical_report" *)
  imports "../../../ontologies/technical_report"
begin

open_monitor*[this::report]

@@ -1,6 +1,7 @@
(*<*)
theory MyCommentedIsabelle
  imports "Isabelle_DOF.technical_report"
(* imports "Isabelle_DOF.technical_report" *)
  imports "../../../ontologies/technical_report"
begin


@@ -62,7 +63,7 @@ code-base. The preferred programming style is purely functional: \<close>
ML\<open> fun fac x = if x = 0 then 1 else x * fac(x-1) ;
    fac 10;
  \<close>
-- or
(* or *)
ML\<open> type state = { a : int, b : string}
    fun incr_state ({a, b}:state) = {a=a+1, b=b}
  \<close>

@@ -664,7 +665,7 @@ datatype thy = Thy of

*)

Theory.check: Proof.context -> string * Position.T -> theory;
Theory.check: {long: bool} -> Proof.context -> string * Position.T -> theory;

Theory.local_setup: (Proof.context -> Proof.context) -> unit;
Theory.setup: (theory -> theory) -> unit; (* The thing to extend the table of "command"s with parser - callbacks. *)

@@ -803,107 +804,31 @@ Goal.prove_global : theory -> string list -> term list -> term ->
\<close>

section\<open>The Isar Engine\<close>


text\<open>The main structure of the Isar engine is @{ML_structure Toplevel}; it provides an
internal @{ML_type state} with the necessary infrastructure ---
i.e. the operations to pack and unpack theories and Proof.contexts --- on it:
\<close>
ML\<open>
Toplevel.theory;
Toplevel.presentation_context_of; (* Toplevel is a kind of table with call-back functions *)

Consts.the_const; (* T is a kind of signature ... *)
Variable.import_terms;
Vartab.update;

fun control_antiquotation name s1 s2 =
  Thy_Output.antiquotation name (Scan.lift Args.cartouche_input)
    (fn {state, ...} => enclose s1 s2 o Thy_Output.output_text state {markdown = false});

Output.output;

Syntax.read_input ;
Input.source_content;

(*
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
*)
\<close>

ML\<open>
Config.get @{context} Thy_Output.display;
Config.get @{context} Thy_Output.source;
Config.get @{context} Thy_Output.modes;
Thy_Output.document_command;
(* is:
fun document_command markdown (loc, txt) =
  Toplevel.keep (fn state =>
    (case loc of
      NONE => ignore (output_text state markdown txt)
    | SOME (_, pos) =>
        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));

end;

*)
\<close>



ML\<open> Thy_Output.document_command {markdown = true} \<close>
(* Structures related to LaTeX Generation *)
ML\<open> Latex.output_ascii;

Latex.output_token
(* Hm, generated output for
subsection*[Shaft_Encoder_characteristics]{ * Shaft Encoder Characteristics * } :

\begin{isamarkuptext}%
\isa{{\isacharbackslash}label{\isacharbraceleft}general{\isacharunderscore}hyps{\isacharbraceright}}%
\end{isamarkuptext}\isamarkuptrue%
\isacommand{subsection{\isacharasterisk}}\isamarkupfalse%
{\isacharbrackleft}Shaft{\isacharunderscore}Encoder{\isacharunderscore}characteristics{\isacharbrackright}{\isacharverbatimopen}\ Shaft\ Encoder\ Characteristics\ {\isacharverbatimclose}%

Generated output for: text\<open>\label{sec:Shaft-Encoder-characteristics}\<close>

\begin{isamarkuptext}%
\label{sec:Shaft-Encoder-characteristics}%
\end{isamarkuptext}\isamarkuptrue%


*)


\<close>

ML\<open>
Thy_Output.maybe_pretty_source :
  (Proof.context -> 'a -> Pretty.T) -> Proof.context -> Token.src -> 'a list -> Pretty.T list;

Thy_Output.output: Proof.context -> Pretty.T list -> string;

(* nothing special *)

fun document_antiq check_file ctxt (name, pos) =
  let
    (* val dir = master_directory (Proof_Context.theory_of ctxt); *)
    (* val _ = check_path check_file ctxt dir (name, pos); *)
  in
    space_explode "/" name
    |> map Latex.output_ascii
    |> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}")
    |> enclose "\\isatt{" "}"
  end;
Toplevel.theory_toplevel: theory -> Toplevel.state;
Toplevel.toplevel: Toplevel.state;
Toplevel.is_toplevel: Toplevel.state -> bool;
Toplevel.is_theory: Toplevel.state -> bool;
Toplevel.is_proof: Toplevel.state -> bool;
Toplevel.is_skipped_proof: Toplevel.state -> bool;
Toplevel.level: Toplevel.state -> int;
Toplevel.context_of: Toplevel.state -> Proof.context;
Toplevel.generic_theory_of: Toplevel.state -> generic_theory;
Toplevel.theory_of: Toplevel.state -> theory;
Toplevel.proof_of: Toplevel.state -> Proof.state;
Toplevel.presentation_context: Toplevel.state -> Proof.context;

\<close>
ML\<open> Type_Infer_Context.infer_types \<close>
ML\<open> Type_Infer_Context.prepare_positions \<close>



subsection \<open>Transaction Management in the Isar-Engine : The Toplevel \<close>

ML\<open>
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.document_command;

Toplevel.exit: Toplevel.transition -> Toplevel.transition;
Toplevel.keep: (Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition;

@@ -926,8 +851,6 @@ Toplevel.present_local_theory:
    | SOME (_, pos) =>
        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt)); *)
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
  Toplevel.transition -> Toplevel.transition;

(* Isar toplevel control *)
Toplevel.keep : (Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition;

@@ -959,22 +882,6 @@ Toplevel.theory : (theory -> theory) -> Toplevel.transition -> Toplevel.transiti

fun theory f = theory' (K f); *)

Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
  Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
  Toplevel.keep (fn state =>
    (case loc of
      NONE => ignore (output_text state markdown txt)
    | SOME (_, pos) =>
        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));

*)

Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)


\<close>

@@ -1011,50 +918,48 @@ Toplevel.theory : (theory -> theory) -> Toplevel.transition -> Toplevel.transiti

fun theory f = theory' (K f); *)

Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
  Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
  Toplevel.keep (fn state =>
    (case loc of
      NONE => ignore (output_text state markdown txt)
    | SOME (_, pos) =>
        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));

*)

Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)


\<close>


subsection\<open> Configuration flags of fixed type in the Isar-engine. \<close>

text\<open>The toplevel also provides an infrastructure for managing configuration options
for system components. Based on a sum-type @{ML_type Config.value}
with the alternatives \<^verbatim>\<open> Bool of bool | Int of int | Real of real | String of string\<close>
and building the parametric configuration types @{ML_type "'a Config.T"} and the
instance \<^verbatim>\<open>type raw = value T\<close>, for all registered configurations the protocol:
\<close>
ML\<open>
Config.get @{context} Thy_Output.quotes;
Config.get @{context} Thy_Output.display;
Config.get: Proof.context -> 'a Config.T -> 'a;
Config.map: 'a Config.T -> ('a -> 'a) -> Proof.context -> Proof.context;
Config.put: 'a Config.T -> 'a -> Proof.context -> Proof.context;
Config.get_global: theory -> 'a Config.T -> 'a;
Config.map_global: 'a Config.T -> ('a -> 'a) -> theory -> theory;
Config.put_global: 'a Config.T -> 'a -> theory -> theory;
\<close>
text\<open>... etc. is defined.\<close>

text\<open>Example registration of a config attribute XS232: \<close>
ML\<open>
val (XS232, XS232_setup)
  = Attrib.config_bool \<^binding>\<open>XS232\<close> (K false);

val _ = Theory.setup XS232_setup;
\<close>

(* or: just command:

setup\<open>XS232_setup\<close>

*)

text\<open>Another mechanism is global synchronised variables:\<close>
ML\<open> (* For example *)

val C = Synchronized.var "Pretty.modes" "latEEex";
(* Synchronized: a mechanism to bookkeep global
   variables with synchronization mechanism included *)
Synchronized.value C;
(*
fun output ctxt prts =
  prts
  |> Config.get ctxt quotes ? map Pretty.quote
  |> (if Config.get ctxt display then
        map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
        #> space_implode "\\isasep\\isanewline%\n"
        #> Latex.environment "isabelle"
      else
        map
          ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
           #> Output.output)
        #> space_implode "\\isasep\\isanewline%\n"
        #> enclose "\\isa{" "}");
*)
\<close>
|
||||
|
||||
chapter\<open>Front-End \<close>
|
||||
|
@ -1388,11 +1293,16 @@ val Z = let val attribute = Parse.position Parse.name --
|
|||
                              Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";
in (Scan.optional (Parse.$$$ "," |-- (Parse.enum "," attribute))) end;
(* this leads to constructions like the following, where a parser for a
   cartouche argument is plugged into an antiquotation: *)
fn name => (Thy_Output.antiquotation name (Scan.lift (Parse.position Args.cartouche_input)));

Thy_Output.antiquotation_pretty_source \<^binding>\<open>theory\<close> (Scan.lift (Parse.position Args.embedded));

Thy_Output.antiquotation_raw \<^binding>\<open>file\<close> (Scan.lift (Parse.position Parse.path));

fn name => (Thy_Output.antiquotation_pretty_source name (Scan.lift (Parse.position Args.cartouche_input)));
\<close>

section\<open>\<close>
ML\<open>Sign.add_trrules\<close>
section\<open> The PIDE Framework \<close>
subsection\<open> Markup \<close>

@@ -1512,19 +1422,35 @@ Output.output "bla_1:";
\<close>

section \<open> Output: LaTeX \<close>
ML\<open>
Thy_Output.verbatim_text;
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.antiquotation:
  binding ->
    'a context_parser ->
      ({context: Proof.context, source: Token.src, state: Toplevel.state} -> 'a -> string) ->
        theory -> theory;
Thy_Output.output: Proof.context -> Pretty.T list -> string;
\<close>
ML\<open> open Thy_Output;

output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list;
output_source: Proof.context -> string -> Latex.text list;
present_thy: Options.T -> theory -> segment list -> Latex.text list;
pretty_term: Proof.context -> term -> Pretty.T;
pretty_thm: Proof.context -> thm -> Pretty.T;
lines: Latex.text list -> Latex.text list;
items: Latex.text list -> Latex.text list;
isabelle: Proof.context -> Latex.text list -> Latex.text;
isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text;
typewriter: Proof.context -> string -> Latex.text;
verbatim: Proof.context -> string -> Latex.text;
source: Proof.context -> Token.src -> Latex.text;
pretty: Proof.context -> Pretty.T -> Latex.text;
pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text;
pretty_items: Proof.context -> Pretty.T list -> Latex.text;
pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text;
antiquotation_pretty:
  binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
antiquotation_pretty_source:
  binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
antiquotation_raw:
  binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory;
antiquotation_verbatim:
  binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory;

\<close>
@@ -1533,7 +1459,7 @@ ML\<open>
Syntax_Phases.reports_of_scope;
\<close>


(* STOP HERE JUNK ZONE :

(* Pretty.T, pretty-operations. *)
ML\<open>
@@ -1732,6 +1658,130 @@ As one can see, check-routines internally generate the markup.

*)

Consts.the_const; (* T is a kind of signature ... *)
Variable.import_terms;
Vartab.update;

fun control_antiquotation name s1 s2 =
  Thy_Output.antiquotation name (Scan.lift Args.cartouche_input)
    (fn {state, ...} => enclose s1 s2 o Thy_Output.output_text state {markdown = false});

Output.output;

Syntax.read_input;
Input.source_content;

(*
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
*)
chapter\<open>LaTeX Document Generation\<close>
text\<open>MORE TO COME\<close>


ML\<open> Thy_Output.document_command {markdown = true} \<close>
(* Structures related to LaTeX Generation *)
ML\<open> Latex.output_ascii;

Latex.output_token
(* Hm, generated output for
   subsection*[Shaft_Encoder_characteristics]{ * Shaft Encoder Characteristics * } :

\begin{isamarkuptext}%
\isa{{\isacharbackslash}label{\isacharbraceleft}general{\isacharunderscore}hyps{\isacharbraceright}}%
\end{isamarkuptext}\isamarkuptrue%
\isacommand{subsection{\isacharasterisk}}\isamarkupfalse%
{\isacharbrackleft}Shaft{\isacharunderscore}Encoder{\isacharunderscore}characteristics{\isacharbrackright}{\isacharverbatimopen}\ Shaft\ Encoder\ Characteristics\ {\isacharverbatimclose}%

   Generated output for: text\<open>\label{sec:Shaft-Encoder-characteristics}\<close>

\begin{isamarkuptext}%
\label{sec:Shaft-Encoder-characteristics}%
\end{isamarkuptext}\isamarkuptrue%

*)

\<close>

ML\<open>
Thy_Output.maybe_pretty_source :
  (Proof.context -> 'a -> Pretty.T) -> Proof.context -> Token.src -> 'a list -> Pretty.T list;

Thy_Output.output: Proof.context -> Pretty.T list -> string;

(* nothing special *)

fun document_antiq check_file ctxt (name, pos) =
  let
    (* val dir = master_directory (Proof_Context.theory_of ctxt); *)
    (* val _ = check_path check_file ctxt dir (name, pos); *)
  in
    space_explode "/" name
    |> map Latex.output_ascii
    |> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}")
    |> enclose "\\isatt{" "}"
  end;
\<close>
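
text\<open>For illustration (a sketch, not part of the original sources): applied to a
hypothetical name \<^verbatim>\<open>dir/file\<close>, the function above produces the LaTeX source
\<^verbatim>\<open>\isatt{dir/\discretionary{}{}{}file}\<close>, i.e. the ascii-escaped path with an
optional line break after the separator.\<close>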
ML\<open>

Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.document_command;

Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
                              Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
     Toplevel.keep (fn state =>
       (case loc of
         NONE => ignore (output_text state markdown txt)
       | SOME (_, pos) =>
           error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
     Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)

Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string;
(* this is where antiquotation expansion happens: uses eval_antiquote *)

*)

(* stuff over global environment : *)
(*
ML\<open> Document.state();\<close>
ML\<open> Session.get_keywords(); (* this looks to be really session global. *)
    Outer_Syntax.command; \<close>
ML\<open> Thy_Header.get_keywords @{theory}; (* this looks to be really theory global. *) \<close>
*)
section\<open>Inner Syntax\<close>
text\<open>MORE TO COME\<close>
ML\<open>Sign.add_trrules\<close>


section*[c::conclusion]\<open>Conclusion\<close>
text\<open>More to come\<close>
section*[bib::bibliography]\<open>Bibliography\<close>
install

@@ -1,6 +1,7 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
#               2018 The University of Paris-Sud. All rights reserved.
# Copyright (c) 2018-2019 The University of Sheffield.
#               2019-2019 The University of Exeter.
#               2018-2019 The University of Paris-Sud.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions

@@ -30,19 +31,11 @@
shopt -s nocasematch

# Global configuration
ISABELLE_VERSION="Isabelle2017: October 2017"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2017/"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz"
ISABELLE_VERSION="Isabelle2018: August 2018"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"

ISABELLE=`which isabelle`
GEN_DIR=document-generator
PROG=`echo $0 | sed 's|.*/||'`;
ISABELLE_VERSION=`$ISABELLE version`
SKIP="false"
VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
for i in $VARS; do
    export "$i"
done
AFP_DATE="afp-2019-06-04"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/"$AFP_DATE".tar.gz"

print_help()
{
@@ -69,14 +62,14 @@ exit_error() {

check_isabelle_version() {
    echo "* Checking Isabelle version:"
    if [ "$ISABELLE_VERSION" != "$ISABELLE_VERSION" ]; then
    if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
        echo "  WARNING:"
        echo "  The version of Isabelle (i.e., $ISABELLE_VERSION) you are using"
        echo "  The version of Isabelle (i.e., $ACTUAL_ISABELLE_VERSION) you are using"
        echo "  IS NOT SUPPORTED"
        echo "  by the current version of Isabelle/DOF. Please install a supported"
        echo "  version of Isabelle and rerun the install script, providing the"
        echo "  \"isabelle\" command as argument."
        echo "  Isabelle 2017 can be obtained from:"
        echo "  Isabelle ($ISABELLE_VERSION) can be obtained from:"
        echo "  $ISABELLE_URL"
        echo
        read -p "  Still continue (y/N)? " -n 1 -r

@@ -109,14 +102,14 @@ check_afp_entries() {
echo " Trying to install AFP (this might take a few *minutes*) ...."
|
||||
extract=""
|
||||
for e in $missing; do
|
||||
extract="$extract afp-2018-08-14/thys/$e"
|
||||
extract="$extract $AFP_DATE/thys/$e"
|
||||
done
|
||||
mkdir -p .afp
|
||||
if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
|
||||
for e in $missing; do
|
||||
echo " Registering $e in $ISABELLE_HOME_USER/ROOTS"
|
||||
touch $ISABELLE_HOME_USER/ROOTS
|
||||
grep -q $PWD/.afp/afp-2018-08-14/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/afp-2018-08-14/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
|
||||
grep -q $PWD/.afp/$AFP_DATE/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/$AFP_DATE/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
|
||||
done
|
||||
echo " AFP installation successful."
|
||||
else
|
||||
|
@@ -206,6 +199,7 @@ install_and_register(){
}


ISABELLE=`which isabelle`

while [ $# -gt 0 ]
do

@@ -225,6 +219,15 @@ do
done


ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
GEN_DIR=document-generator
PROG=`echo $0 | sed 's|.*/||'`;
SKIP="false"
VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
for i in $VARS; do
    export "$i"
done

echo "Isabelle/DOF Installer"
echo "======================"
check_isabelle_version

@@ -28,8 +28,6 @@ Safety assessment is focused on but not limited to the safety properties of a sy
Definition*[assessor::concept, tag="''assessor''"]
          \<open>entity that carries out an assessment\<close>

text\<open>@{docitem \<open>assessment\<close>}\<close>

Definition*[COTS::concept, tag="''commercial off-the-shelf software''"]
          \<open>software defined by market-driven need, commercially available and whose fitness for purpose
           has been demonstrated by a broad spectrum of commercial users\<close>

@@ -73,7 +71,7 @@ from the intended performance or behaviour (cf @{concept \<open>error\<close>})\
Definition*[failure::concept]
          \<open>unacceptable difference between required and observed performance\<close>

Definition*[FT::concept, tag="\<open>fault tolerance\<close>"]
Definition*[FT::concept, tag="''fault tolerance''"]
          \<open>built-in capability of a system to provide continued correct provision of service as specified,
           in the presence of a limited number of hardware or software faults\<close>


@@ -262,16 +260,6 @@ datatype phase = SYSDEV_ext (* System Development Phase (external) *)
              | SD      (* Software Deployment *)
              | SM      (* Software Maintenance *)

datatype sil = SIL0 | SIL1 | SIL2 | SIL3 | SIL4

type_synonym saftety_integraytion_level = sil

doc_class cenelec_text = text_element +
   phase        :: "phase"
   sil          :: sil
   is_concerned :: "role set" <= "UNIV"


abbreviation software_requirement  :: "phase" where "software_requirement \<equiv> SR"
abbreviation software_architecture :: "phase" where "software_architecture \<equiv> SA"
abbreviation software_design       :: "phase" where "software_design \<equiv> SD"

@@ -288,6 +276,9 @@ term "SR" (* meta-test *)
section\<open>Objectives, Conformance and Software Integrity Levels\<close>

datatype sil = SIL0 | SIL1 | SIL2 | SIL3 | SIL4

type_synonym saftety_integraytion_level = sil


doc_class objectives =

@@ -295,7 +286,7 @@ doc_class objectives =
   is_concerned :: "role set"


doc_class requirement = cenelec_text +
doc_class requirement = text_element +
   long_name    :: "string option"
   is_concerned :: "role set"
@@ -459,6 +450,11 @@ doc_class judgement =

section\<open> Design and Test Documents \<close>

doc_class cenelec_text = text_element +
   phase        :: "phase"
   is_concerned :: "role set" <= "UNIV"



doc_class SYSREQS = cenelec_text +
   phase :: "phase" <= "SYSDEV_ext"

@@ -696,36 +692,31 @@ doc_class test_documentation =
section\<open> META : Testing and Validation \<close>


text\<open>Test : @{concept \<open>COTS\<close>}\<close>


ML\<open>
DOF_core.name2doc_class_name @{theory} "requirement";
DOF_core.name2doc_class_name @{theory} "SRAC";
DOF_core.is_defined_cid_global "SRAC" @{theory};
DOF_core.is_defined_cid_global "EC" @{theory};
"XXXXXXXXXXXXXXXXX";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.EC";
\<close>

ML\<open>
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.EC";
DOF_core.is_subclass @{context} "CENELEC_50128.SRAC" "CENELEC_50128.EC";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.SRAC";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.test_requirement";
"XXXXXXXXXXXXXXXXX";
val {docobj_tab={maxano, tab=ref_tab},docclass_tab=class_tab,...} = DOF_core.get_data @{context};
\<close>

ML\<open>val {docobj_tab={maxano, tab=ref_tab},docclass_tab=class_tab,...} = DOF_core.get_data @{context};
   Symtab.dest ref_tab;
   Symtab.dest class_tab;
\<close>


ML\<open>
"XXXXXXXXXXXXXXXXX";

DOF_core.get_attributes_local "SRAC" @{context};

@{term assumption_kind};
val internal_data_of_SRAC_definition = DOF_core.get_attributes_local "SRAC" @{context}
\<close>


ML\<open>
DOF_core.name2doc_class_name @{theory} "requirement";
Syntax.parse_typ @{context} "requirement";

@@ -734,10 +725,5 @@ Syntax.read_typ @{context} "hypothesis" handle _ => dummyT;
Proof_Context.init_global;
\<close>

text\<open>
@{theory_text [display] \<open>definition a\<^sub>E \<equiv> True
                           lemma XXX : "True = False " by auto\<close>}
\<close>

end
@@ -46,12 +46,19 @@ doc_class M =
   accepts "A ~~ \<lbrace>C || D\<rbrace>\<^sup>* ~~ \<lbrakk>F\<rbrakk>"


section*[test::A]\<open> Test and Validation\<close>

(*
ML\<open> Document.state();\<close>
ML\<open> Session.get_keywords(); (* this looks to be really session global. *)
    Outer_Syntax.command; \<close>
ML\<open> Thy_Header.get_keywords @{theory}; (* this looks to be really theory global. *) \<close>
*)

section* [ test :: A ] \<open> Test and Validation\<close>
text\<open>Defining some document elements to be referenced later on in another theory: \<close>
text*[sdf] {* f @{thm refl}*}
text*[sdfg] {* fg @{thm refl}*}

text*[xxxy] {* dd @{docitem \<open>sdfg\<close>} @{thm refl}*}
text* [ sdf ] \<open> f @{thm refl}\<close>
text* [ sdfg ] \<open> fg @{thm refl}\<close>
text* [ xxxy ] \<open> dd @{docitem \<open>sdfg\<close>} @{thm refl}\<close>


end
end
@@ -17,10 +17,10 @@ where the author of the exam is not expected to be physically present.


datatype ContentClass =
   setter            -- \<open>the 'author' of the exam\<close>
 | checker           -- \<open>the 'proof-reader' of the exam\<close>
 | externalExaminer  -- \<open>an external 'proof-reader' of the exam\<close>
 | student           -- \<open>the victim ;-) ... \<close>
   setter            (* \<open>the 'author' of the exam\<close> *)
 | checker           (* \<open>the 'proof-reader' of the exam\<close> *)
 | externalExaminer  (* \<open>an external 'proof-reader' of the exam\<close> *)
 | student           (* \<open>the victim ;-) ... \<close> *)


doc_class Author =
@@ -47,7 +47,7 @@ doc_class Exam_item =
doc_class Header = Exam_item +
   examSubject :: "(Subject) list"
   date        :: string
   timeAllowed :: int -- minutes
   timeAllowed :: int (* minutes *)


type_synonym SubQuestion = string
@@ -58,7 +58,7 @@ doc_class Answer_Formal_Step = Exam_item +

doc_class Answer_YesNo = Exam_item +
   step_label :: string
   yes_no     :: bool -- \<open>for checkboxes\<close>
   yes_no     :: bool (* \<open>for checkboxes\<close> *)

datatype Question_Type =
   formal | informal | mixed
@@ -10,7 +10,7 @@ doc_class title =
doc_class subtitle =
   abbrev :: "string option" <= "None"

-- \<open>adding a contribution list and checking that it is cited as well in tech as in conclusion. ? \<close>
(* adding a contribution list and checking that it is cited as well in tech as in conclusion. ? *)

doc_class author =
   email :: "string" <= "''''"

@@ -18,6 +18,7 @@ doc_class author =
   orcid       :: "string" <= "''''"
   affiliation :: "string"


doc_class abstract =
   keywordlist        :: "string list" <= "[]"
   principal_theorems :: "thm list"

@@ -64,7 +65,7 @@ doc_class annex = "text_section" +


text\<open> Besides subtyping, there is another relation between
doc_classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
doc\_classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
which is expressed by occurrence in the where clause.
While sub-classing refers to data-inheritance of attributes,
a monitor captures structural constraints -- the order --
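
(* Illustration (a sketch added for this presentation, not part of the commit;
   the attribute "trace" is hypothetical): a monitor class constrains the order
   of instances of other classes by a regular expression over their occurrences,
   as in the test theory touched by this commit:

doc_class M =
   trace :: "(A + C + D + F) list"
   accepts "A ~~ \<lbrace>C || D\<rbrace>\<^sup>* ~~ \<lbrakk>F\<rbrakk>"
*)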
@@ -111,7 +112,7 @@ local
fun group f g cidS [] = []
   |group f g cidS (a::S) = case find_first (f a) cidS of
            NONE => [a] :: group f g cidS S
          | SOME cid => let val (pref,suff) = take_prefix (g cid) S
          | SOME cid => let val (pref,suff) = chop_prefix (g cid) S
                        in (a::pref)::(group f g cidS suff) end;

fun partition ctxt cidS trace =
@@ -111,7 +111,7 @@ local
fun group f g cidS [] = []
   |group f g cidS (a::S) = case find_first (f a) cidS of
            NONE => [a] :: group f g cidS S
          | SOME cid => let val (pref,suff) = take_prefix (g cid) S
          | SOME cid => let val (pref,suff) = chop_prefix (g cid) S
                        in (a::pref)::(group f g cidS suff) end;

fun partition ctxt cidS trace =
@@ -0,0 +1,495 @@
(*  Title:      Pure/Thy/thy_info.ML
    Author:     Markus Wenzel, TU Muenchen

Global theory info database, with auto-loading according to theory and
file dependencies.
*)

signature THY_INFO =
sig
  type presentation_context =
    {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
     segments: Thy_Output.segment list}
  val apply_presentation: presentation_context -> theory -> unit
  val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory
  val get_names: unit -> string list
  val lookup_theory: string -> theory option
  val get_theory: string -> theory
  val master_directory: string -> Path.T
  val remove_thy: string -> unit
  type context =
    {options: Options.T,
     symbols: HTML.symbols,
     bibtex_entries: string list,
     last_timing: Toplevel.transition -> Time.time}
  val use_theories: context -> string -> Path.T -> (string * Position.T) list -> unit
  val use_thy: string -> unit
  val script_thy: Position.T -> string -> theory -> theory
  val register_thy: theory -> unit
  val finish: unit -> unit
end;

structure Thy_Info: THY_INFO =
struct

(** presentation of consolidated theory **)

type presentation_context =
  {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
   segments: Thy_Output.segment list};

structure Presentation = Theory_Data
(
  type T = ((presentation_context -> theory -> unit) * stamp) list;
  val empty = [];
  val extend = I;
  fun merge data : T = Library.merge (eq_snd op =) data;
);

fun apply_presentation (context: presentation_context) thy =
  ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy));

fun add_presentation f = Presentation.map (cons (f, stamp ()));

val _ =
  Theory.setup (add_presentation (fn {options, file_pos, segments, ...} => fn thy =>
    if exists (Toplevel.is_skipped_proof o #state) segments then ()
    else
      let
        val body = Thy_Output.present_thy options thy segments;
        val option = Present.document_option options;
      in
        if #disabled option then ()
        else
          let
            val latex = Latex.isabelle_body (Context.theory_name thy) body;
            val output = [Latex.output_text latex, Latex.output_positions file_pos latex];
            val _ =
              if Options.bool options "export_document"
              then Export.export thy "document.tex" output else ();
            val _ = if #enabled option then Present.theory_output thy output else ();
          in () end
      end));



(** thy database **)

(* messages *)

val show_path = space_implode " via " o map quote;

fun cycle_msg names = "Cyclic dependency of " ^ show_path names;


(* derived graph operations *)

fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G
  handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess));

fun new_entry name parents entry =
  String_Graph.new_node (name, entry) #> add_deps name parents;


(* global thys *)

type deps =
  {master: (Path.T * SHA1.digest),  (*master dependencies for thy file*)
   imports: (string * Position.T) list};  (*source specification of imports (partially qualified)*)

fun make_deps master imports : deps = {master = master, imports = imports};

fun master_dir_deps (d: deps option) =
  the_default Path.current (Option.map (Path.dir o #1 o #master) d);

local
  val global_thys =
    Synchronized.var "Thy_Info.thys"
      (String_Graph.empty: (deps option * theory option) String_Graph.T);
in
  fun get_thys () = Synchronized.value global_thys;
  fun change_thys f = Synchronized.change global_thys f;
end;

fun get_names () = String_Graph.topological_order (get_thys ());


(* access thy *)

fun lookup thys name = try (String_Graph.get_node thys) name;
fun lookup_thy name = lookup (get_thys ()) name;

fun get thys name =
  (case lookup thys name of
    SOME thy => thy
  | NONE => error ("Theory loader: nothing known about theory " ^ quote name));

fun get_thy name = get (get_thys ()) name;


(* access deps *)

val lookup_deps = Option.map #1 o lookup_thy;

val master_directory = master_dir_deps o #1 o get_thy;


(* access theory *)

fun lookup_theory name =
  (case lookup_thy name of
    SOME (_, SOME theory) => SOME theory
  | _ => NONE);

fun get_theory name =
  (case lookup_theory name of
    SOME theory => theory
  | _ => error ("Theory loader: undefined entry for theory " ^ quote name));

val get_imports = Resources.imports_of o get_theory;



(** thy operations **)

(* remove *)

fun remove name thys =
  (case lookup thys name of
    NONE => thys
  | SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name)
  | SOME _ =>
      let
        val succs = String_Graph.all_succs thys [name];
        val _ = writeln ("Theory loader: removing " ^ commas_quote succs);
      in fold String_Graph.del_node succs thys end);

val remove_thy = change_thys o remove;


(* update *)

fun update deps theory thys =
  let
    val name = Context.theory_long_name theory;
    val parents = map Context.theory_long_name (Theory.parents_of theory);

    val thys' = remove name thys;
    val _ = map (get thys') parents;
  in new_entry name parents (SOME deps, SOME theory) thys' end;

fun update_thy deps theory = change_thys (update deps theory);


(* context *)

type context =
  {options: Options.T,
   symbols: HTML.symbols,
   bibtex_entries: string list,
   last_timing: Toplevel.transition -> Time.time};

fun default_context (): context =
  {options = Options.default (),
   symbols = HTML.no_symbols,
   bibtex_entries = [],
   last_timing = K Time.zeroTime};


(* scheduling loader tasks *)

datatype result =
  Result of {theory: theory, exec_id: Document_ID.exec,
    present: unit -> unit, commit: unit -> unit, weight: int};

fun theory_result theory =
  Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0};

fun result_theory (Result {theory, ...}) = theory;
fun result_present (Result {present, ...}) = present;
fun result_commit (Result {commit, ...}) = commit;
fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i);

fun join_theory (Result {theory, exec_id, ...}) =
  let
    val _ = Execution.join [exec_id];
    val res = Exn.capture Thm.consolidate_theory theory;
    val exns = maps Task_Queue.group_status (Execution.peek exec_id);
  in res :: map Exn.Exn exns end;

datatype task =
  Task of string list * (theory list -> result) |
  Finished of theory;

fun task_finished (Task _) = false
  | task_finished (Finished _) = true;

fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents;

val schedule_seq =
  String_Graph.schedule (fn deps => fn (_, task) =>
    (case task of
      Task (parents, body) =>
        let
          val result = body (task_parents deps parents);
          val _ = Par_Exn.release_all (join_theory result);
          val _ = result_present result ();
          val _ = result_commit result ();
        in result_theory result end
    | Finished thy => thy)) #> ignore;

val schedule_futures = Thread_Attributes.uninterruptible (fn _ => fn tasks =>
  let
    val futures = tasks
      |> String_Graph.schedule (fn deps => fn (name, task) =>
        (case task of
          Task (parents, body) =>
            (singleton o Future.forks)
              {name = "theory:" ^ name, group = NONE,
                deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true}
              (fn () =>
                (case filter (not o can Future.join o #2) deps of
                  [] => body (map (result_theory o Future.join) (task_parents deps parents))
                | bad =>
                    error
                      ("Failed to load theory " ^ quote name ^
                        " (unresolved " ^ commas_quote (map #1 bad) ^ ")")))
        | Finished theory => Future.value (theory_result theory)));

    val results1 = futures
      |> maps (fn future =>
          (case Future.join_result future of
            Exn.Res result => join_theory result
          | Exn.Exn exn => [Exn.Exn exn]));

    val results2 = futures
      |> map_filter (Exn.get_res o Future.join_result)
      |> sort result_ord
      |> Par_List.map (fn result => Exn.capture (result_present result) ());

    (* FIXME more precise commit order (!?) *)
    val results3 = futures
      |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ());

    (* FIXME avoid global Execution.reset (!??) *)
    val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ()));

    val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4);
  in () end);


(* eval theory *)

fun excursion keywords master_dir last_timing init elements =
  let
    fun prepare_span st span =
      Command_Span.content span
      |> Command.read keywords (Command.read_thy st) master_dir init ([], ~1)
      |> (fn tr => Toplevel.put_timing (last_timing tr) tr);

    fun element_result span_elem (st, _) =
      let
        val elem = Thy_Syntax.map_element (prepare_span st) span_elem;
        val (results, st') = Toplevel.element_result keywords elem st;
        val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem);
      in (results, (st', pos')) end;

    val (results, (end_state, end_pos)) =
      fold_map element_result elements (Toplevel.toplevel, Position.none);

    val thy = Toplevel.end_theory end_pos end_state;
  in (results, thy) end;

fun eval_thy (context: context) update_time master_dir header text_pos text parents =
  let
    val {options, symbols, bibtex_entries, last_timing} = context;
    val (name, _) = #name header;
    val keywords =
      fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents
        (Keyword.add_keywords (#keywords header) Keyword.empty_keywords);

    val _ = writeln "eval_thy 1";

    val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text);
    val _ = writeln "eval_thy 2";
    val elements = Thy_Syntax.parse_elements keywords spans;
    val _ = writeln "eval_thy 3";

    fun init () =
      Resources.begin_theory master_dir header parents
      |> Present.begin_theory bibtex_entries update_time
        (fn () => implode (map (HTML.present_span symbols keywords) spans));

    val (results, thy) =
      cond_timeit true ("theory " ^ quote name)
        (fn () => excursion keywords master_dir last_timing init elements);

    fun present () =
      let
        val _ = writeln "eval_thy 4 - present"
        val segments = (spans ~~ maps Toplevel.join_results results)
          |> map (fn (span, (tr, st')) => {span = span, command = tr, state = st'});

        val X = if null segments then (writeln "eval_thy 5 - no segments"; [])
                else List.concat (map (fn X => Command_Span.content (#span X)) segments)
        val Y = (String.concatWith "::") (map Token.content_of X)
        val _ = writeln ("eval_thy 5 BEGIN\n" ^ Y ^ "eval_thy 6 END:" ^ Context.theory_name thy ^ "\n")

        val context: presentation_context =
          {options = options, file_pos = text_pos, adjust_pos = I, segments = segments};
      in apply_presentation context thy end;
  in (thy, present, size text) end;


(* require_thy -- checking database entries wrt. the file-system *)

local

fun required_by _ [] = ""
  | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")";

fun load_thy context initiators update_time deps text (name, pos) keywords parents =
  let
    val _ = remove_thy name;
    val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators);
    val _ = Output.try_protocol_message (Markup.loading_theory name) [];

    val {master = (thy_path, _), imports} = deps;
    val dir = Path.dir thy_path;
    val header = Thy_Header.make (name, pos) imports keywords;

    val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents);

    val exec_id = Document_ID.make ();
    val _ =
      Execution.running Document_ID.none exec_id [] orelse
        raise Fail ("Failed to register execution: " ^ Document_ID.print exec_id);

    val timing_start = Timing.start ();

    val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path);
    val (theory, present, weight) =
      eval_thy context update_time dir header text_pos text
        (if name = Context.PureN then [Context.the_global_context ()] else parents);

    val timing_result = Timing.result timing_start;
    val timing_props = [Markup.theory_timing, (Markup.nameN, name)];
    val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) []

    fun commit () = update_thy deps theory;
  in
    Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight}
  end;

fun check_deps dir name =
  (case lookup_deps name of
    SOME NONE => (true, NONE, Position.none, get_imports name, [])
  | NONE =>
      let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name
      in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end
  | SOME (SOME {master, ...}) =>
      let
        val {master = master', text = text', theory_pos = theory_pos', imports = imports',
          keywords = keywords'} = Resources.check_thy dir name;
        val deps' = SOME (make_deps master' imports', text');
        val current =
          #2 master = #2 master' andalso
            (case lookup_theory name of
              NONE => false
            | SOME theory => Resources.loaded_files_current theory);
      in (current, deps', theory_pos', imports', keywords') end);

in

fun require_thys context initiators qualifier dir strs tasks =
      fold_map (require_thy context initiators qualifier dir) strs tasks |>> forall I
and require_thy context initiators qualifier dir (s, require_pos) tasks =
  let
    val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s;
  in
    (case try (String_Graph.get_node tasks) theory_name of
      SOME task => (task_finished task, tasks)
    | NONE =>
        let
          val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators);

          val (current, deps, theory_pos, imports, keywords) = check_deps master_dir theory_name
            handle ERROR msg =>
              cat_error msg
                ("The error(s) above occurred for theory " ^ quote theory_name ^
                  Position.here require_pos ^ required_by "\n" initiators);

          val qualifier' = Resources.theory_qualifier theory_name;
          val dir' = Path.append dir (master_dir_deps (Option.map #1 deps));

          val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports;
          val (parents_current, tasks') =
            require_thys context (theory_name :: initiators) qualifier' dir' imports tasks;

          val all_current = current andalso parents_current;
          val task =
            if all_current then Finished (get_theory theory_name)
            else
              (case deps of
                NONE => raise Fail "Malformed deps"
              | SOME (dep, text) =>
                  let
                    val update_time = serial ();
                    val load =
                      load_thy context initiators update_time
                        dep text (theory_name, theory_pos) keywords;
                  in Task (parents, load) end);

          val tasks'' = new_entry theory_name parents task tasks';
        in (all_current, tasks'') end)
  end;

end;


(* use theories *)

fun use_theories context qualifier master_dir imports =
  let val (_, tasks) = require_thys context [] qualifier master_dir imports String_Graph.empty
  in if Multithreading.max_threads () > 1 then schedule_futures tasks else schedule_seq tasks end;

fun use_thy name =
  use_theories (default_context ()) Resources.default_qualifier
    Path.current [(name, Position.none)];


(* toplevel scripting -- without maintaining database *)

fun script_thy pos txt thy =
  let
    val trs =
      Outer_Syntax.parse thy pos txt
      |> map (Toplevel.modify_init (K thy));
    val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs);
    val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
  in Toplevel.end_theory end_pos end_state end;


(* register theory *)

fun register_thy theory =
  let
    val name = Context.theory_long_name theory;
    val {master, ...} = Resources.check_thy (Resources.master_directory theory) name;
    val imports = Resources.imports_of theory;
  in
    change_thys (fn thys =>
      let
        val thys' = remove name thys;
        val _ = writeln ("Registering theory " ^ quote name);
      in update (make_deps master imports) theory thys' end)
  end;


(* finish all theories *)

fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry)));

end;

fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name);
@ -0,0 +1,484 @@
|
|||
(* Title: Pure/Thy/thy_info.ML
|
||||
Author: Markus Wenzel, TU Muenchen
|
||||
|
||||
Global theory info database, with auto-loading according to theory and
|
||||
file dependencies.
|
||||
*)
|
||||
|
||||
signature THY_INFO =
|
||||
sig
|
||||
type presentation_context =
|
||||
{options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
|
||||
segments: Thy_Output.segment list}
|
||||
val apply_presentation: presentation_context -> theory -> unit
|
||||
val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory
|
||||
val get_names: unit -> string list
|
||||
val lookup_theory: string -> theory option
|
||||
val get_theory: string -> theory
|
||||
val master_directory: string -> Path.T
|
||||
val remove_thy: string -> unit
|
||||
type context =
|
||||
{options: Options.T,
|
||||
symbols: HTML.symbols,
|
||||
bibtex_entries: string list,
|
||||
last_timing: Toplevel.transition -> Time.time}
|
||||
val use_theories: context -> string -> Path.T -> (string * Position.T) list -> unit
|
||||
val use_thy: string -> unit
|
||||
val script_thy: Position.T -> string -> theory -> theory
|
||||
val register_thy: theory -> unit
|
||||
val finish: unit -> unit
|
||||
end;
|
||||
|
||||
structure Thy_Info: THY_INFO =
|
||||
struct
|
||||
|
||||
(** presentation of consolidated theory **)
|
||||
|
||||
type presentation_context =
|
||||
{options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
|
||||
segments: Thy_Output.segment list};
|
||||
|
||||
structure Presentation = Theory_Data
|
||||
(
|
||||
type T = ((presentation_context -> theory -> unit) * stamp) list;
|
||||
val empty = [];
|
||||
val extend = I;
|
||||
fun merge data : T = Library.merge (eq_snd op =) data;
|
||||
);
|
||||
|
||||
fun apply_presentation (context: presentation_context) thy =
|
||||
ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy));
|
||||
|
||||
fun add_presentation f = Presentation.map (cons (f, stamp ()));
|
||||
|
||||
val _ =
|
||||
Theory.setup (add_presentation (fn {options, file_pos, segments, ...} => fn thy =>
|
||||
if exists (Toplevel.is_skipped_proof o #state) segments then ()
|
||||
else
|
||||
let
|
||||
val body = Thy_Output.present_thy options thy segments;
|
||||
val option = Present.document_option options;
|
||||
in
|
||||
if #disabled option then ()
|
||||
else
|
||||
let
|
||||
val latex = Latex.isabelle_body (Context.theory_name thy) body;
|
||||
val output = [Latex.output_text latex, Latex.output_positions file_pos latex];
|
||||
val _ =
|
||||
if Options.bool options "export_document"
|
||||
then Export.export thy "document.tex" output else ();
|
||||
val _ = if #enabled option then Present.theory_output thy output else ();
|
||||
in () end
|
||||
end));
|
||||
|
||||
|
||||
|
||||
(** thy database **)
|
||||
|
||||
(* messages *)
|
||||
|
||||
val show_path = space_implode " via " o map quote;
|
||||
|
||||
fun cycle_msg names = "Cyclic dependency of " ^ show_path names;
|
||||
|
||||
|
||||
(* derived graph operations *)
|
||||
|
||||
fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G
|
||||
handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess));
|
||||
|
||||
fun new_entry name parents entry =
|
||||
String_Graph.new_node (name, entry) #> add_deps name parents;
|
||||
|
||||
|
||||
(* global thys *)
|
||||
|
||||
type deps =
|
||||
{master: (Path.T * SHA1.digest), (*master dependencies for thy file*)
|
||||
imports: (string * Position.T) list}; (*source specification of imports (partially qualified)*)
|
||||
|
||||
fun make_deps master imports : deps = {master = master, imports = imports};
|
||||
|
||||
fun master_dir_deps (d: deps option) =
|
||||
the_default Path.current (Option.map (Path.dir o #1 o #master) d);
|
||||
|
||||
local
|
||||
val global_thys =
|
||||
Synchronized.var "Thy_Info.thys"
|
||||
(String_Graph.empty: (deps option * theory option) String_Graph.T);
|
||||
in
|
||||
fun get_thys () = Synchronized.value global_thys;
|
||||
fun change_thys f = Synchronized.change global_thys f;
|
||||
end;
|
||||
|
||||
fun get_names () = String_Graph.topological_order (get_thys ());
|
||||
|
||||
|
||||
(* access thy *)
|
||||
|
||||
fun lookup thys name = try (String_Graph.get_node thys) name;
|
||||
fun lookup_thy name = lookup (get_thys ()) name;
|
||||
|
||||
fun get thys name =
|
||||
(case lookup thys name of
|
||||
SOME thy => thy
|
||||
| NONE => error ("Theory loader: nothing known about theory " ^ quote name));
|
||||
|
||||
fun get_thy name = get (get_thys ()) name;
|
||||
|
||||
|
||||
(* access deps *)
|
||||
|
||||
val lookup_deps = Option.map #1 o lookup_thy;
|
||||
|
||||
val master_directory = master_dir_deps o #1 o get_thy;
|
||||
|
||||
|
||||
(* access theory *)
|
||||
|
||||
fun lookup_theory name =
|
||||
(case lookup_thy name of
|
||||
SOME (_, SOME theory) => SOME theory
|
||||
| _ => NONE);
|
||||
|
||||
fun get_theory name =
|
||||
(case lookup_theory name of
|
||||
SOME theory => theory
|
||||
| _ => error ("Theory loader: undefined entry for theory " ^ quote name));
|
||||
|
||||
val get_imports = Resources.imports_of o get_theory;
|
||||
|
||||
|
||||
|
||||
(** thy operations **)
|
||||
|
||||
(* remove *)
|
||||
|
||||
fun remove name thys =
|
||||
(case lookup thys name of
|
||||
NONE => thys
|
||||
| SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name)
|
||||
| SOME _ =>
|
||||
let
|
||||
val succs = String_Graph.all_succs thys [name];
|
||||
val _ = writeln ("Theory loader: removing " ^ commas_quote succs);
|
||||
in fold String_Graph.del_node succs thys end);
|
||||
|
||||
val remove_thy = change_thys o remove;
|
||||
|
||||
|
||||
(* update *)
|
||||
|
||||
fun update deps theory thys =
|
||||
let
|
||||
val name = Context.theory_long_name theory;
|
||||
val parents = map Context.theory_long_name (Theory.parents_of theory);
|
||||
|
||||
val thys' = remove name thys;
|
||||
val _ = map (get thys') parents;
|
||||
in new_entry name parents (SOME deps, SOME theory) thys' end;
|
||||
|
||||
fun update_thy deps theory = change_thys (update deps theory);
|
||||
|
||||
|
||||
(* context *)
|
||||
|
||||
type context =
|
||||
{options: Options.T,
|
||||
symbols: HTML.symbols,
|
||||
bibtex_entries: string list,
|
||||
last_timing: Toplevel.transition -> Time.time};
|
||||
|
||||
fun default_context (): context =
|
||||
{options = Options.default (),
|
||||
symbols = HTML.no_symbols,
|
||||
bibtex_entries = [],
|
||||
last_timing = K Time.zeroTime};
|
||||
|
||||
|
||||
(* scheduling loader tasks *)
|
||||
|
||||
datatype result =
|
||||
Result of {theory: theory, exec_id: Document_ID.exec,
|
||||
present: unit -> unit, commit: unit -> unit, weight: int};
|
||||
|
||||
fun theory_result theory =
|
||||
Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0};
|
||||
|
||||
fun result_theory (Result {theory, ...}) = theory;
|
||||
fun result_present (Result {present, ...}) = present;
|
||||
fun result_commit (Result {commit, ...}) = commit;
|
||||
fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i);
|
||||
|
||||
fun join_theory (Result {theory, exec_id, ...}) =
|
||||
let
|
||||
val _ = Execution.join [exec_id];
|
||||
val res = Exn.capture Thm.consolidate_theory theory;
|
||||
val exns = maps Task_Queue.group_status (Execution.peek exec_id);
|
||||
in res :: map Exn.Exn exns end;
|
||||
|
||||
datatype task =
|
||||
Task of string list * (theory list -> result) |
|
||||
Finished of theory;
|
||||
|
||||
fun task_finished (Task _) = false
|
||||
| task_finished (Finished _) = true;
|
||||
|
||||
fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents;
|
||||
|
||||
val schedule_seq =
|
||||
String_Graph.schedule (fn deps => fn (_, task) =>
|
||||
(case task of
|
||||
Task (parents, body) =>
|
||||
let
|
||||
val result = body (task_parents deps parents);
|
||||
val _ = Par_Exn.release_all (join_theory result);
|
||||
val _ = result_present result ();
|
||||
val _ = result_commit result ();
|
||||
in result_theory result end
|
||||
| Finished thy => thy)) #> ignore;
|
||||
|
||||
val schedule_futures = Thread_Attributes.uninterruptible (fn _ => fn tasks =>
|
||||
let
|
||||
val futures = tasks
|
||||
|> String_Graph.schedule (fn deps => fn (name, task) =>
|
||||
(case task of
|
||||
Task (parents, body) =>
|
||||
(singleton o Future.forks)
|
||||
{name = "theory:" ^ name, group = NONE,
|
||||
deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true}
|
||||
(fn () =>
|
||||
(case filter (not o can Future.join o #2) deps of
|
||||
[] => body (map (result_theory o Future.join) (task_parents deps parents))
|
||||
| bad =>
|
||||
error
|
||||
("Failed to load theory " ^ quote name ^
|
||||
" (unresolved " ^ commas_quote (map #1 bad) ^ ")")))
|
||||
| Finished theory => Future.value (theory_result theory)));
|
||||
|
||||
val results1 = futures
|
||||
|> maps (fn future =>
|
||||
(case Future.join_result future of
|
||||
Exn.Res result => join_theory result
|
||||
| Exn.Exn exn => [Exn.Exn exn]));
|
||||
|
||||
val results2 = futures
|
||||
|> map_filter (Exn.get_res o Future.join_result)
|
||||
|> sort result_ord
|
||||
|> Par_List.map (fn result => Exn.capture (result_present result) ());
|
||||
|
||||
(* FIXME more precise commit order (!?) *)
|
||||
val results3 = futures
|
||||
|> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ());
|
||||
|
||||
(* FIXME avoid global Execution.reset (!??) *)
|
||||
val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ()));
|
||||
|
||||
val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4);
|
||||
in () end);
|
||||
|
||||
|
||||
(* eval theory *)
|
||||
|
||||
fun excursion keywords master_dir last_timing init elements =
|
||||
let
|
||||
fun prepare_span st span =
|
||||
Command_Span.content span
|
||||
|> Command.read keywords (Command.read_thy st) master_dir init ([], ~1)
|
||||
|> (fn tr => Toplevel.put_timing (last_timing tr) tr);
|
||||
|
||||
fun element_result span_elem (st, _) =
|
||||
let
|
||||
val elem = Thy_Syntax.map_element (prepare_span st) span_elem;
|
||||
val (results, st') = Toplevel.element_result keywords elem st;
|
||||
val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem);
|
||||
in (results, (st', pos')) end;
|
||||
|
||||
val (results, (end_state, end_pos)) =
|
||||
fold_map element_result elements (Toplevel.toplevel, Position.none);
|
||||
|
||||
val thy = Toplevel.end_theory end_pos end_state;
|
||||
in (results, thy) end;
|
||||
|
||||
fun eval_thy (context: context) update_time master_dir header text_pos text parents =
|
||||
let
|
||||
val {options, symbols, bibtex_entries, last_timing} = context;
|
||||
val (name, _) = #name header;
|
||||
val keywords =
|
||||
fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents
|
||||
(Keyword.add_keywords (#keywords header) Keyword.empty_keywords);
|
||||
|
||||
val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text);
|
||||
val elements = Thy_Syntax.parse_elements keywords spans;
|
||||
|
||||
fun init () =
|
||||
Resources.begin_theory master_dir header parents
|
||||
|> Present.begin_theory bibtex_entries update_time
|
||||
(fn () => implode (map (HTML.present_span symbols keywords) spans));
|
||||
|
||||
val (results, thy) =
|
||||
cond_timeit true ("theory " ^ quote name)
|
||||
(fn () => excursion keywords master_dir last_timing init elements);
|
||||
|
||||
fun present () =
|
||||
let
|
||||
val segments = (spans ~~ maps Toplevel.join_results results)
|
||||
|> map (fn (span, (tr, st')) => {span = span, command = tr, state = st'});
|
||||
val context: presentation_context =
|
||||
{options = options, file_pos = text_pos, adjust_pos = I, segments = segments};
|
||||
in apply_presentation context thy end;
|
||||
in (thy, present, size text) end;
|
||||
|
||||
|
||||
(* require_thy -- checking database entries wrt. the file-system *)
|
||||
|
||||
local
|
||||
|
||||
fun required_by _ [] = ""
|
||||
| required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")";
|
||||
|
||||
fun load_thy context initiators update_time deps text (name, pos) keywords parents =
|
||||
let
|
||||
val _ = remove_thy name;
|
||||
val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators);
|
||||
val _ = Output.try_protocol_message (Markup.loading_theory name) [];
|
||||
|
||||
val {master = (thy_path, _), imports} = deps;
|
||||
val dir = Path.dir thy_path;
|
||||
val header = Thy_Header.make (name, pos) imports keywords;
|
||||
|
||||
val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents);
|
||||
|
||||
val exec_id = Document_ID.make ();
|
||||
val _ =
|
||||
Execution.running Document_ID.none exec_id [] orelse
|
||||
raise Fail ("Failed to register execution: " ^ Document_ID.print exec_id);
|
||||
|
||||
val timing_start = Timing.start ();
|
||||
|
||||
val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path);
|
||||
val (theory, present, weight) =
|
||||
eval_thy context update_time dir header text_pos text
|
||||
(if name = Context.PureN then [Context.the_global_context ()] else parents);
|
||||
|
||||
val timing_result = Timing.result timing_start;
|
||||
val timing_props = [Markup.theory_timing, (Markup.nameN, name)];
|
||||
val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) []
|
||||
|
||||
fun commit () = update_thy deps theory;
|
||||
in
|
||||
Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight}
|
||||
end;
|
||||
|
||||
fun check_deps dir name =
|
||||
(case lookup_deps name of
|
||||
SOME NONE => (true, NONE, Position.none, get_imports name, [])
|
||||
| NONE =>
|
||||
let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name
|
||||
in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end
|
||||
| SOME (SOME {master, ...}) =>
|
||||
let
|
||||
val {master = master', text = text', theory_pos = theory_pos', imports = imports',
|
||||
keywords = keywords'} = Resources.check_thy dir name;
|
        val deps' = SOME (make_deps master' imports', text');
        val current =
          #2 master = #2 master' andalso
            (case lookup_theory name of
              NONE => false
            | SOME theory => Resources.loaded_files_current theory);
      in (current, deps', theory_pos', imports', keywords') end);

in

fun require_thys context initiators qualifier dir strs tasks =
      fold_map (require_thy context initiators qualifier dir) strs tasks |>> forall I
and require_thy context initiators qualifier dir (s, require_pos) tasks =
  let
    val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s;
  in
    (case try (String_Graph.get_node tasks) theory_name of
      SOME task => (task_finished task, tasks)
    | NONE =>
        let
          val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators);

          val (current, deps, theory_pos, imports, keywords) = check_deps master_dir theory_name
            handle ERROR msg =>
              cat_error msg
                ("The error(s) above occurred for theory " ^ quote theory_name ^
                  Position.here require_pos ^ required_by "\n" initiators);

          val qualifier' = Resources.theory_qualifier theory_name;
          val dir' = Path.append dir (master_dir_deps (Option.map #1 deps));

          val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports;
          val (parents_current, tasks') =
            require_thys context (theory_name :: initiators) qualifier' dir' imports tasks;

          val all_current = current andalso parents_current;
          val task =
            if all_current then Finished (get_theory theory_name)
            else
              (case deps of
                NONE => raise Fail "Malformed deps"
              | SOME (dep, text) =>
                  let
                    val update_time = serial ();
                    val load =
                      load_thy context initiators update_time
                        dep text (theory_name, theory_pos) keywords;
                  in Task (parents, load) end);

          val tasks'' = new_entry theory_name parents task tasks';
        in (all_current, tasks'') end)
  end;

end;


(* use theories *)

fun use_theories context qualifier master_dir imports =
  let val (_, tasks) = require_thys context [] qualifier master_dir imports String_Graph.empty
  in if Multithreading.max_threads () > 1 then schedule_futures tasks else schedule_seq tasks end;

fun use_thy name =
  use_theories (default_context ()) Resources.default_qualifier
    Path.current [(name, Position.none)];


(* toplevel scripting -- without maintaining database *)

fun script_thy pos txt thy =
  let
    val trs =
      Outer_Syntax.parse thy pos txt
      |> map (Toplevel.modify_init (K thy));
    val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs);
    val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
  in Toplevel.end_theory end_pos end_state end;


(* register theory *)

fun register_thy theory =
  let
    val name = Context.theory_long_name theory;
    val {master, ...} = Resources.check_thy (Resources.master_directory theory) name;
    val imports = Resources.imports_of theory;
  in
    change_thys (fn thys =>
      let
        val thys' = remove name thys;
        val _ = writeln ("Registering theory " ^ quote name);
      in update (make_deps master imports) theory thys' end)
  end;


(* finish all theories *)

fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry)));

end;

fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name);
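(*Illustrative note, not part of the original sources: require_thy recurses
  through the import graph, re-using theories whose sources are unchanged
  ("all_current") and scheduling load tasks for the rest.  A typical
  interactive entry point is the toplevel wrapper above, e.g.

    use_thy "HOL-Library.Multiset";   (*theory name is hypothetical*)

  which resolves and loads all parent theories before the requested one.*)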
@ -1,252 +1,181 @@
(* Title:      Pure/Thy/thy_output.ML
    Author:     Markus Wenzel, TU Muenchen
    Author:     Makarius

Theory document output with antiquotations.
Theory document output.
*)

signature THY_OUTPUT =
sig
  val display: bool Config.T
  val quotes: bool Config.T
  val margin: int Config.T
  val indent: int Config.T
  val source: bool Config.T
  val break: bool Config.T
  val modes: string Config.T
  val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context
  val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory
  val check_command: Proof.context -> xstring * Position.T -> string
  val check_option: Proof.context -> xstring * Position.T -> string
  val print_antiquotations: bool -> Proof.context -> unit
  val antiquotation: binding -> 'a context_parser ->
    ({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) ->
    theory -> theory
  val boolean: string -> bool
  val integer: string -> int
  val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string
  val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string
  val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
  val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
  val check_comments: Proof.context -> Symbol_Pos.T list -> unit
  val output_token: Proof.context -> Token.T -> Latex.text list
  val output_source: Proof.context -> string -> Latex.text list
  type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
  val present_thy: Options.T -> theory -> segment list -> Latex.text list
  val set_meta_args_parser : (theory -> string parser) -> unit
  val pretty_text: Proof.context -> string -> Pretty.T
  val pretty_term: Proof.context -> term -> Pretty.T
  val pretty_thm: Proof.context -> thm -> Pretty.T
  val str_of_source: Token.src -> string
  val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context ->
    Token.src -> 'a list -> Pretty.T list
  val string_of_margin: Proof.context -> Pretty.T -> string
  val output: Proof.context -> Pretty.T list -> string
  val verbatim_text: Proof.context -> string -> string
  val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source ->
    Toplevel.transition -> Toplevel.transition
  val lines: Latex.text list -> Latex.text list
  val items: Latex.text list -> Latex.text list
  val isabelle: Proof.context -> Latex.text list -> Latex.text
  val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
  val typewriter: Proof.context -> string -> Latex.text
  val verbatim: Proof.context -> string -> Latex.text
  val source: Proof.context -> Token.src -> Latex.text
  val pretty: Proof.context -> Pretty.T -> Latex.text
  val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text
  val pretty_items: Proof.context -> Pretty.T list -> Latex.text
  val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text
  val antiquotation_pretty:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
  val antiquotation_pretty_source:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
  val antiquotation_raw:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
  val antiquotation_verbatim:
    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;

structure Thy_Output: THY_OUTPUT =
struct

(** options **)
(* output document source *)

val display = Attrib.setup_option_bool ("thy_output_display", \<^here>);
val break = Attrib.setup_option_bool ("thy_output_break", \<^here>);
val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>);
val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>);
val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>);
val source = Attrib.setup_option_bool ("thy_output_source", \<^here>);
val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>);
val output_symbols = single o Latex.symbols_output;


structure Wrappers = Proof_Data
(
  type T = ((unit -> string) -> unit -> string) list;
  fun init _ = [];
);

fun add_wrapper wrapper = Wrappers.map (cons wrapper);

val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f);



(** maintain global antiquotations **)

structure Antiquotations = Theory_Data
(
  type T =
    (Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table *
    (string -> Proof.context -> Proof.context) Name_Space.table;
  val empty : T =
    (Name_Space.empty_table Markup.document_antiquotationN,
      Name_Space.empty_table Markup.document_antiquotation_optionN);
  val extend = I;
  fun merge ((commands1, options1), (commands2, options2)) : T =
    (Name_Space.merge_tables (commands1, commands2),
      Name_Space.merge_tables (options1, options2));
);

val get_antiquotations = Antiquotations.get o Proof_Context.theory_of;

fun add_command name cmd thy = thy
  |> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd));

fun add_option name opt thy = thy
  |> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd));

fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt));

fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt));

fun command src state ctxt =
  let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src
  in f src' state ctxt end;

fun option ((xname, pos), s) ctxt =
fun output_comment ctxt (kind, syms) =
  (case kind of
    Comment.Comment =>
      Input.cartouche_content syms
      |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
        {markdown = false}
      |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
  | Comment.Cancel =>
      Symbol_Pos.cartouche_content syms
      |> output_symbols
      |> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
  | Comment.Latex =>
      [Latex.symbols (Symbol_Pos.cartouche_content syms)])
and output_comment_document ctxt (comment, syms) =
  (case comment of
    SOME kind => output_comment ctxt (kind, syms)
  | NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
  Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
  let
    val (_, opt) =
      Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos);
  in opt s ctxt end;

fun print_antiquotations verbose ctxt =
  let
    val (commands, options) = get_antiquotations ctxt;
    val command_names = map #1 (Name_Space.markup_table verbose ctxt commands);
    val option_names = map #1 (Name_Space.markup_table verbose ctxt options);
  in
    [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names),
      Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)]
  end |> Pretty.writeln_chunks;

fun antiquotation name scan body =
  add_command name
    (fn src => fn state => fn ctxt =>
      let val (x, ctxt') = Token.syntax scan src ctxt
      in body {source = src, state = state, context = ctxt'} x end);



(** syntax of antiquotations **)

(* option values *)

fun boolean "" = true
  | boolean "true" = true
  | boolean "false" = false
  | boolean s = error ("Bad boolean value: " ^ quote s);

fun integer s =
  let
    fun int ss =
      (case Library.read_int ss of (i, []) => i
      | _ => error ("Bad integer value: " ^ quote s));
  in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end;


(* outer syntax *)

local

val property =
  Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";

val properties =
  Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) [];

in

val antiq =
  Parse.!!!
    (Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof)
  >> (fn ((name, props), args) => (props, name :: args));

end;


(* eval antiquote *)

local

fun eval_antiq state (opts, src) =
  let
    val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
    val print_ctxt = Context_Position.set_visible false preview_ctxt;

    fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
    val _ = cmd preview_ctxt;
    val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN];
  in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end;

in

fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss
  | eval_antiquote state (Antiquote.Control {name, body, ...}) =
      eval_antiq state
        ([], Token.make_src name (if null body then [] else [Token.read_cartouche body]))
  | eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) =
      let
        val keywords =
          (case try Toplevel.presentation_context_of state of
            SOME ctxt => Thy_Header.get_keywords' ctxt
          | NONE =>
              error ("Unknown context -- cannot expand document antiquotations" ^
                Position.here pos));
      in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end;

end;


(* output text *)

fun output_text state {markdown} source =
  let
    val is_reported =
      (case try Toplevel.context_of state of
        SOME ctxt => Context_Position.is_visible ctxt
      | NONE => true);

    val pos = Input.pos_of source;
    val syms = Input.source_explode source;

    val _ =
      if is_reported then
        Position.report pos (Markup.language_document (Input.is_delimited source))
      else ();

    val output_antiquotes = map (eval_antiquote state) #> implode;
    val output_antiquotes =
      maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);

    fun output_line line =
      (if Markdown.line_is_item line then "\\item " else "") ^
      (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
      output_antiquotes (Markdown.line_content line);

    fun output_blocks blocks = space_implode "\n\n" (map output_block blocks)
    and output_block (Markdown.Par lines) = cat_lines (map output_line lines)
    fun output_block (Markdown.Par lines) =
      Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
      | output_block (Markdown.List {kind, body, ...}) =
          Latex.environment (Markdown.print_kind kind) (output_blocks body);
          Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
    and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
  in
    if Toplevel.is_skipped_proof state then ""
    if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
    else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
    then
      let
        val ants = Antiquote.parse pos syms;
        val ants = Antiquote.parse_comments pos syms;
        val reports = Antiquote.antiq_reports ants;
        val blocks = Markdown.read_antiquotes ants;
        val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else ();
        val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
      in output_blocks blocks end
    else
      let
        val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms);
        val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
        val reports = Antiquote.antiq_reports ants;
        val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else ();
        val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
      in output_antiquotes ants end
  end;


(* output tokens with formal comments *)

local

val output_symbols_antiq =
  (fn Antiquote.Text syms => output_symbols syms
    | Antiquote.Control {name = (name, _), body, ...} =>
        Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
          output_symbols body
    | Antiquote.Antiq {body, ...} =>
        Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));

fun output_comment_symbols ctxt {antiq} (comment, syms) =
  (case (comment, antiq) of
    (NONE, false) => output_symbols syms
  | (NONE, true) =>
      Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
      |> maps output_symbols_antiq
  | (SOME comment, _) => output_comment ctxt (comment, syms));

fun output_body ctxt antiq bg en syms =
  Comment.read_body syms
  |> maps (output_comment_symbols ctxt {antiq = antiq})
  |> Latex.enclose_body bg en;

in

fun output_token ctxt tok =
  let
    fun output antiq bg en =
      output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
  in
    (case Token.kind_of tok of
      Token.Comment NONE => []
    | Token.Command => output false "\\isacommand{" "}"
    | Token.Keyword =>
        if Symbol.is_ascii_identifier (Token.content_of tok)
        then output false "\\isakeyword{" "}"
        else output false "" ""
    | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
    | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
    | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
    | _ => output false "" "")
  end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));

fun output_source ctxt s =
  output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));

fun check_comments ctxt =
  Comment.read_body #> List.app (fn (comment, syms) =>
    let
      val pos = #1 (Symbol_Pos.range syms);
      val _ =
        comment |> Option.app (fn kind =>
          Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind)));
      val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
    in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);

end;



(** present theory source **)

(*NB: arranging white space around command spans is a black art*)

val is_white = Token.is_space orf Token.is_informal_comment;
val is_black = not o is_white;

val is_white_comment = Token.is_informal_comment;
val is_black_comment = Token.is_formal_comment;


(* presentation tokens *)

datatype token =
  No_Token
  Ignore_Token
  | Basic_Token of Token.T
  | Markup_Token of string * string * Input.source
  | Markup_Env_Token of string * string * Input.source
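(*Orientation note, not part of the original file: Basic_Token wraps an
  ordinary outer-syntax token; Markup_Token and Markup_Env_Token carry a
  command name, its DOF meta-argument string, and the document source of a
  text block; Raw_Token (below) passes LaTeX source through unchanged.*)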
@ -255,25 +184,24 @@ datatype token =
fun basic_token pred (Basic_Token tok) = pred tok
  | basic_token _ _ = false;

val improper_token = basic_token Token.is_improper;
val comment_token = basic_token Token.is_comment;
val white_token = basic_token is_white;
val white_comment_token = basic_token is_white_comment;
val blank_token = basic_token Token.is_blank;
val newline_token = basic_token Token.is_newline;


(* output token *)

fun output_token state tok =
fun present_token ctxt tok =
  (case tok of
    No_Token => ""
  | Basic_Token tok => Latex.output_token tok
    Ignore_Token => []
  | Basic_Token tok => output_token ctxt tok
  | Markup_Token (cmd, meta_args, source) =>
      "%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n"
      Latex.enclose_body ("%\n\\isamarkup" ^ cmd (* ^ meta_args *) ^ "{") "%\n}\n"
        (output_document ctxt {markdown = false} source)
  | Markup_Env_Token (cmd, meta_args, source) =>
      Latex.environment ("isamarkup" ^ cmd)
        (meta_args ^ output_text state {markdown = true} source)
      [Latex.environment_block
        ("isamarkup" ^ cmd (* ^ meta_args*))
        (Latex.string meta_args :: output_document ctxt {markdown = true} source)]
  | Raw_Token source =>
      "%\n" ^ output_text state {markdown = true} source ^ "\n");
      Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);


(* command spans *)
@ -285,16 +213,16 @@ datatype span = Span of command * (source * source * source * source) * bool;
fun make_span cmd src =
  let
    fun take_newline (tok :: toks) =
    fun chop_newline (tok :: toks) =
          if newline_token (fst tok) then ([tok], toks, true)
          else ([], tok :: toks, false)
      | take_newline [] = ([], [], false);
      | chop_newline [] = ([], [], false);
    val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
      src
      |> take_prefix (improper_token o fst)
      ||>> take_suffix (improper_token o fst)
      ||>> take_prefix (comment_token o fst)
      ||> take_newline;
      |> chop_prefix (white_token o fst)
      ||>> chop_suffix (white_token o fst)
      ||>> chop_prefix (white_comment_token o fst)
      ||> chop_newline;
  in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
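(*Illustrative sketch, not in the original file: make_span splits the token
  list of one command span into four zones,

    prefix   = leading white/improper material,
    main     = the command together with its arguments,
    suffix   = trailing white space and comments,
    appendix = a final newline token, if present,

  so that surrounding white space can be re-arranged around LaTeX tags.*)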
@ -307,42 +235,72 @@ fun err_bad_nesting pos =
fun edge which f (x: string option, y) =
  if x = y then I
  else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt));
  else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));

val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;

fun read_tag s =
  (case space_explode "%" s of
    ["", b] => (SOME b, NONE)
  | [a, b] => (NONE, SOME (a, b))
  | _ => error ("Bad document_tags specification: " ^ quote s));

in

fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) =
fun make_command_tag options keywords =
  let
    val document_tags =
      map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
    val document_tags_default = map_filter #1 document_tags;
    val document_tags_command = map_filter #2 document_tags;
  in
    fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' =>
      let
        val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));

        val keyword_tags =
          if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
          else Keyword.command_tags keywords cmd_name;
        val command_tags =
          the_list (AList.lookup (op =) document_tags_command cmd_name) @
          keyword_tags @ document_tags_default;

        val active_tag' =
          if is_some tag' then tag'
          else
            (case command_tags of
              default_tag :: _ => SOME default_tag
            | [] =>
                if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
                then active_tag
                else NONE);
      in {tag' = tag', active_tag' = active_tag'} end
  end;

fun present_span thy command_tag span state state'
    (tag_stack, active_tag, newline, latex, present_cont) =
  let
    val ctxt' =
      Toplevel.presentation_context state'
        handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
    val present = fold (fn (tok, (flag, 0)) =>
        Buffer.add (output_token state' tok)
        #> Buffer.add flag
        fold cons (present_token ctxt' tok)
        #> cons (Latex.string flag)
      | _ => I);

    val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;

    val (tag, tags) = tag_stack;
    val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
    val {tag', active_tag'} =
      command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag}
        state state';
    val edge = (active_tag, active_tag');

    val nesting = Toplevel.level state' - Toplevel.level state;

    val active_tag' =
      if is_some tag' then tag'
      else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE
      else
        (case Keyword.command_tags keywords cmd_name of
          default_tag :: _ => SOME default_tag
        | [] =>
            if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
            then active_tag
            else NONE);

    val edge = (active_tag, active_tag');

    val newline' =
      if is_none active_tag' then span_newline else newline;
@ -354,8 +312,8 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf
        tg :: tgs => (tg, tgs)
      | [] => err_bad_nesting (Position.here cmd_pos));

    val buffer' =
      buffer
    val latex' =
      latex
      |> end_tag edge
      |> close_delim (fst present_cont) edge
      |> snd present_cont
@ -365,12 +323,12 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf
    val present_cont' =
      if newline then (present (#3 srcs), present (#4 srcs))
      else (I, present (#3 srcs) #> present (#4 srcs));
  in (tag_stack', active_tag', newline', buffer', present_cont') end;
  in (tag_stack', active_tag', newline', latex', present_cont') end;

fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) =
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
  if not (null tags) then err_bad_nesting " at end of theory"
  else
    buffer
    latex
    |> end_tag (active_tag, NONE)
    |> close_delim (fst present_cont) (active_tag, NONE)
    |> snd present_cont;
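(*Note added for orientation, not in the original sources: the fold state
  threaded through present_span and finished by present_trailer is the tuple

    (tag_stack, active_tag, newline, output, present_cont)

  where the old version accumulates output in a Buffer.T and the new version
  in a Latex.text list built with cons, hence the final rev in present_thy.*)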
@ -386,9 +344,9 @@ val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";

val space_proper =
  Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper;
  Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black;

val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore);
val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper;
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
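(*Sketch, illustration only: space_proper recognises a blank, optional white
  comments, and a following proper ("black") token, i.e. white space that
  actually separates two commands; improper_end and blank_end therefore
  consume trailing improper material only up to such a separator.*)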
@ -408,24 +366,33 @@ val locale =
  Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |--
    Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")")));

val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser)
(*
val meta_args_parser_hook = Synchronized.var "meta args parser hook"
  ((fn thy => fn s => ("",s)): theory -> string parser);
*)
val meta_args_parser_hook = Unsynchronized.ref
  ((fn thy => fn s => ("",s)): theory -> string parser);


in

fun present_thy thy command_results toks =
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};

fun present_thy options thy (segments: segment list) =
  let

    val keywords = Thy_Header.get_keywords thy;


    (* tokens *)

    val ignored = Scan.state --| ignore
      >> (fn d => (NONE, (No_Token, ("", d))));
      >> (fn d => (NONE, (Ignore_Token, ("", d))));

    fun markup pred mk flag = Scan.peek (fn d =>
      improper |--
        Parse.position (Scan.one (fn tok => Token.is_command tok andalso
          pred keywords (Token.content_of tok))) --
        Parse.position (Scan.one (fn tok =>
          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
      Scan.repeat tag --
      (improper |--
        (Parse.!!!!
@ -433,7 +400,7 @@ fun present_thy thy command_results toks =
          -- ( (improper -- locale -- improper)
          |-- (Parse.document_source))
          --| improper_end)))
      >> (fn (((tok, pos'), tags), (meta_args,source)) =>
      >> (fn (((tok, pos'), tags), (meta_args, source)) =>
        let val name = Token.content_of tok
        in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end));
@ -446,9 +413,7 @@ fun present_thy thy command_results toks =
            (Basic_Token cmd, (markup_false, d)))]));

    val cmt = Scan.peek (fn d =>
      (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
        Parse.!!!! (improper |-- Parse.document_source) >>
          (fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d)))));
      Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));

    val other = Scan.peek (fn d =>
      Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
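    (*Note, added illustration: cmt intercepts "--" and \<comment> tokens and
      turns their document source into a Markup_Token, so informal comments
      are typeset like text blocks; other is the catch-all that keeps any
      remaining token as a Basic_Token.*)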
@ -470,13 +435,13 @@ fun present_thy thy command_results toks =
    val cmd = Scan.one (is_some o fst);
    val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;

    val comments = Scan.many (comment_token o fst o snd);
    val white_comments = Scan.many (white_comment_token o fst o snd);
    val blank = Scan.one (blank_token o fst o snd);
    val newline = Scan.one (newline_token o fst o snd);
    val before_cmd =
      Scan.option (newline -- comments) --
      Scan.option (newline -- comments) --
      Scan.option (blank -- comments) -- cmd;
      Scan.option (newline -- white_comments) --
      Scan.option (newline -- white_comments) --
      Scan.option (blank -- white_comments) -- cmd;

    val span =
      Scan.repeat non_cmd -- cmd --
@ -485,202 +450,121 @@ fun present_thy thy command_results toks =
      >> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
        make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));

    val spans = toks
      |> take_suffix Token.is_space |> #1
      |> Source.of_list
    val spans = segments
      |> maps (Command_Span.content o #span)
      |> drop_suffix Token.is_space
      |> Source.of_list
      |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
      |> Source.source stopper (Scan.error (Scan.bulk span))
      |> Source.exhaust;

    val command_results =
      segments |> map_filter (fn {command, state, ...} =>
        if Toplevel.is_ignored command then NONE else SOME (command, state));


    (* present commands *)

    val command_tag = make_command_tag options keywords;

    fun present_command tr span st st' =
      Toplevel.setmp_thread_position tr (present_span keywords span st st');
      Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');

    fun present _ [] = I
      | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest;
      | present st ((span, (tr, st')) :: rest) = present_command tr span st st'
          #> present st' rest;

  in
    if length command_results = length spans then
      ((NONE, []), NONE, true, Buffer.empty, (I, I))
      |> present Toplevel.toplevel (command_results ~~ spans)
      ((NONE, []), NONE, true, [], (I, I))
      |> present Toplevel.toplevel (spans ~~ command_results)
      |> present_trailer
      |> rev
    else error "Messed-up outer syntax for presentation"
  end;

fun set_meta_args_parser f = (meta_args_parser_hook:= f)
(*
fun set_meta_args_parser f = let val _ = writeln "Meta-args parser set to new value"
                             in (Unsynchronized.assign meta_args_parser_hook f) end
*)
fun set_meta_args_parser f = let val _ = writeln "Meta-args parser set to new value"
                             in (meta_args_parser_hook := f) end

end;



(** setup default output **)
(** standard output operations **)

(* options *)
(* pretty printing *)

val _ = Theory.setup
  (add_option @{binding show_types} (Config.put show_types o boolean) #>
   add_option @{binding show_sorts} (Config.put show_sorts o boolean) #>
   add_option @{binding show_structs} (Config.put show_structs o boolean) #>
   add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #>
   add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #>
   add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #>
   add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #>
   add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #>
   add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #>
   add_option @{binding display} (Config.put display o boolean) #>
   add_option @{binding break} (Config.put break o boolean) #>
   add_option @{binding quotes} (Config.put quotes o boolean) #>
   add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #>
   add_option @{binding margin} (Config.put margin o integer) #>
   add_option @{binding indent} (Config.put indent o integer) #>
   add_option @{binding source} (Config.put source o boolean) #>
   add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer));


(* basic pretty printing *)

fun perhaps_trim ctxt =
  not (Config.get ctxt display) ? Symbol.trim_blanks;

fun pretty_text ctxt =
  Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines;

fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
fun pretty_term ctxt t =
  Syntax.pretty_term (Variable.auto_fixes t ctxt) t;

fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;

fun pretty_term_style ctxt (style, t) =
  pretty_term ctxt (style t);

fun pretty_thm_style ctxt (style, th) =
  pretty_term ctxt (style (Thm.full_prop_of th));

fun pretty_term_typ ctxt (style, t) =
  let val t' = style t
  in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end;

fun pretty_term_typeof ctxt (style, t) =
  Syntax.pretty_typ ctxt (Term.fastype_of (style t));

fun pretty_const ctxt c =
  let
    val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c)
      handle TYPE (msg, _, _) => error msg;
    val ([t'], _) = Variable.import_terms true [t] ctxt;
  in pretty_term ctxt t' end;

fun pretty_abbrev ctxt s =
  let
    val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s;
    fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t);
    val (head, args) = Term.strip_comb t;
    val (c, T) = Term.dest_Const head handle TERM _ => err ();
    val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c
      handle TYPE _ => err ();
    val t' = Term.betapplys (Envir.expand_atom T (U, u), args);
    val eq = Logic.mk_equals (t, t');
    val ctxt' = Variable.auto_fixes eq ctxt;
  in Proof_Context.pretty_term_abbrev ctxt' eq end;

fun pretty_locale ctxt (name, pos) =
  let
    val thy = Proof_Context.theory_of ctxt
  in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end;

fun pretty_class ctxt =
  Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt;

fun pretty_type ctxt s =
  let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s
  in Pretty.str (Proof_Context.extern_type ctxt name) end;

fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full;

fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name);


(* default output *)

val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src;
val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");

fun maybe_pretty_source pretty ctxt src xs =
  map (pretty ctxt) xs (*always pretty in order to exhibit errors!*)
  |> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I);
fun isabelle ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabelle" body
  else Latex.block (Latex.enclose_body "\\isa{" "}" body);

fun string_of_margin ctxt = Pretty.string_of_margin (Config.get ctxt margin);
fun isabelle_typewriter ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabellett" body
  else Latex.block (Latex.enclose_body "\\isatt{" "}" body);

fun output ctxt prts =
  prts
  |> Config.get ctxt quotes ? map Pretty.quote
  |> (if Config.get ctxt display then
        map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
        #> space_implode "\\isasep\\isanewline%\n"
        #> Latex.environment "isabelle"
      else
        map
          ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
            #> Output.output)
        #> space_implode "\\isasep\\isanewline%\n"
        #> enclose "\\isa{" "}");
fun typewriter ctxt s =
  isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];

fun verbatim ctxt =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
  else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;

fun source ctxt =
  Token.args_of_src
  #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt)
  #> space_implode " "
  #> output_source ctxt
  #> isabelle ctxt;

fun pretty ctxt =
  Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;

fun pretty_source ctxt src prt =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty ctxt prt;

fun pretty_items ctxt =
  map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;

fun pretty_items_source ctxt src prts =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty_items ctxt prts;


(* verbatim text *)
(* antiquotation variants *)

fun verbatim_text ctxt =
  if Config.get ctxt display then
    split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #>
    Latex.output_ascii #> Latex.environment "isabellett"
  else
    split_lines #>
    map (Latex.output_ascii #> enclose "\\isatt{" "}") #>
    space_implode "\\isasep\\isanewline%\n";
fun antiquotation_pretty name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));

fun antiquotation_pretty_source name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x));

(* antiquotations for basic entities *)
fun antiquotation_raw name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => f ctxt x);

local

fun basic_entities name scan pretty =
  antiquotation name scan (fn {source, context = ctxt, ...} =>
    output ctxt o maybe_pretty_source pretty ctxt source);

fun basic_entities_style name scan pretty =
  antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) =>
    output ctxt
      (maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs));

fun basic_entity name scan = basic_entities name (scan >> single);

in

val _ = Theory.setup
  (basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #>
   basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #>
   basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #>
   basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #>
   basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #>
   basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
   basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #>
   basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #>
   basic_entity @{binding locale} (Scan.lift (Parse.position Args.name)) pretty_locale #>
   basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #>
   basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #>
   basic_entities @{binding prf} Attrib.thms (pretty_prf false) #>
   basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #>
   basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory);

end;


(** document command **)

fun document_command markdown (loc, txt) =
  Toplevel.keep (fn state =>
    (case loc of
      NONE => ignore (output_text state markdown txt)
    | SOME (_, pos) =>
        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
fun antiquotation_verbatim name scan f =
  antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt);

end;
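(*Usage sketch (hypothetical client code, not part of the file above): an
  ontology layer such as Isa_DOF can install its own meta-argument syntax
  through the hook, e.g.

    val _ = Thy_Output.set_meta_args_parser (fn thy => Scan.succeed "");

  which coincides with the default hook that parses nothing and yields the
  empty string.*)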
@ -0,0 +1,555 @@
|
|||
(* Title: Pure/Thy/thy_output.ML
|
||||
Author: Makarius
|
||||
|
||||
Theory document output.
|
||||
*)
|
||||
|
||||
signature THY_OUTPUT =
|
||||
sig
|
||||
val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
|
||||
val check_comments: Proof.context -> Symbol_Pos.T list -> unit
|
||||
val output_token: Proof.context -> Token.T -> Latex.text list
|
||||
val output_source: Proof.context -> string -> Latex.text list
|
||||
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
|
||||
val present_thy: Options.T -> theory -> segment list -> Latex.text list
|
||||
val set_meta_args_parser : (theory -> string parser) -> unit
|
||||
val pretty_term: Proof.context -> term -> Pretty.T
|
||||
val pretty_thm: Proof.context -> thm -> Pretty.T
|
||||
val lines: Latex.text list -> Latex.text list
|
||||
val items: Latex.text list -> Latex.text list
|
||||
val isabelle: Proof.context -> Latex.text list -> Latex.text
|
||||
val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
|
||||
val typewriter: Proof.context -> string -> Latex.text
|
||||
val verbatim: Proof.context -> string -> Latex.text
|
||||
val source: Proof.context -> Token.src -> Latex.text
|
||||
val pretty: Proof.context -> Pretty.T -> Latex.text
|
||||
val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text
|
||||
val pretty_items: Proof.context -> Pretty.T list -> Latex.text
|
||||
val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text
|
||||
val antiquotation_pretty:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
|
||||
val antiquotation_pretty_source:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
|
||||
val antiquotation_raw:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
|
||||
val antiquotation_verbatim:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
|
||||
end;
|
||||
|
||||
structure Thy_Output: THY_OUTPUT =
|
||||
struct
|
||||
|
||||
(* output document source *)
|
||||
|
||||
val output_symbols = single o Latex.symbols_output;
|
||||
|
||||
fun output_comment ctxt (kind, syms) =
|
||||
(case kind of
|
||||
Comment.Comment =>
|
||||
Input.cartouche_content syms
|
||||
|> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
|
||||
{markdown = false}
|
||||
|> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
|
||||
| Comment.Cancel =>
|
||||
Symbol_Pos.cartouche_content syms
|
||||
|> output_symbols
|
||||
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
|
||||
| Comment.Latex =>
|
||||
[Latex.symbols (Symbol_Pos.cartouche_content syms)])
|
||||
and output_comment_document ctxt (comment, syms) =
|
||||
(case comment of
|
||||
SOME kind => output_comment ctxt (kind, syms)
|
||||
| NONE => [Latex.symbols syms])
|
||||
and output_document_text ctxt syms =
|
||||
Comment.read_body syms |> maps (output_comment_document ctxt)
|
||||
and output_document ctxt {markdown} source =
|
||||
let
|
||||
val pos = Input.pos_of source;
|
||||
val syms = Input.source_explode source;
|
||||
|
||||
val output_antiquotes =
|
||||
maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
|
||||
|
||||
fun output_line line =
|
||||
(if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
|
||||
output_antiquotes (Markdown.line_content line);
|
||||
|
||||
fun output_block (Markdown.Par lines) =
|
||||
Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
|
||||
| output_block (Markdown.List {kind, body, ...}) =
|
||||
Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
|
||||
and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
|
||||
in
|
||||
if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
|
||||
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
|
||||
then
|
||||
let
|
||||
val ants = Antiquote.parse_comments pos syms;
|
||||
val reports = Antiquote.antiq_reports ants;
|
||||
val blocks = Markdown.read_antiquotes ants;
|
||||
val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
|
||||
in output_blocks blocks end
|
||||
else
|
||||
let
|
||||
val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
|
||||
val reports = Antiquote.antiq_reports ants;
|
||||
val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
|
||||
in output_antiquotes ants end
|
||||
end;
|
||||
|
||||
|
||||
(* output tokens with formal comments *)
|
||||
|
||||
local
|
||||
|
||||
val output_symbols_antiq =
|
||||
(fn Antiquote.Text syms => output_symbols syms
|
||||
| Antiquote.Control {name = (name, _), body, ...} =>
|
||||
Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
|
||||
output_symbols body
|
||||
| Antiquote.Antiq {body, ...} =>
|
||||
Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
|
||||
|
||||
fun output_comment_symbols ctxt {antiq} (comment, syms) =
|
||||
(case (comment, antiq) of
|
||||
(NONE, false) => output_symbols syms
|
||||
| (NONE, true) =>
|
||||
Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
|
||||
|> maps output_symbols_antiq
|
||||
| (SOME comment, _) => output_comment ctxt (comment, syms));
|
||||
|
||||
fun output_body ctxt antiq bg en syms =
|
||||
Comment.read_body syms
|
||||
|> maps (output_comment_symbols ctxt {antiq = antiq})
|
||||
|> Latex.enclose_body bg en;
|
||||
|
||||
in
|
||||
|
||||
fun output_token ctxt tok =
|
||||
let
|
||||
fun output antiq bg en =
|
||||
output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
|
||||
in
|
||||
(case Token.kind_of tok of
|
||||
Token.Comment NONE => []
|
||||
| Token.Command => output false "\\isacommand{" "}"
|
||||
| Token.Keyword =>
|
||||
if Symbol.is_ascii_identifier (Token.content_of tok)
|
||||
then output false "\\isakeyword{" "}"
|
||||
else output false "" ""
|
||||
| Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
|
||||
| Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
|
||||
| Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
|
||||
| Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
|
||||
| _ => output false "" "")
|
||||
end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
|
||||
|
||||
fun output_source ctxt s =
|
||||
output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
|
||||
|
||||
fun check_comments ctxt =
|
||||
Comment.read_body #> List.app (fn (comment, syms) =>
|
||||
let
|
||||
val pos = #1 (Symbol_Pos.range syms);
|
||||
val _ =
|
||||
comment |> Option.app (fn kind =>
|
||||
Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind)));
|
||||
val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
|
||||
in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
|
||||
|
||||
end;
|
||||
|
||||
|
||||
|
||||
(** present theory source **)
|
||||
|
||||
(*NB: arranging white space around command spans is a black art*)
|
||||
|
||||
val is_white = Token.is_space orf Token.is_informal_comment;
|
||||
val is_black = not o is_white;
|
||||
|
||||
val is_white_comment = Token.is_informal_comment;
|
||||
val is_black_comment = Token.is_formal_comment;
|
||||
|
||||
|
||||
(* presentation tokens *)
|
||||
|
||||
datatype token =
|
||||
Ignore_Token
|
||||
| Basic_Token of Token.T
|
||||
| Markup_Token of string * Input.source
|
||||
| Markup_Env_Token of string * Input.source
|
||||
| Raw_Token of Input.source;
|
||||
|
||||
fun basic_token pred (Basic_Token tok) = pred tok
|
||||
| basic_token _ _ = false;
|
||||
|
||||
val white_token = basic_token is_white;
|
||||
val white_comment_token = basic_token is_white_comment;
|
||||
val blank_token = basic_token Token.is_blank;
|
||||
val newline_token = basic_token Token.is_newline;
|
||||
|
||||
fun present_token ctxt tok =
|
||||
(case tok of
|
||||
Ignore_Token => []
|
||||
| Basic_Token tok => output_token ctxt tok
|
||||
| Markup_Token (cmd, source) =>
|
||||
Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
|
||||
(output_document ctxt {markdown = false} source)
|
||||
| Markup_Env_Token (cmd, source) =>
|
||||
[Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
|
||||
| Raw_Token source =>
|
||||
Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
|
||||
|
||||
|
||||
(* command spans *)
|
||||
|
||||
type command = string * Position.T * string list; (*name, position, tags*)
|
||||
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
|
||||
|
||||
datatype span = Span of command * (source * source * source * source) * bool;
|
||||
|
||||
fun make_span cmd src =
|
||||
let
|
||||
fun chop_newline (tok :: toks) =
|
||||
if newline_token (fst tok) then ([tok], toks, true)
|
||||
else ([], tok :: toks, false)
|
||||
| chop_newline [] = ([], [], false);
|
||||
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
|
||||
src
|
||||
|> chop_prefix (white_token o fst)
|
||||
||>> chop_suffix (white_token o fst)
|
||||
||>> chop_prefix (white_comment_token o fst)
|
||||
||> chop_newline;
|
||||
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
|
||||
|
||||
|
||||
(* present spans *)
|
||||
|
||||
local
|
||||
|
||||
fun err_bad_nesting pos =
|
||||
error ("Bad nesting of commands in presentation" ^ pos);
|
||||
|
||||
fun edge which f (x: string option, y) =
|
||||
if x = y then I
|
||||
else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
|
||||
|
||||
val begin_tag = edge #2 Latex.begin_tag;
|
||||
val end_tag = edge #1 Latex.end_tag;
|
||||
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
|
||||
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
|
||||
|
||||
fun read_tag s =
|
||||
(case space_explode "%" s of
|
||||
["", b] => (SOME b, NONE)
|
||||
| [a, b] => (NONE, SOME (a, b))
|
||||
| _ => error ("Bad document_tags specification: " ^ quote s));
|
||||
|
||||
in
|
||||
|
||||
fun make_command_tag options keywords =
|
||||
let
|
||||
val document_tags =
|
||||
map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
|
||||
val document_tags_default = map_filter #1 document_tags;
|
||||
val document_tags_command = map_filter #2 document_tags;
|
||||
in
|
||||
fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' =>
|
||||
let
|
||||
val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
|
||||
|
||||
val keyword_tags =
|
||||
if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
|
||||
else Keyword.command_tags keywords cmd_name;
|
||||
val command_tags =
|
||||
the_list (AList.lookup (op =) document_tags_command cmd_name) @
|
||||
keyword_tags @ document_tags_default;
|
||||
|
||||
val active_tag' =
|
||||
if is_some tag' then tag'
|
||||
else
|
||||
(case command_tags of
|
||||
default_tag :: _ => SOME default_tag
|
||||
| [] =>
|
||||
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
|
||||
then active_tag
|
||||
else NONE);
|
||||
in {tag' = tag', active_tag' = active_tag'} end
|
||||
end;
|
||||
|
||||
fun present_span thy command_tag span state state'
|
||||
(tag_stack, active_tag, newline, latex, present_cont) =
|
||||
let
|
||||
val ctxt' =
|
||||
Toplevel.presentation_context state'
|
||||
handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
|
||||
val present = fold (fn (tok, (flag, 0)) =>
|
||||
fold cons (present_token ctxt' tok)
|
||||
#> cons (Latex.string flag)
|
||||
| _ => I);
|
||||
|
||||
val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;
|
||||
|
||||
val (tag, tags) = tag_stack;
|
||||
val {tag', active_tag'} =
|
||||
command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag}
|
||||
state state';
|
||||
val edge = (active_tag, active_tag');
|
||||
|
||||
val nesting = Toplevel.level state' - Toplevel.level state;
|
||||
|
||||
val newline' =
|
||||
if is_none active_tag' then span_newline else newline;
|
||||
|
||||
val tag_stack' =
|
||||
if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack
|
||||
else if nesting >= 0 then (tag', replicate nesting tag @ tags)
|
||||
else
|
||||
(case drop (~ nesting - 1) tags of
|
||||
tg :: tgs => (tg, tgs)
|
||||
| [] => err_bad_nesting (Position.here cmd_pos));
|
||||
|
||||
val latex' =
|
||||
latex
|
||||
|> end_tag edge
|
||||
|> close_delim (fst present_cont) edge
|
||||
|> snd present_cont
|
||||
|> open_delim (present (#1 srcs)) edge
|
||||
|> begin_tag edge
|
||||
|> present (#2 srcs);
|
||||
val present_cont' =
|
||||
if newline then (present (#3 srcs), present (#4 srcs))
|
||||
else (I, present (#3 srcs) #> present (#4 srcs));
|
||||
in (tag_stack', active_tag', newline', latex', present_cont') end;
|
||||
|
||||
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
|
||||
if not (null tags) then err_bad_nesting " at end of theory"
|
||||
else
|
||||
latex
|
||||
|> end_tag (active_tag, NONE)
|
||||
|> close_delim (fst present_cont) (active_tag, NONE)
|
||||
|> snd present_cont;
|
||||
|
||||
end;
|
||||
|
||||
|
||||
(* present_thy *)
|
||||
|
||||
local
|
||||
|
||||
val markup_true = "\\isamarkuptrue%\n";
|
||||
val markup_false = "\\isamarkupfalse%\n";
|
||||
|
||||
val space_proper =
|
||||
Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black;
|
||||
|
||||
val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore);
|
||||
val improper = Scan.many is_improper;
|
||||
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
|
||||
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
|
||||
|
||||
val opt_newline = Scan.option (Scan.one Token.is_newline);
|
||||
|
||||
val ignore =
|
||||
Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
|
||||
>> pair (d + 1)) ||
|
||||
Scan.depend (fn d => Scan.one Token.is_end_ignore --|
|
||||
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
|
||||
>> pair (d - 1));
|
||||
|
||||
val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end);
|
||||
|
||||
val locale =
|
||||
Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |--
|
||||
Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")")));
|
||||
|
||||
val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser)
|
||||
|
||||
|
||||
in

type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};

fun present_thy options thy (segments: segment list) =
  let
    val keywords = Thy_Header.get_keywords thy;


    (* tokens *)

    val ignored = Scan.state --| ignore
      >> (fn d => (NONE, (Ignore_Token, ("", d))));

    fun markup pred mk flag = Scan.peek (fn d =>
      improper |--
        Parse.position (Scan.one (fn tok =>
          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
      Scan.repeat tag --
      Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end)
      >> (fn (((tok, pos'), tags), source) =>
        let val name = Token.content_of tok
        in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));

    val command = Scan.peek (fn d =>
      Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
      Scan.one Token.is_command -- Scan.repeat tag
      >> (fn ((cmd_mod, cmd), tags) =>
        map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
          [(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
            (Basic_Token cmd, (markup_false, d)))]));

    (*
    val cmt = Scan.peek (fn d =>
      Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
    *)
    val cmt = Scan.peek (fn d =>
      (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
        Parse.!!!! (improper |-- Parse.document_source) >>
        (fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d)))));

    val other = Scan.peek (fn d =>
      Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));

    val tokens =
      (ignored ||
        markup Keyword.is_document_heading Markup_Token markup_true ||
        markup Keyword.is_document_body Markup_Env_Token markup_true ||
        markup Keyword.is_document_raw (Raw_Token o #2) "") >> single ||
      command ||
      (cmt || other) >> single;

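    (*Illustrative note, not part of the patched sources: "tokens" classifies the
      outer syntax stream, e.g. a heading such as section \<open>Intro\<close>
      becomes a Markup_Token, a body command such as text\<open>...\<close>
      becomes a Markup_Env_Token, and ordinary commands become Basic_Token; the
      DOF-modified "cmt" scanner above additionally routes formal comments
      through the markup machinery, instead of printing them verbatim as in the
      variant commented out above.*)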
    (* spans *)

    val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false;
    val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof;

    val cmd = Scan.one (is_some o fst);
    val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;

    val white_comments = Scan.many (white_comment_token o fst o snd);
    val blank = Scan.one (blank_token o fst o snd);
    val newline = Scan.one (newline_token o fst o snd);
    val before_cmd =
      Scan.option (newline -- white_comments) --
      Scan.option (newline -- white_comments) --
      Scan.option (blank -- white_comments) -- cmd;

    val span =
      Scan.repeat non_cmd -- cmd --
        Scan.repeat (Scan.unless before_cmd non_cmd) --
        Scan.option (newline >> (single o snd))
      >> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
          make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));

    val spans = segments
      |> maps (Command_Span.content o #span)
      |> drop_suffix Token.is_space
      |> Source.of_list
      |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
      |> Source.source stopper (Scan.error (Scan.bulk span))
      |> Source.exhaust;

    val command_results =
      segments |> map_filter (fn {command, state, ...} =>
        if Toplevel.is_ignored command then NONE else SOME (command, state));


    (* present commands *)

    val command_tag = make_command_tag options keywords;

    fun present_command tr span st st' =
      Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');

    fun present _ [] = I
      | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
  in
    if length command_results = length spans then
      ((NONE, []), NONE, true, [], (I, I))
      |> present Toplevel.toplevel (spans ~~ command_results)
      |> present_trailer
      |> rev
    else error "Messed-up outer syntax for presentation"
  end;

fun set_meta_args_parser f = (meta_args_parser_hook := f);

end;
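(*Illustrative sketch, not part of the patched sources: a client such as
  Isabelle/DOF can install its own parser for optional meta-arguments of
  document commands through the hook, e.g.

    val _ = set_meta_args_parser
      (fn _ => Parse.$$$ "[" |-- Parse.name --| Parse.$$$ "]");

  assuming a bracketed-name syntax; the default hook value consumes no input
  and yields "".*)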


(** standard output operations **)

(* pretty printing *)

fun pretty_term ctxt t =
  Syntax.pretty_term (Variable.auto_fixes t ctxt) t;

fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;


(* default output *)

val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");

fun isabelle ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabelle" body
  else Latex.block (Latex.enclose_body "\\isa{" "}" body);

fun isabelle_typewriter ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabellett" body
  else Latex.block (Latex.enclose_body "\\isatt{" "}" body);

fun typewriter ctxt s =
  isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];

fun verbatim ctxt =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
  else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;

fun source ctxt =
  Token.args_of_src
  #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt)
  #> space_implode " "
  #> output_source ctxt
  #> isabelle ctxt;

fun pretty ctxt =
  Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;

fun pretty_source ctxt src prt =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty ctxt prt;

fun pretty_items ctxt =
  map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;

fun pretty_items_source ctxt src prts =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty_items ctxt prts;


(* antiquotation variants *)

fun antiquotation_pretty name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));

fun antiquotation_pretty_source name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x));

fun antiquotation_raw name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => f ctxt x);

fun antiquotation_verbatim name scan f =
  antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt);

end;

@@ -0,0 +1,543 @@
(*  Title:      Pure/Thy/thy_output.ML
    Author:     Makarius

Theory document output.
*)

signature THY_OUTPUT =
sig
  val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
  val check_comments: Proof.context -> Symbol_Pos.T list -> unit
  val output_token: Proof.context -> Token.T -> Latex.text list
  val output_source: Proof.context -> string -> Latex.text list
  type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
  val present_thy: Options.T -> theory -> segment list -> Latex.text list
  val pretty_term: Proof.context -> term -> Pretty.T
  val pretty_thm: Proof.context -> thm -> Pretty.T
  val lines: Latex.text list -> Latex.text list
  val items: Latex.text list -> Latex.text list
  val isabelle: Proof.context -> Latex.text list -> Latex.text
  val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
  val typewriter: Proof.context -> string -> Latex.text
  val verbatim: Proof.context -> string -> Latex.text
  val source: Proof.context -> Token.src -> Latex.text
  val pretty: Proof.context -> Pretty.T -> Latex.text
  val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text
  val pretty_items: Proof.context -> Pretty.T list -> Latex.text
  val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text
  val antiquotation_pretty:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
  val antiquotation_pretty_source:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
  val antiquotation_raw:
    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
  val antiquotation_verbatim:
    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;

structure Thy_Output: THY_OUTPUT =
struct

(* output document source *)

val output_symbols = single o Latex.symbols_output;

fun output_comment ctxt (kind, syms) =
  (case kind of
    Comment.Comment =>
      Input.cartouche_content syms
      |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
        {markdown = false}
      |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
  | Comment.Cancel =>
      Symbol_Pos.cartouche_content syms
      |> output_symbols
      |> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
  | Comment.Latex =>
      [Latex.symbols (Symbol_Pos.cartouche_content syms)])
and output_comment_document ctxt (comment, syms) =
  (case comment of
    SOME kind => output_comment ctxt (kind, syms)
  | NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
  Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
  let
    val pos = Input.pos_of source;
    val syms = Input.source_explode source;

    val output_antiquotes =
      maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);

    fun output_line line =
      (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
        output_antiquotes (Markdown.line_content line);

    fun output_block (Markdown.Par lines) =
          Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
      | output_block (Markdown.List {kind, body, ...}) =
          Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
    and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
  in
    if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
    else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
    then
      let
        val ants = Antiquote.parse_comments pos syms;
        val reports = Antiquote.antiq_reports ants;
        val blocks = Markdown.read_antiquotes ants;
        val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
      in output_blocks blocks end
    else
      let
        val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
        val reports = Antiquote.antiq_reports ants;
        val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
      in output_antiquotes ants end
  end;
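(*Illustrative note, not part of the original sources: with {markdown = true},
  input containing item markers, e.g.

    \<^item> first point
    \<^item> second point

  is read by Markdown.read_antiquotes into a List block and printed as the
  corresponding LaTeX environment via Markdown.print_kind, while plain running
  text takes the last branch and is output as a flat stream of antiquotations.*)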

(* output tokens with formal comments *)

local

val output_symbols_antiq =
  (fn Antiquote.Text syms => output_symbols syms
    | Antiquote.Control {name = (name, _), body, ...} =>
        Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
          output_symbols body
    | Antiquote.Antiq {body, ...} =>
        Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));

fun output_comment_symbols ctxt {antiq} (comment, syms) =
  (case (comment, antiq) of
    (NONE, false) => output_symbols syms
  | (NONE, true) =>
      Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
      |> maps output_symbols_antiq
  | (SOME comment, _) => output_comment ctxt (comment, syms));

fun output_body ctxt antiq bg en syms =
  Comment.read_body syms
  |> maps (output_comment_symbols ctxt {antiq = antiq})
  |> Latex.enclose_body bg en;

in

fun output_token ctxt tok =
  let
    fun output antiq bg en =
      output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
  in
    (case Token.kind_of tok of
      Token.Comment NONE => []
    | Token.Command => output false "\\isacommand{" "}"
    | Token.Keyword =>
        if Symbol.is_ascii_identifier (Token.content_of tok)
        then output false "\\isakeyword{" "}"
        else output false "" ""
    | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
    | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
    | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
    | _ => output false "" "")
  end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));

fun output_source ctxt s =
  output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));

fun check_comments ctxt =
  Comment.read_body #> List.app (fn (comment, syms) =>
    let
      val pos = #1 (Symbol_Pos.range syms);
      val _ =
        comment |> Option.app (fn kind =>
          Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind)));
      val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
    in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);

end;
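(*Illustrative note, not part of the original sources: output_token maps token
  kinds to LaTeX macros, e.g. the command token of "lemma" is printed as
  \isacommand{lemma}, a quoted string as
  {\isachardoublequoteopen}...{\isachardoublequoteclose}, and only Verbatim
  tokens get their embedded antiquotations expanded (antiq = true).*)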


(** present theory source **)

(*NB: arranging white space around command spans is a black art*)

val is_white = Token.is_space orf Token.is_informal_comment;
val is_black = not o is_white;

val is_white_comment = Token.is_informal_comment;
val is_black_comment = Token.is_formal_comment;


(* presentation tokens *)

datatype token =
    Ignore_Token
  | Basic_Token of Token.T
  | Markup_Token of string * Input.source
  | Markup_Env_Token of string * Input.source
  | Raw_Token of Input.source;

fun basic_token pred (Basic_Token tok) = pred tok
  | basic_token _ _ = false;

val white_token = basic_token is_white;
val white_comment_token = basic_token is_white_comment;
val blank_token = basic_token Token.is_blank;
val newline_token = basic_token Token.is_newline;

fun present_token ctxt tok =
  (case tok of
    Ignore_Token => []
  | Basic_Token tok => output_token ctxt tok
  | Markup_Token (cmd, source) =>
      Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
        (output_document ctxt {markdown = false} source)
  | Markup_Env_Token (cmd, source) =>
      [Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
  | Raw_Token source =>
      Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);


(* command spans *)

type command = string * Position.T * string list;  (*name, position, tags*)
type source = (token * (string * int)) list;  (*token, markup flag, meta-comment depth*)

datatype span = Span of command * (source * source * source * source) * bool;

fun make_span cmd src =
  let
    fun chop_newline (tok :: toks) =
          if newline_token (fst tok) then ([tok], toks, true)
          else ([], tok :: toks, false)
      | chop_newline [] = ([], [], false);
    val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
      src
      |> chop_prefix (white_token o fst)
      ||>> chop_suffix (white_token o fst)
      ||>> chop_prefix (white_comment_token o fst)
      ||> chop_newline;
  in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
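(*Illustrative note, not part of the original sources: make_span partitions the
  token list of one command span into prefix/main/suffix/appendix slices, e.g.
  leading blanks end up in the prefix, the command and its arguments in the
  main part, trailing white space and informal comments in the suffix, and a
  final newline in the appendix; the boolean records that trailing newline.*)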


(* present spans *)

local

fun err_bad_nesting pos =
  error ("Bad nesting of commands in presentation" ^ pos);

fun edge which f (x: string option, y) =
  if x = y then I
  else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));

val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;

fun read_tag s =
  (case space_explode "%" s of
    ["", b] => (SOME b, NONE)
  | [a, b] => (NONE, SOME (a, b))
  | _ => error ("Bad document_tags specification: " ^ quote s));
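(*Illustrative note, not part of the original sources: the document_tags option
  is a comma-separated list of entries of the form "%TAG" (a default tag for
  all commands) or "COMMAND%TAG" (a tag for one command), e.g.

    document_tags = "%visible,theory%invisible"

  which read_tag parses as (SOME "visible", NONE) and
  (NONE, SOME ("theory", "invisible")).*)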

in

fun make_command_tag options keywords =
  let
    val document_tags =
      map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
    val document_tags_default = map_filter #1 document_tags;
    val document_tags_command = map_filter #2 document_tags;
  in
    fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' =>
      let
        val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));

        val keyword_tags =
          if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
          else Keyword.command_tags keywords cmd_name;
        val command_tags =
          the_list (AList.lookup (op =) document_tags_command cmd_name) @
          keyword_tags @ document_tags_default;

        val active_tag' =
          if is_some tag' then tag'
          else
            (case command_tags of
              default_tag :: _ => SOME default_tag
            | [] =>
                if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
                then active_tag
                else NONE);
      in {tag' = tag', active_tag' = active_tag'} end
  end;
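(*Illustrative note, not part of the original sources: tags are resolved by
  priority, e.g. an explicit annotation "lemma %invisible ..." determines tag'
  directly; otherwise a per-command entry such as "lemma%visible" from
  document_tags wins over keyword tags and over global "%TAG" defaults, and a
  vacuous keyword inside a proof simply keeps the enclosing active tag.*)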

fun present_span thy command_tag span state state'
    (tag_stack, active_tag, newline, latex, present_cont) =
  let
    val ctxt' =
      Toplevel.presentation_context state'
        handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
    val present = fold (fn (tok, (flag, 0)) =>
        fold cons (present_token ctxt' tok)
        #> cons (Latex.string flag)
      | _ => I);

    val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;

    val (tag, tags) = tag_stack;
    val {tag', active_tag'} =
      command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag}
        state state';
    val edge = (active_tag, active_tag');

    val nesting = Toplevel.level state' - Toplevel.level state;

    val newline' =
      if is_none active_tag' then span_newline else newline;

    val tag_stack' =
      if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack
      else if nesting >= 0 then (tag', replicate nesting tag @ tags)
      else
        (case drop (~ nesting - 1) tags of
          tg :: tgs => (tg, tgs)
        | [] => err_bad_nesting (Position.here cmd_pos));

    val latex' =
      latex
      |> end_tag edge
      |> close_delim (fst present_cont) edge
      |> snd present_cont
      |> open_delim (present (#1 srcs)) edge
      |> begin_tag edge
      |> present (#2 srcs);
    val present_cont' =
      if newline then (present (#3 srcs), present (#4 srcs))
      else (I, present (#3 srcs) #> present (#4 srcs));
  in (tag_stack', active_tag', newline', latex', present_cont') end;

fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
  if not (null tags) then err_bad_nesting " at end of theory"
  else
    latex
    |> end_tag (active_tag, NONE)
    |> close_delim (fst present_cont) (active_tag, NONE)
    |> snd present_cont;

end;

(* present_thy *)

local

val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";

val space_proper =
  Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black;

val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper;
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));

val opt_newline = Scan.option (Scan.one Token.is_newline);

val ignore =
  Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
    >> pair (d + 1)) ||
  Scan.depend (fn d => Scan.one Token.is_end_ignore --|
    (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
    >> pair (d - 1));
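(*Illustrative note, not part of the original sources: "ignore" threads the
  nesting depth of (*<*) ... (*>*) markers through the scan, so tokens between
  the markers are carried at depth > 0 and later suppressed from the output;
  an unmatched (*>*) at depth 0 fails with "Bad nesting of meta-comments".*)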

val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end);

val locale =
  Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |--
    Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")")));

in

type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};

fun present_thy options thy (segments: segment list) =
  let
    val keywords = Thy_Header.get_keywords thy;


    (* tokens *)

    val ignored = Scan.state --| ignore
      >> (fn d => (NONE, (Ignore_Token, ("", d))));

    fun markup pred mk flag = Scan.peek (fn d =>
      improper |--
        Parse.position (Scan.one (fn tok =>
          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
      Scan.repeat tag --
      Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end)
      >> (fn (((tok, pos'), tags), source) =>
        let val name = Token.content_of tok
        in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));

    val command = Scan.peek (fn d =>
      Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
      Scan.one Token.is_command -- Scan.repeat tag
      >> (fn ((cmd_mod, cmd), tags) =>
        map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
          [(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
            (Basic_Token cmd, (markup_false, d)))]));

    val cmt = Scan.peek (fn d =>
      Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));

    val other = Scan.peek (fn d =>
      Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));

    val tokens =
      (ignored ||
        markup Keyword.is_document_heading Markup_Token markup_true ||
        markup Keyword.is_document_body Markup_Env_Token markup_true ||
        markup Keyword.is_document_raw (Raw_Token o #2) "") >> single ||
      command ||
      (cmt || other) >> single;


    (* spans *)

    val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false;
    val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof;

    val cmd = Scan.one (is_some o fst);
    val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;

    val white_comments = Scan.many (white_comment_token o fst o snd);
    val blank = Scan.one (blank_token o fst o snd);
    val newline = Scan.one (newline_token o fst o snd);
    val before_cmd =
      Scan.option (newline -- white_comments) --
      Scan.option (newline -- white_comments) --
      Scan.option (blank -- white_comments) -- cmd;

    val span =
      Scan.repeat non_cmd -- cmd --
        Scan.repeat (Scan.unless before_cmd non_cmd) --
        Scan.option (newline >> (single o snd))
      >> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
          make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));

    val spans = segments
      |> maps (Command_Span.content o #span)
      |> drop_suffix Token.is_space
      |> Source.of_list
      |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
      |> Source.source stopper (Scan.error (Scan.bulk span))
      |> Source.exhaust;
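    (*Illustrative note, not part of the original sources: the span computation
      is a two-stage stream transformation, i.e. the flat token list of all
      command spans is first re-scanned into classified presentation tokens
      (with the meta-comment depth as scanner state) and then grouped back into
      one span per command; present_thy below checks that the number of spans
      matches the number of non-ignored command results.*)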

    val command_results =
      segments |> map_filter (fn {command, state, ...} =>
        if Toplevel.is_ignored command then NONE else SOME (command, state));


    (* present commands *)

    val command_tag = make_command_tag options keywords;

    fun present_command tr span st st' =
      Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');

    fun present _ [] = I
      | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
  in
    if length command_results = length spans then
      ((NONE, []), NONE, true, [], (I, I))
      |> present Toplevel.toplevel (spans ~~ command_results)
      |> present_trailer
      |> rev
    else error "Messed-up outer syntax for presentation"
  end;

end;


(** standard output operations **)

(* pretty printing *)

fun pretty_term ctxt t =
  Syntax.pretty_term (Variable.auto_fixes t ctxt) t;

fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;


(* default output *)

val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");

fun isabelle ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabelle" body
  else Latex.block (Latex.enclose_body "\\isa{" "}" body);

fun isabelle_typewriter ctxt body =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Latex.environment_block "isabellett" body
  else Latex.block (Latex.enclose_body "\\isatt{" "}" body);

fun typewriter ctxt s =
  isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];

fun verbatim ctxt =
  if Config.get ctxt Document_Antiquotation.thy_output_display
  then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
  else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
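(*Illustrative note, not part of the original sources: the thy_output_display
  option selects between block and inline form, e.g. @{term [display] "x + y"}
  is set as an "isabelle" LaTeX environment, while plain @{term "x + y"} is
  set inline as \isa{...}.*)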

fun source ctxt =
  Token.args_of_src
  #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt)
  #> space_implode " "
  #> output_source ctxt
  #> isabelle ctxt;

fun pretty ctxt =
  Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;

fun pretty_source ctxt src prt =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty ctxt prt;

fun pretty_items ctxt =
  map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;

fun pretty_items_source ctxt src prts =
  if Config.get ctxt Document_Antiquotation.thy_output_source
  then source ctxt src else pretty_items ctxt prts;


(* antiquotation variants *)

fun antiquotation_pretty name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));

fun antiquotation_pretty_source name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x));

fun antiquotation_raw name scan f =
  Document_Antiquotation.setup name scan
    (fn {context = ctxt, argument = x, ...} => f ctxt x);

fun antiquotation_verbatim name scan f =
  antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt);

end;