Typing works (more or less) for the value.

Sometimes schematic type variables were left over;
this kind of fault could be suppressed by additional
type annotations.
Sometimes there was confusion over heavily overloaded field names;
this was also worked around by stronger annotations.
Burkhart Wolff 2018-04-27 17:12:42 +02:00
parent fb9da6784e
commit 3e746a4d9d
5 changed files with 71 additions and 86 deletions
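
In essence (a minimal, self-contained sketch of the approach described above; any HOL theory
context is assumed, nothing here is specific to the ontology machinery): free type variables of
the parsed attribute terms are generalized into schematic type variables, and the record-update
term, assembled with dummyT placeholders, is repaired by one final type-inference pass. The two
helpers the patch introduces look as follows:

ML{*
(* Sketch of the helpers introduced in the patch below (assumed context: any HOL theory). *)

(* turn free type variables into schematic ones, e.g.  'a list  becomes  ?'a list *)
fun generalize_typ n = Term.map_type_tfree (fn (str, sort) => Term.TVar ((str, n), sort));

(* run type inference on a single term, resolving dummyT placeholders where possible *)
fun infer_type thy term =
      hd (Type_Infer_Context.infer_types (Proof_Context.init_global thy) [term]);
*}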


@@ -372,9 +372,14 @@ val attribute =
Parse.position Parse.const
-- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.term) "";
val attribute_upd =
val attribute_upd : (((string * Position.T) * string) * string) parser =
Parse.position Parse.const
-- ( Parse.$$$ "+" || Parse.$$$ "=")
-- ( Parse.$$$ "+" || Parse.$$$ ":") (* + still does not work ... Yuck ! ! !*)
--| Parse.$$$ "="
(*
-- ( Parse.keyword_improper "+" || Parse.keyword_improper ":") (* + still does not work ... *)
--| Parse.keyword_improper "="
*)
(* -- ( Parse.keyword_with (fn x => "+=" = x) || Parse.keyword_with (fn x=> ":=" = x)) *)
-- Parse.!!! Parse.term;
(*
@@ -404,11 +409,11 @@ val attributes_upd =
val SPY = Unsynchronized.ref ([]:((term * Position.T) * term) list)
fun convert((Const(s,ty),_), t) X = Const(s^"_update", range_type ty)
fun convert((Const(s,ty),_), t) X = Const(s^"_update", dummyT)
$ Abs("uuu_", type_of t, t) $ X
|convert _ _ = error("Big drama")
|convert _ _ = error("Left-hand side not a doc_class attribute.")
val base = @{term "undefined"}
val base = Const(@{const_name "undefined"},dummyT)
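(* convert turns an assignment ((Const(f,_), pos), rhs) into the record update  f_update (%_. rhs)
   applied to the accumulated value; base is the start value, whose dummyT (like the one above)
   is only filled in by the later type-inference pass. *)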
fun check_classref (SOME(cid,pos')) thy =
let val _ = if not (DOF_core.is_defined_cid_global cid thy)
@@ -423,6 +428,8 @@ fun check_classref (SOME(cid,pos')) thy =
| check_classref NONE _ = DOF_core.default_cid
fun generalize_typ n = Term.map_type_tfree (fn (str,sort)=> Term.TVar((str,n),sort));
fun infer_type thy term = hd (Type_Infer_Context.infer_types (Proof_Context.init_global thy) [term])
fun enriched_document_command markdown (((((oid,pos),cid_pos),
doc_attrs: ((string * Position.T) * string) list),
@@ -437,12 +444,16 @@ fun enriched_document_command markdown (((((oid,pos),cid_pos),
fun enrich_trans thy =
let val cid_long = check_classref cid_pos thy
fun read_assn ((lhs, pos), rhs) = ((Syntax.read_term_global thy lhs,pos),
Syntax.read_term_global thy rhs)
val count = Unsynchronized.ref (0 - 1);
fun incr () = Unsynchronized.inc count
val generalize_term = let val n = incr () in Term.map_types (generalize_typ n) end
fun read_assn ((lhs, pos), rhs) =
((Syntax.read_term_global thy lhs |> generalize_term,pos),
Syntax.read_term_global thy rhs |> generalize_term)
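(* read_assn parses both sides of an assignment and turns their free type variables into
   schematic ones, so that the final type-inference pass can instantiate them. *)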
val assns = map read_assn doc_attrs
val _ = (SPY:=assns)
val defaults = base (* this calculation ignores the defaults *)
val value_term = (fold convert assns defaults) (* |> (Sign.cert_term thy) *)
val value_term = (fold convert assns defaults) |> (infer_type thy)
val name = Context.theory_name thy
in thy |> DOF_core.define_object_global (oid, {pos=pos,
thy_name=name,
@@ -467,16 +478,21 @@ fun update_instance_command (((oid:string,pos),cid_pos),
val cid_long = check_classref cid_pos thy
val _ = if cid_long = DOF_core.default_cid orelse cid = cid_long then ()
else error("incompatible classes:"^cid^":"^cid_long)
val count = Unsynchronized.ref (0 - 1);
fun incr () = Unsynchronized.inc count
val generalize_term = let val n = incr () in Term.map_types (generalize_typ n) end
fun read_assn (((lhs, pos), opn), rhs) =
let val _ = writeln opn in
((Syntax.read_term_global thy lhs,pos), (* this is problematic,
lhs need to be qualified *)
Syntax.read_term_global thy rhs)
((Syntax.read_term_global thy lhs |> generalize_term ,pos),
(* this is problematic,
lhs need to be qualified *)
Syntax.read_term_global thy rhs |> generalize_term)
end
(* Missing: Check that attributes are legal here *)
val assns = map read_assn doc_attrs
val _ = (SPY:=assns)
in thy |> DOF_core.update_value_global oid ((fold convert assns) (* #> (Sign.cert_term thy) *))
in thy |> DOF_core.update_value_global oid ((fold convert assns) #> (infer_type thy))
end
in Toplevel.theory(upd)
end
@@ -631,6 +647,13 @@ val _ = Theory.setup((docitem_ref_antiquotation @{binding docref} DOF_core.defau
end (* struct *)
*}
ML{*
val count = Unsynchronized.ref (0 - 1);
Unsynchronized.inc count;
Unsynchronized.inc count
*}
section{* Syntax for Ontologies (the '' View'' Part III) *}
ML{*
@@ -646,12 +669,16 @@ fun read_parent NONE ctxt = (NONE, ctxt)
fun map_option _ NONE = NONE
|map_option f (SOME x) = SOME (f x);
fun read_fields raw_fields ctxt =
let
val Ts = Syntax.read_typs ctxt (map (fn ((_, raw_T, _),_) => raw_T) raw_fields);
val terms = map ((map_option (Syntax.read_term ctxt)) o snd) raw_fields
val generalize_typ = Term.map_type_tfree (fn (str,sort)=> Term.TVar((str,0),sort));
fun test t1 t2 = Sign.typ_instance (Proof_Context.theory_of ctxt)(t1, generalize_typ t2)
val count = Unsynchronized.ref (0 - 1);
fun incr () = Unsynchronized.inc count
fun test t1 t2 = Sign.typ_instance (Proof_Context.theory_of ctxt)
(t1, AnnoTextelemParser.generalize_typ 0 t2)
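(* test ty ty': the declared field type ty must be an instance of the (generalized) type ty'
   of its default term. *)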
fun check_default (ty,SOME trm) =
let val ty' = (type_of trm)
in if test ty ty'
@@ -717,37 +744,11 @@ val _ =
end (* struct *)
*}
section{* Testing and Validation *}
ML{*
Binding.print;
Syntax.read_sort;
Syntax.read_typ;
Syntax.read_term;
Syntax.pretty_typ;
Parse.document_source;
*}
ML\<open>
open Markup;
Markup.binding;
open Position;
open Binding;
Position.line;
Context.Theory;
Context_Position.report_generic;
Context_Position.report;
Term_Style.parse;
Sign.certify_term;
\<close>
text {* Lq *}
end


@@ -129,7 +129,7 @@ text{* there is a joker type that can be added as place-holder during term const
ML{* Term.dummyT : typ *}
subsection{* Type-Inference *}
subsection{* Type-Certification (=checking that a type annotation is consistent) *}
ML{*
Sign.typ_instance: theory -> typ * typ -> bool;
@@ -150,7 +150,15 @@ Sign.typ_instance @{theory}(ty', ty);
val generalize_typ = Term.map_type_tfree (fn (str,sort)=> Term.TVar((str,0),sort));
Sign.typ_instance @{theory} (ty', generalize_typ ty)
*}
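text{* A small usage sketch (not part of this patch; the types are chosen for illustration only):
a ground type is an instance of its schematic generalization. *}
ML{*
(* sketch: "nat list" is an instance of the schematic generalization of "'a list" *)
val generalize = Term.map_type_tfree (fn (str, sort) => Term.TVar ((str, 0), sort));
Sign.typ_instance @{theory} (@{typ "nat list"}, generalize @{typ "'a list"});
*}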
subsection{* Type-Inference (= inferring consistent type information if possible) *}
text{* Type inference also eliminates joker types such as @{ML dummyT} and produces
instances for schematic type variables where necessary. If it succeeds,
it produces a certifiable term. *}
ML{*
Type_Infer_Context.infer_types: Proof.context -> term list -> term list
*}
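text{* For illustration (a hedged sketch, not part of this patch): a term assembled with
@{ML dummyT} placeholders gets its concrete types reconstructed by type inference. *}
ML{*
(* sketch: reconstruct the types in a term built with dummyT placeholders *)
val raw = Const (@{const_name "Suc"}, dummyT) $ Free ("n", dummyT);
val typed = hd (Type_Infer_Context.infer_types @{context} [raw]);
*}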
section\<open> Front End: \<close>
@@ -307,7 +315,10 @@ Thy_Output.output : Proof.context -> Pretty.T list -> string;
section {* The Nano-Kernel: Contexts, (Theory)-Contexts, (Proof)-Contexts *}
ML{*
open Context
open Context;
Proof_Context.theory_of: Proof.context -> theory;
Proof_Context.init_global: theory -> Proof.context;
*}
ML{*
Context.theory_name;
@@ -564,7 +575,7 @@ Markup.entity;
ML{*
Toplevel.theory;
Toplevel.presentation_context_of; (* Toplevel is a kind of table with call-back functions *)
Proof_Context.consts_of;
Consts.the_const; (* Consts.T is a kind of signature ... *)
Variable.import_terms;
Vartab.update;


@@ -4,21 +4,24 @@ theory Article
begin
(* >> *)
ML{* val keywords = Thy_Header.get_keywords' @{context};
*}
open_monitor*[onto::article]
text*[tit::title]{* Using The Isabelle Ontology Framework*}
text*[stit::subtitle] \<open>Linking the Formal with the Informal\<close>
text*[auth1::author, affiliation="''University of Sheffield''"]\<open>Achim Brucker\<close>
text*[auth1::author, affiliation = "''University of Sheffield''"]\<open>Achim Brucker\<close>
text*[auth2::author, affiliation="''Centrale-Supelec''"] \<open>Idir Ait-Sadoune\<close>
text*[auth3::author, affiliation="''IRT-SystemX''"] \<open>Paolo Crisafulli\<close>
text*[auth2::author, affiliation = "''Centrale-Supelec''"] \<open>Idir Ait-Sadoune\<close>
text*[auth3::author, affiliation = "''IRT-SystemX''"] \<open>Paolo Crisafulli\<close>
text*[auth4::author, affiliation="''Universit\\'e Paris-Sud''"]\<open>Burkhart Wolff\<close>
term "affiliation_update (\<lambda> _ . '''') S"
text*[abs::abstract, keyword_list="[]"] {* Isabelle/Isar is a system
text*[abs::abstract, keyword_list="[]::string list"] {* Isabelle/Isar is a system
framework with many similarities to Eclipse; it is mostly known as part of
Isabelle/HOL, an interactive theorem proving and code generation environment.
Recently, a Document Ontology Framework has been developed as a plugin in
@@ -39,51 +42,22 @@ sed nibh ut lorem integer, maecenas sed mi purus non nunc, morbi pretium tortor.
section* [bgrnd :: introduction] {* Background: Isabelle and Isabelle_DOF *}
text{* As mentioned in @{introduction \<open>intro\<close>} ... *}
(*update_instance*[bgrnd, main_author = "Some(''bu'')", formula="@{term ''a + b = b + a''}"] *)
update_instance*[bgrnd, comment2 = "''bu''"]
update_instance*[bgrnd, comment := "''bu''"]
term scholarly_paper.introduction.comment2_update
ML{*
val thy = @{theory};
val oid = "bgrnd"
val (cid,value) = case DOF_core.get_object_global oid thy of
SOME{cid,value,...} => (cid, value)
| NONE => raise TERM("XX",[]);
val term'' = @{docitem_value \<open>bgrnd\<close>};
(* val term' = Sign.certify_term @{theory} value; *)
simplify;
val a $ b $ c = @{term "X\<lparr>affiliation:='' ''\<rparr>"}
*}
term "scholarly_paper.author.affiliation_update"
term "scholarly_paper.abstract.keyword_list_update"
term "scholarly_paper.introduction.comment2_update"
ML{* val a $ b $ c = @{term "X\<lparr>affiliation:='' ''\<rparr>"}; fold;
*}
ML{* !AnnoTextelemParser.SPY;
fun convert((Const(s,_),_), t) X = Const(s^"_update", dummyT)
$ Abs("uuu_", type_of t, t)
$ X
val base = @{term "undefined"}
val converts = fold convert (!AnnoTextelemParser.SPY) base
*}
term "scholarly_paper.introduction.comment_update"
term "\<lparr>author.tag_attribute=undefined,email=''dfg'',orcid=None,affiliation=undefined\<rparr>"
ML{* !AnnoTextelemParser.SPY *}
definition HURX where "HURX = affiliation((undefined::author)\<lparr>affiliation:='' ''\<rparr>)"
thm HURX_def[simplified]
(*ML{*
val x = @{code "HURX"}
*}
*)
definition HORX
where "HORX = affiliation(\<lparr>author.tag_attribute=undefined,email=''dfg'',orcid=None,affiliation=undefined\<rparr>\<lparr>affiliation:=''e''\<rparr>) "
(* ML{*
@@ -92,8 +66,6 @@ val x = @{code "HORX"}
*)
term related_work.main_author_update
section*[ontomod::technical]{* Modeling Ontologies in Isabelle_DOF *}
text{* Lorem ipsum dolor sit amet, suspendisse non arcu malesuada mollis, nibh morbi,*}


@@ -90,7 +90,8 @@ paragraph* [sdfk] \<open> just a paragraph - lexical variant \<close>
subsection*[sdf]{* shouldn't work, multiple ref. *}
section*[sedf::requirement, alpja= refl]{* works again. One can hover over the class constraint and
section*[sedf::requirement, long_name = "None::string option"]
{* works again. One can hover over the class constraint and
jump to its definition. *}
text\<open>\label{sedf}\<close> (* Hack to make the LaTeX-ing run. Should disappear. *)
@@ -149,6 +150,6 @@ term "a + b = b + a"
section(in order){* sdfsdf*} (* undocumented trouvaille when analysing the code *)
end
end


@@ -30,7 +30,7 @@ doc_class text_section =
*)
doc_class introduction = text_section +
comment2 :: string
comment :: string
doc_class technical = text_section +
definition_list :: "string list" <= "[]"