From e29ee3789db8e0def7cb2641785c11f22c6a92c9 Mon Sep 17 00:00:00 2001
From: bu
Date: Wed, 7 Feb 2018 19:44:27 +0100
Subject: [PATCH] Kind of current status. Crudely carved out of another
 repository - not sure that this works.

---
 Assert.thy                  |  68 +++++
 AssertLong.thy              |  57 +++++
 CC_ISO15408.thy             |   0
 CENELEC_50126.thy           | 147 +++++++++++
 Isa_DOF.thy                 | 430 +++++++++++++++++++++++++++++++
 Isa_MOF.thy                 | 236 +++++++++++++++++
 MyCommentedIsabelle.thy     | 498 ++++++++++++++++++++++++++++++++++++
 README.md                   |   5 +
 ROOT                        |  60 +++++
 RegExp.thy                  | 157 ++++++++++++
 examples/simple/Example.thy |  60 +++++
 11 files changed, 1718 insertions(+)
 create mode 100644 Assert.thy
 create mode 100644 AssertLong.thy
 create mode 100644 CC_ISO15408.thy
 create mode 100644 CENELEC_50126.thy
 create mode 100644 Isa_DOF.thy
 create mode 100644 Isa_MOF.thy
 create mode 100644 MyCommentedIsabelle.thy
 create mode 100644 ROOT
 create mode 100644 RegExp.thy
 create mode 100644 examples/simple/Example.thy

diff --git a/Assert.thy b/Assert.thy
new file mode 100644
index 0000000..b4e9953
--- /dev/null
+++ b/Assert.thy
@@ -0,0 +1,68 @@
+(* Little theory implementing an assertion command in Isabelle/HOL. *)
+
+theory Assert
+  imports Main
+  keywords "assert" ::thy_decl
+
+begin
+
+
+ML{*
+
+(* Reimplementation needed because not exported from ML structure Value_Command *)
+fun value_maybe_select some_name =
+      case some_name
+        of NONE => Value_Command.value
+         | SOME name => Value_Command.value_select name;
+
+(* Reimplementation of structure Value_Command, due to a tiny modification of value_cmd. *)
+
+fun assert_cmd some_name modes raw_t state =
+  let
+    val ctxt = Toplevel.context_of state;
+    val t = Syntax.read_term ctxt raw_t;
+    val t' = value_maybe_select some_name ctxt t;
+    val ty' = Term.type_of t';
+    val ty' = case ty' of @{typ "bool"} => ty' | _ => error "Assertion expressions must be boolean.";
+    val t'  = case t'  of @{term "True"} => t' | _ => error "Assertion failed.";
+    val ctxt' = Variable.auto_fixes t' ctxt;
+    val p = Print_Mode.with_modes modes (fn () =>
+      Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk,
+        Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
+  in Pretty.writeln p end;
+
+*}
+
+ML{*
+(* Reimplementation needed because not exported from ML structure Value_Command *)
+val opt_modes =
+  Scan.optional (@{keyword "("} |-- Parse.!!!
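+     (* Presumably this accepts an optional "(mode1 mode2 ...)" prefix of
+        print-mode names before the term; Parse.!!! commits the parser after
+        the opening "(", turning a malformed mode list into a hard error. *)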
(Scan.repeat1 Parse.name --| @{keyword ")"})) []; + +(* Reimplementation needed because not exported from ML structure Value_Command *) +val opt_evaluator = + Scan.option (@{keyword "["} |-- Parse.name --| @{keyword "]"}) + +(* NEW *) +val _ = + Outer_Syntax.command @{command_keyword assert} "evaluate and print term" + (opt_evaluator -- opt_modes -- Parse.term + >> (fn ((some_name, modes), t) => Toplevel.keep (assert_cmd some_name modes t) )); +*} + + +(* Test: + + assert "" + assert "3 = 4" + assert "False" + assert "5 * 5 = 25 " + +*) + +assert "True \ True " + +assert "(5::int) * 5 = 25 " + + +end + \ No newline at end of file diff --git a/AssertLong.thy b/AssertLong.thy new file mode 100644 index 0000000..a029ec6 --- /dev/null +++ b/AssertLong.thy @@ -0,0 +1,57 @@ +theory AssertLong + imports Main + keywords "assert" ::thy_decl + +begin + + + + +ML{* + +fun value_maybe_select some_name = + case some_name + of NONE => Value_Command.value + | SOME name => Value_Command.value_select name; + +val TT = Unsynchronized.ref (HOLogic.boolT); + +fun value_cmd2 some_name modes raw_t state = + let + val ctxt = Toplevel.context_of state; + val t = Syntax.read_term ctxt raw_t; + val t' = value_maybe_select some_name ctxt t; + val ty' = Term.type_of t'; + val t' = case ty' of @{typ "bool"} => t' | _ => error "Assertion expressions must be boolean."; + val t' = case t' of @{term "True"} => t' | _ => error "Assertion failed."; + val ctxt' = Variable.auto_fixes t' ctxt; + val p = Print_Mode.with_modes modes (fn () => + Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk, + Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) (); + in Pretty.writeln p end; + +*} +ML{* value_cmd2*} +definition ASSERT :: "bool \ bool" where "ASSERT p == (p=True)" +ML{* val x = @{code "ASSERT"} *} +ML{* +val opt_modes = + Scan.optional (@{keyword "("} |-- Parse.!!! 
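+     (* Same optional print-mode prefix as in Assert.thy, duplicated here
+        because Value_Command does not export its parser. *)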
(Scan.repeat1 Parse.name --| @{keyword ")"})) [];
+
+val opt_evaluator =
+  Scan.option (@{keyword "["} |-- Parse.name --| @{keyword "]"})
+
+val _ =
+  Outer_Syntax.command @{command_keyword assert} "evaluate and print term"
+    (opt_evaluator -- opt_modes -- Parse.term
+      >> (fn ((some_name, modes), t) =>
+            let val _ = writeln t in
+               (* Toplevel.keep (Value_Command.value_cmd some_name modes (enclose "ASSERT(" ")" t)) *)
+               Toplevel.keep (value_cmd2 some_name modes t)
+            end));
+*}
+
+assert "True"
+assert "True \<and> True "
+ML{* !TT ;
+     @{term "True"} *}
+
+end
\ No newline at end of file
diff --git a/CC_ISO15408.thy b/CC_ISO15408.thy
new file mode 100644
index 0000000..e69de29
diff --git a/CENELEC_50126.thy b/CENELEC_50126.thy
new file mode 100644
index 0000000..1c830e3
--- /dev/null
+++ b/CENELEC_50126.thy
@@ -0,0 +1,147 @@
+chapter \<open>An Outline of a CENELEC Ontology\<close>
+
+text{* NOTE: An Ontology-Model of a certification standard such as CENELEC or
+Common Criteria identifies:
+- notions (conceptual \emph{categories}) having \emph{instances}
+  (similar to classes and objects),
+- their subtype relation (e.g., a "SRAC" is an "assumption"),
+- their syntactical structure
+  (for the moment: defined by regular expressions describing the
+   order of category instances in the overall document)
+ *}
+
+theory CENELEC_50126
+  imports Isa_DOF
+begin
+
+text{* Excerpt of the BE EN 50128:2011 *}
+
+section {* Requirements-Analysis related Categories *}
+
+doc_class requirement_analysis_item =
+   long_name :: "string option"
+
+
+doc_class requirement_analysis =
+   no :: "nat"
+   where "requirement_analysis_item +"
+
+text{*The category @{emph \<open>hypothesis\<close>} is used for assumptions from the
+      foundational mathematical or physical domain, so for example:
+      \<^item> the Mordell-Lang conjecture holds,
+      \<^item> Euclidean geometry is assumed, or
+      \<^item> Newtonian (non-relativistic) physics is assumed,
+      \<^item> @{term "P \<noteq> NP"},
+      \<^item> or the computational hardness of certain functions
+        relevant for cryptography (like prime factorization).
+      Their acceptance is inherently outside the scope of the model
+      and can only be established inside the certification process by
+      an authority argument.
+*}
+
+datatype hyp_type = physical | mathematical | computational | other
+
+doc_class hypothesis = requirement_analysis_item +
+      hyp_type :: hyp_type -- physical  (* default *)
+
+text{*The category @{emph \<open>assumption\<close>} is used for domain-specific assumptions.
+      It has formal, semi-formal and informal sub-categories. They have to be
+      tracked and discharged by appropriate validation procedures within a
+      certification process, be it by test or proof. *}
+
+datatype ass_kind = informal | semiformal | formal
+
+doc_class assumption = requirement_analysis_item +
+     assumption_kind :: ass_kind
+
+text{*The category @{emph \<open>exported constraint\<close>} (or @{emph \<open>ec\<close>} for short)
+      is used for formal assumptions that arise during the analysis,
+      design or implementation and have to be tracked till the final
+      evaluation target, and discharged by appropriate validation procedures
+      within the certification process, be it by test or proof. *}
+
+doc_class ec = assumption  +
+     assumption_kind :: ass_kind -- (*default *) formal
+
+text{*The category @{emph \<open>safety related application condition\<close>} (or @{emph \<open>srac\<close>}
+      for short) is used for @{typ ec}'s that establish safety properties
+      of the evaluation target. Their traceability throughout the certification
+      is therefore particularly critical.
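+      An instance of this category (ass122) is declared in the example
+      section at the end of this theory.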
+ *}
+
+doc_class srac = ec  +
+     assumption_kind :: ass_kind -- (*default *) formal
+
+section {* Testing related Categories *}
+
+doc_class test_item =
+      nn :: "string option"
+
+text{* Its subcategories are: *}
+
+datatype test_kind = blackbox | whitebox | greybox | fuzz | perturbation
+
+datatype test_coverage_criterion =
+             allpathk  nat nat     (* depth, test_coverage_degree *)
+           | mcdc  nat             (* depth, test_coverage_degree *)
+           | exhaustive
+           | dnf_E_d  string nat   (* equivalence class testing *)
+           | other string
+
+doc_class test_specification = test_item +
+      short_goal :: string
+
+doc_class test_case = test_item +
+      kind  :: test_kind
+      descr :: string
+
+
+doc_class test_result = test_item  +
+      verdict :: bool
+      remarks :: string
+      covvrit :: test_coverage_criterion
+
+datatype test_environment_kind = hardware_in_the_loop ("hil")
+                               | simulated_hardware_in_the_loop ("shil")
+
+doc_class test_environment = test_item +
+      descr :: string
+      kind  :: test_environment_kind -- shil
+
+doc_class test_tool = test_item +
+      descr :: string
+
+doc_class test_requirement = test_item +
+      descr :: string
+
+doc_class test_adm_role = test_item +
+      name :: string
+
+doc_class test_documentation =
+      no :: "nat"
+      where "(test_specification.((test_case.test_result)+.(test_environment|test_tool))+.
+             [test_requirement].test_adm_role)"
+
+section{* Example *}
+
+text*[ass122::srac] {* The overall sampling frequency of the odometer
+subsystem is therefore 14 kHz, which includes sampling, computing and
+result communication times... *}
+
+text*[t10::test_result] {* This is a meta-test. This could be an ML-command
+that governs the external test-execution via, e.g., a makefile or specific calls
+to a test-environment or test-engine. *}
+
+text \<open> As established by @{docref \<open>t10\<close>}, the
+       assumption @{docref \<open>ass122\<close>} is validated. \<close>
+
+(* Hack: This should be generated automatically: *)
+ML{*
+val _ = Theory.setup
+          ((DocAttrParser.control_antiquotation @{binding srac} {strict_checking=true} "\\autoref{" "}" ) #>
+           (DocAttrParser.control_antiquotation @{binding ec}  {strict_checking=true} "\\autoref{" "}") #>
+           (DocAttrParser.control_antiquotation @{binding requirement_analysis_item} {strict_checking=true} "\\label{" "}"))
+
+*}
+
+end
\ No newline at end of file
diff --git a/Isa_DOF.thy b/Isa_DOF.thy
new file mode 100644
index 0000000..bb194b5
--- /dev/null
+++ b/Isa_DOF.thy
@@ -0,0 +1,430 @@
+chapter \<open>The Document-Type Support Framework for Isabelle\<close>
+
+text{* Offering reflection to ML for class hierarchies, objects and object states.
+
+       Isar syntax for these, assuming that class entities fit predefined
+       Isar keywords.
+ *}
+
+text{* In this section we develop, on the basis of a reference-management mechanism,
+Isar markups that provide direct support in the PIDE framework.
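+The sections below are organised accordingly: first a home-grown ''Model''
+maintaining tables of document classes and document-object references, then
+the Isar command syntax on top of it (the ''View'').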
*} + +theory Isa_DOF (* Isabelle Document Ontology Framework *) + imports Main (* Isa_MOF *) + keywords "section*" "subsection*" "subsubsection*" + "paragraph*" "subparagraph*" "text*" "declare_reference"::thy_decl + and + "doc_class" :: thy_decl + +begin + + +section{* Reflection of MOF into ML *} +(* +ML_file "MOF.sml" + + +ML{* val mt = @{code mt} *} + +ML{* val replace_class_hierarchy = @{code replace_class_hierarchy} + val get_class = @{code get_class}; +*} +*) + +section{* A HomeGrown Document Type Management (the ''Model'') *} + +ML{* +curry; +op |>; +op #>; +op |>> : ('a * 'c) * ('a -> 'b) -> 'b * 'c; +op ||> : ('c * 'a) * ('a -> 'b) -> 'c * 'b +*} + +ML{* +fun H1 f (a,b) = (f a, b) +fun H2 f (a,b) = (a, f b) +*} + +ML{* + +structure DocObjTab = +struct + datatype docclass_struct = DocClassStruct of + {inherits_from : string option, + pos : Position.T, thy_name : string, + id : serial, (* for pide *) + attribute_decl : (string * string * string option) list} + + type docclass_tab = docclass_struct Symtab.table + + val initial_docclass_tab = Symtab.empty:docclass_tab + + fun merge_docclass_tab (otab,otab') = Symtab.merge (op =) (otab,otab') + + fun is_subclass (tab:docclass_tab) s t = + let val _ = case Symtab.lookup tab t of + NONE => error "super doc_class reference not defined" + | SOME X => X + fun father_is_sub s = case Symtab.lookup tab s of + NONE => error "inconsistent doc_class hierarchy" + | SOME (DocClassStruct{inherits_from=NONE, ...}) => s = t + | SOME (DocClassStruct{inherits_from=SOME s', ...}) => s' = t + orelse + father_is_sub s' + in s = t orelse father_is_sub s + end + + type docobj = {pos : Position.T, thy_name : string, id : serial, cid : string} + + type docobj_tab ={tab : (docobj option) Symtab.table, + maxano : int + } + + val initial_docobj_tab:docobj_tab = {tab = Symtab.empty, maxano = 0} + + fun merge_docobj_tab ({tab=otab,maxano=m}, {tab=otab',maxano=m'}) = + (let fun X(NONE,NONE) = false + |X(SOME _, NONE) = false + |X(NONE, SOME _) = false + |X(SOME b, SOME b') = true (* b = b' *) + in {tab=Symtab.merge X (otab,otab'),maxano=Int.max(m,m')} + end) + + +structure Data = Generic_Data +( + type T = docobj_tab * docclass_tab + val empty = (initial_docobj_tab, initial_docclass_tab) + val extend = I + fun merge((d1,c1),(d2,c2)) = (merge_docobj_tab (d1, d2), merge_docclass_tab(c1,c2)) +); + +val get_data = Data.get o Context.Proof; +val map_data = Data.map; +val get_data_global = Data.get o Context.Theory; +val map_data_global = Context.theory_map o map_data; + + +fun is_declared_oid_global oid thy = let val {tab=t,maxano=_} = fst(get_data_global thy) + in Symtab.defined t oid end + +fun is_declared_oid_local oid thy = let val {tab=t,maxano=_} = fst(get_data thy) + in Symtab.defined t oid end + +fun is_defined_oid_global oid thy = let val {tab=t,maxano=_} = fst(get_data_global thy) + in case Symtab.lookup t oid of + NONE => false + |SOME(NONE) => false + |SOME _ => true + end + +fun is_defined_oid_local oid thy = let val {tab=t,maxano=_} = fst(get_data thy) + in case Symtab.lookup t oid of + NONE => false + |SOME(NONE) => false + |SOME _ => true + end + + +fun declare_object_global oid thy = (map_data_global + (H1(fn {tab=t,maxano=x} => + {tab=Symtab.update_new(oid,NONE)t, + maxano=x})) + (thy) + handle Symtab.DUP _ => + error("multiple declaration of document reference")) + +fun declare_object_local oid ctxt = (map_data (H1(fn {tab=t,maxano=x} => + {tab=Symtab.update_new(oid,NONE) t, + maxano=x})) + (ctxt) + handle Symtab.DUP _ => + error("multiple declaration 
of document reference")) + + +fun define_object_global (oid, bbb) thy = + let val nn = Context.theory_name thy (* in case that we need the thy-name to identify + the space where it is ... *) + in if is_defined_oid_global oid thy + then error("multiple definition of document reference") + else map_data_global (H1(fn {tab=t,maxano=x} => + {tab=Symtab.update(oid,SOME bbb) t, + maxano=x})) + (thy) + end + +fun define_object_local (oid, bbb) ctxt = + map_data (H1(fn {tab=t,maxano=x} => + {tab=Symtab.update(oid,SOME bbb) t,maxano=x})) ctxt + + +(* declares an anonyme label of a given type and generates a unique reference ... *) +fun declare_anoobject_global thy cid = map_data_global + (H1(fn {tab=t,maxano=x} => + let val str = cid^":"^Int.toString(x+1) + val _ = writeln("Anonymous doc-ref declared: " + ^str) + in {tab=Symtab.update(str,NONE)t,maxano=x+1} + end)) + (thy) + +fun declare_anoobject_local ctxt cid = map_data + (H1(fn {tab=t,maxano=x} => + let val str = cid^":"^Int.toString(x+1) + val _ = writeln("Anonymous doc-ref declared: " + ^str) + in {tab=Symtab.update(str,NONE)t, maxano=x+1} + end)) + (ctxt) + +fun get_object_global oid thy = let val {tab=t,maxano=_} = fst(get_data_global thy) + in case Symtab.lookup t oid of + NONE => error"undefined reference" + |SOME(bbb) => bbb + end + +fun get_object_local oid ctxt = let val {tab=t,maxano=_} = fst(get_data ctxt) + in case Symtab.lookup t oid of + NONE => error"undefined reference" + |SOME(bbb) => bbb + end + + +fun writeln_keys ctxt = let val {tab=t,maxano=_} = fst(get_data ctxt) + in writeln (String.concatWith "," (Symtab.keys t)) end +end (* struct *) +*} + + +section{* Syntax for Annotated Documentation Commands (the '' View'') *} + + + + +ML{* +structure DocAttrParser = +struct + +val semi = Scan.option (Parse.$$$ ";"); + +val attribute = + Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) ""; + +val reference = + Parse.position Parse.name -- Scan.optional (Parse.$$$ "::" |-- Parse.!!! Parse.name) ""; + +val attributes = (Parse.$$$ "[" |-- (reference -- + (Scan.optional(Parse.$$$ "," |-- (Parse.enum "," attribute))) [])) + --| Parse.$$$ "]" + +val docrefN = "docref"; + +(* derived from: theory_markup *) +fun docref_markup def name id pos = + if id = 0 then Markup.empty + else + Markup.properties (Position.entity_properties_of def id pos) + (Markup.entity docrefN name); (* or better store the thy-name as property ? ? ? *) + +fun init_docref_markup (name, pos) thy = (* now superfluous ? *) + let + val id = serial (); + val _ = Position.report pos (docref_markup true name id pos); + in (* map_thy (fn (_, _, axioms, defs, wrappers) => (pos, id, axioms, defs, wrappers)) *) + thy + end; + +(* old : +fun enriched_document_command markdown (((((oid,pos),dtyp),doc_attrs),xstring_opt),toks) = + + (Thy_Output.document_command markdown (xstring_opt,toks)) o + (Toplevel.theory(DocObjTab.define_object_global (oid, (Binding.make (oid, pos),dtyp)))) +*) +(* new *) +(* +{markdown: bool} -> + ((((string * Position.T) * string) * 'a) * (xstring * Position.T) option) * Input.source + -> Toplevel.transition -> Toplevel.transition +*) +fun enriched_document_command markdown (((((oid,pos),dtyp:string),doc_attrs), + xstring_opt:(xstring * Position.T) option), + toks:Input.source) = + let + val id = serial (); + val _ = Position.report pos (docref_markup true oid id pos); + (* creates a markup label for this position and reports it to the PIDE framework; + this label is used as jump-target for point-and-click feature. 
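+        The entity markup carries the properties delivered by
+        Position.entity_properties_of; check_and_mark below reports every
+        reference against this label, which is what the front-end resolves
+        for the jump.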
*) + fun enrich_trans thy = let val name = Context.theory_name thy + in DocObjTab.define_object_global (oid, {pos=pos, thy_name=name, + id=id , cid=dtyp}) + (thy) + end + fun MMM(SOME(s,p)) = SOME(s^"XXX",p) + |MMM(NONE) = SOME("XXX",Position.id "") + in + (Toplevel.theory(enrich_trans)) #> + (Thy_Output.document_command markdown (MMM xstring_opt,toks)) #> + (Thy_Output.document_command markdown (SOME("\\label{"^oid^"}", Position.id ""),toks)) #> + (Thy_Output.document_command markdown (SOME("\\label{"^oid^"}", Position.id ""),toks)) + end; + + +val _ = + Outer_Syntax.command ("section*", @{here}) "section heading" + (attributes -- Parse.opt_target -- Parse.document_source --| semi + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command ("subsection*", @{here}) "subsection heading" + (attributes -- Parse.opt_target -- Parse.document_source --| semi + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command ("subsubsection*", @{here}) "subsubsection heading" + (attributes -- Parse.opt_target -- Parse.document_source --| semi + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command ("paragraph*", @{here}) "paragraph heading" + (attributes -- Parse.opt_target -- Parse.document_source --| semi + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command ("subparagraph*", @{here}) "subparagraph heading" + (attributes -- Parse.opt_target -- Parse.document_source --| semi + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command ("text*", @{here}) "formal comment (primary style)" + (attributes -- Parse.opt_target -- Parse.document_source + >> enriched_document_command {markdown = false}); + +val _ = + Outer_Syntax.command @{command_keyword "declare_reference"} "declare document reference" + (attributes >> (fn (((oid,pos),cid),doc_attrs) => + (Toplevel.theory (DocObjTab.declare_object_global oid)))); + + +(* Proof.context -> Symtab.key * Position.T -> Pretty.T ; dead code: +fun pretty_docref ctxt (name, pos) = + let + (* val _ = DocObjTab.writeln_keys ctxt *) + val thy = Proof_Context.theory_of ctxt; + fun pretty_docref str = let val tok = Pretty.enclose "\\autoref{" "}" (Pretty.text (str)) + (* val _ = writeln (Pretty.string_of tok) *) + in tok end + in + if DocObjTab.is_defined_oid_global name thy + then let val {pos=pos_decl,id,...} = the(DocObjTab.get_object_global name thy) + val markup = docref_markup false name id pos_decl; + val _ = Context_Position.report ctxt pos markup; + (* this sends a report to the PIDE interface ... *) + in pretty_docref name end + else if DocObjTab.is_declared_oid_global name thy + then (warning("declared but undefined document reference:"^name); + pretty_docref name) + else error("undefined document reference:"^name) + end +*) + + +fun check_and_mark ctxt (str:{strict_checking: bool}) pos name = + let + val thy = Proof_Context.theory_of ctxt; + in + if DocObjTab.is_defined_oid_global name thy + then let val {pos=pos_decl,id,...} = the(DocObjTab.get_object_global name thy) + val markup = docref_markup false name id pos_decl; + val _ = Context_Position.report ctxt pos markup; + (* this sends a report to the PIDE interface ... 
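+            Note that references to merely declared (not yet defined) objects
+            are handled by the strict_checking branch below: a warning if the
+            flag is set, silent acceptance otherwise.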
*) + in name end + else if DocObjTab.is_declared_oid_global name thy + then (if #strict_checking str then warning("declared but undefined document reference:"^name) + else (); name) + else error("undefined document reference:"^name) + end + +(* superfluous : +fun basic_entities_style name scan pretty = + Thy_Output.antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) => + Thy_Output.output ctxt + (Thy_Output.maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs)); + +fun basic_entities name scan pretty = + Thy_Output.antiquotation name scan (fn {source, context = ctxt, ...} => + Thy_Output.output ctxt o Thy_Output.maybe_pretty_source pretty ctxt source); + +fun basic_entity name scan = basic_entities name (scan >> single); +*) + +fun control_antiquotation name (str:{strict_checking: bool}) s1 s2 = + Thy_Output.antiquotation name (Scan.lift (Args.cartouche_input)) + (fn {context =ctxt, source = src:Token.src, state} => + fn source:Input.source => + (Thy_Output.output_text state {markdown=false} #> + check_and_mark ctxt (str:{strict_checking: bool})(Input.pos_of source) #> + enclose s1 s2) + source); + + +val _ = Theory.setup + ((control_antiquotation @{binding docref} {strict_checking=true} "\\autoref{" "}" ) #> + (control_antiquotation @{binding docref_unchecked}{strict_checking=false} "\\autoref{" "}")#> + (control_antiquotation @{binding docref_define} {strict_checking=false} "\\label{" "}")) + +end (* struct *) +*} + +ML{* + +fun read_parent NONE ctxt = (NONE, ctxt) + | read_parent (SOME raw_T) ctxt = + (case Proof_Context.read_typ_abbrev ctxt raw_T of + Type (name, Ts) => (SOME (Ts, name), fold Variable.declare_typ Ts ctxt) + | T => error ("Bad parent record specification: " ^ Syntax.string_of_typ ctxt T)); + +fun read_fields raw_fields ctxt = + let + val Ts = Syntax.read_typs ctxt (map (fn (_, raw_T, _) => raw_T) raw_fields); + val fields = map2 (fn (x, _, mx) => fn T => (x, T, mx)) raw_fields Ts; + val ctxt' = fold Variable.declare_typ Ts ctxt; + in (fields, ctxt') end; + +fun add_record_cmd overloaded (raw_params, binding) raw_parent raw_fields thy = + let + val ctxt = Proof_Context.init_global thy; + val params = map (apsnd (Typedecl.read_constraint ctxt)) raw_params; + val ctxt1 = fold (Variable.declare_typ o TFree) params ctxt; + val (parent, ctxt2) = read_parent raw_parent ctxt1; + val (fields, ctxt3) = read_fields raw_fields ctxt2; + val params' = map (Proof_Context.check_tfree ctxt3) params; + in thy |> Record.add_record overloaded (params', binding) parent fields end; + + +val _ = + Outer_Syntax.command @{command_keyword doc_class} "define document class" + (Parse_Spec.overloaded + -- (Parse.type_args_constrained -- Parse.binding) + -- (@{keyword "="} |-- Scan.option (Parse.typ --| @{keyword "+"}) + -- Scan.repeat1 Parse.const_binding) + -- Scan.option (@{keyword "where"} |-- Parse.term) + >> (fn (((overloaded, x), (y, z)),_) => + Toplevel.theory (add_record_cmd {overloaded = overloaded} x y z))); + +*} + + +(* Look at this thingi ... 
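+   It appears to be a verbatim copy of Thy_Output.document_command (compare
+   the excerpt quoted in MyCommentedIsabelle.thy), i.e. the function that
+   enriched_document_command above composes with.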
*) +ML \ +fun document_command markdown (loc, txt) = + Toplevel.keep + (fn state => (case loc of + NONE => ignore (Thy_Output.output_text state markdown txt) + | SOME (_, pos) => error ("Illegal target specification -- not a theory context" + ^ Position.here pos))) + o Toplevel.present_local_theory loc + (fn state => ignore (Thy_Output.output_text state markdown txt)); + +\ + +end \ No newline at end of file diff --git a/Isa_MOF.thy b/Isa_MOF.thy new file mode 100644 index 0000000..f542720 --- /dev/null +++ b/Isa_MOF.thy @@ -0,0 +1,236 @@ +chapter \The Document Meta-Class Framework for Isabelle \ + +text{* A kind of ".dtd" for Isabelle documents modeled inside Isabelle for Isabelle, including + general reg-exps for specifying the syntactic structure of sub-entities as well as + sub-typing between document classes. *} + +theory Isa_MOF + imports RegExp + "~~/src/HOL/Library/Code_Target_Numeral" + "~~/src/HOL/Library/String" +begin + +type_synonym string = String.literal +type_synonym cid = string (*class identifier *) +type_synonym aid = string (*attribute identifier *) +type_synonym xstring = string +type_synonym oid = string + + + +datatype attribute_types = file\<^sub>T (file_name: string) + | image_file\<^sub>T (image_file_name : string) + | thms\<^sub>T (thm_names : "string list") + | int\<^sub>T (int_of : int) + | bool\<^sub>T (bool_of : bool) + | string\<^sub>T (string_of : string) + | text\<^sub>T (text_of : xstring) + | range\<^sub>T (start : int) (end_opt : "int option") + | enum\<^sub>T (enum_ids : "string list") + +(* +datatype ('\\<^sub>s\<^sub>y\<^sub>s)base_types = int\<^sub>T (default1:"int") + | string\<^sub>T (default2:"string") + | int_list\<^sub>T (default3:"int list") + | string\<^sub>_list\<^sub>T (default4:"string list") + | method\<^sub>T (action:"'\\<^sub>s\<^sub>y\<^sub>s \ '\\<^sub>s\<^sub>y\<^sub>s") +(* Options ?*) +*) + + +type_synonym attribute_env = "(string \ attribute_types)list" + +type_synonym entity_env = "(string \ (oid \ cid)rexp) list" + (* entities have a regular structure, i.e. their syntactic structure is described by + a regular grammar involving in object-conformance checks a grammar check. *) + +datatype class_hierarchy = class\<^sub>T (class_name : cid) + (subclasses : "class_hierarchy list" ) + (attributes : "attribute_env") + (entities : "entity_env") +(* The conceptual distinction between attributes and entities is pragmatic - attributes + are basic values, components sub-OBJECTS having an class-typed syntactical + structure. *) + +(* Note: we may have override in attributes and entities, + and their name-spaces are disjoint. Obviously, the syntactic entity structure may be + inexistent ([]) or having empty entities ("bla", <>). The semantics of the list concatenation + of two entities is textual sequencing. *) + +text{* in the reflection part, @{typ 'oid} will be instantiated with + @{ML_type "bstring * Binding.binding"} pairs allowing native Isabelle/jedit support for + document editing. 
*} (* quatsch *) + + +definition mt :: "class_hierarchy" + where "mt == class\<^sub>T (String.implode ''/'') [] [] [] " + +fun classes :: "class_hierarchy \ string list" + where "classes (class\<^sub>T name sub_classes attr comps) = name # concat(map classes sub_classes)" + +fun all_attributes :: "class_hierarchy \ attribute_env list" + where "all_attributes (class\<^sub>T name sub_classes attr comps) = attr # (map attributes sub_classes)" + +fun all_entities :: "class_hierarchy \ entity_env list" + where "all_entities (class\<^sub>T name sub_classes attr comps) = comps # (map entities sub_classes)" + +fun atoms_of :: "'a rexp \ 'a list" + where "atoms_of <> = []" + |"atoms_of (\x\) = [x]" + |"atoms_of(a | b) = atoms_of a @ atoms_of b" + |"atoms_of(a : b) = atoms_of a @ atoms_of b" + |"atoms_of(Star a) = atoms_of a" + + +text{* Elementary consistency definitions ... *} +definition wff :: "class_hierarchy \ bool" + where "wff H = (distinct(classes H) \ + (\ (_,cid) \ set(concat(map(atoms_of o snd) + (concat(all_entities H)))). cid \ set(classes H)) \ + (\ attribute_list \ set(all_attributes H). distinct(map fst attribute_list)) \ + (\ entity_list \ set(all_entities H). distinct(map fst entity_list)) + )" + +lemma wff_mt [simp]: "wff mt" + unfolding mt_def wff_def by auto + +lemma class_names_distinct : "wff H \ distinct(classes H)" unfolding wff_def by(auto) + +lemma class_names_declared_in_component_decl : + "wff H \ (\ (_,cid) \ set(concat(map(atoms_of o snd) + (concat(all_entities H)))). cid \ set(classes H))" + unfolding wff_def by(auto) + +lemma attribute_names_unique_in_attr_decl : + "wff H \ (\ attribute_list \ set(all_attributes H). distinct(map fst attribute_list))" + unfolding wff_def by(auto) + +lemma component_names_unique_in_attr_decl : + "wff H \ (\ entity_list \ set(all_attributes H). distinct(map fst entity_list))" + unfolding wff_def by(auto) + + +fun get_subclass :: "class_hierarchy \ nat list \ class_hierarchy option" + where "get_subclass H [] = Some H" + |"get_subclass (class\<^sub>T cid subs attrs ents) (n#S) = + (if n < length subs + then case get_subclass (subs ! n) S of + Some H' \ Some (class\<^sub>T cid (subs[n := H']) attrs ents) + | None \ None + else None)" + +(* I don't know if this is general enough. It does not allow the introduction + of classes which are mutually dependant. + *) +fun extend_class_hierarchy :: "((class_hierarchy \ class_hierarchy) \ class_hierarchy) \ + (cid \ attribute_env \ entity_env) \ + class_hierarchy" (infixl "\" 70) + where "(C,H) \ (cid,attrs,ents) = + (if cid \ set(classes (C H)) (* cid fresh in context and subhierarchy *) + then if distinct(map fst attrs) + then if distinct(map fst ents) + then case H of + class\<^sub>T cid' subs attrs' ents' \ + C(class\<^sub>T cid' (( class\<^sub>T cid [] attrs' ents') # subs) attrs ents) + else mt + else mt + else mt)" + +(* Todo : Lemmas that establish wff of extended trees *) + +lemma wff_preserved : + assumes "wff (C H)" + shows "wff((C,H) \ (cid,attrs,ents))" + apply(insert assms) + sorry + + (* well, this does not hold. this can only be true for standard contexts C, but + how do we model this ? ? ? *) + (* sigh, then try Dewey notation and classical grafting. *) + + +section{* Subclass properties *} + +fun dir_sub_class :: "string \ class_hierarchy \ string \ bool" + ("(_)\\<^bsub>(_)\<^esub> (_)"[60,80,60]80) + where "(s \\<^bsub>(class\<^sub>T n subs _ _)\<^esub> t) = ((s=n \ t \ set (map class_name subs) \ + (\ sub\set subs. 
s \\<^bsub>sub\<^esub> t)))" + +definition sub_class :: "string \ class_hierarchy \ string \ bool" + ("(_)\\<^bsub>(_)\<^esub> (_)"[60,80,60]80) + where "(s \\<^bsub>(H)\<^esub> t) = ((s,t) \ {(x,y). x \\<^bsub>(H)\<^esub> y}\<^sup>*) " + +lemma sub_class_refl : "(s \\<^bsub>(H)\<^esub> s)" + unfolding sub_class_def + by simp + +text{* A manner to compute the subclass relationship effectively gives the following lemma: *} +lemma sub_class_trans_comp : + "(s \\<^bsub>(H)\<^esub> t) = (if class_name H = s + then \ s'\ set(map class_name (subclasses H)). (s' \\<^bsub>(H)\<^esub> t) + else \ H'\ set(subclasses H). (s \\<^bsub>(H')\<^esub> t) )" +proof(induct H) print_cases + case (class\<^sub>T n subs attrs) + then show ?case (* using class\<^sub>T.hyps *) apply auto + sorry +qed + + +subsection{* Fundamental search and replacement operations in a hierarchy *} + +fun replace_class_hierarchy :: "((class_hierarchy \ class_hierarchy) \ class_hierarchy) \ + cid \ + class_hierarchy \ + class_hierarchy" ("(_)[_\\_]" [80,80,80]80) + where "(C,class\<^sub>T cid' subs attrs ents) [cid \\ H'] = (if cid = cid' + then C H' + else C(class\<^sub>T cid' + (map (\X. (\x. x, X) [cid \\ H']) subs) attrs ents))" + + +fun get_class :: "cid \ class_hierarchy \ (attribute_env \ entity_env \ class_hierarchy)option" + where "get_class cid (class\<^sub>T c_name subc attrs comps) = + (if cid = c_name then Some([],[],(class\<^sub>T c_name subc attrs comps)) + else (case filter (\X. X \ None) (map (get_class cid) subc) of + [] \ None + | (Some(x,y,H) # _) \ Some(attrs@x,comps@y,H)))" + + +subsection{* Objects and States *} +type_synonym object = "cid \ attribute_env \ (oid \ cid) list" +type_synonym state = "oid \ object" + +fun remove_overrides :: "('\ \ '\) \ '\ list \ '\ list" + where "remove_overrides f [] = []" + |"remove_overrides f (a#R) = (if f a \ set(map f R) then remove_overrides f R + else a # (remove_overrides f R))" + + +subsection{* Code-Generation *} + +code_printing + type_constructor prod \ (SML) infix 2 "*" + | constant Pair \ (SML) "!((_),/ (_))" + + +code_printing + type_constructor bool \ (SML) "bool" + | constant True \ (SML) "true" + | constant False \ (SML) "false" + | constant "HOL.conj" \ (SML) infix 1 "_ andalso _" + + +(* todo: checker for "validity" of an object wrt to its class description *) + + +export_code mt classes all_attributes all_entities remove_overrides + (* replace_class_hierarchy get_class extend_class_hierarchy *) in SML +module_name MOF file "MOF.sml" + + +(* Hhm, a slightly more pragmatic approach to document types ... *) + + + +end + \ No newline at end of file diff --git a/MyCommentedIsabelle.thy b/MyCommentedIsabelle.thy new file mode 100644 index 0000000..b066c4f --- /dev/null +++ b/MyCommentedIsabelle.thy @@ -0,0 +1,498 @@ +chapter \A More Or Less Structured File with my Personal, Ecclectic Comments + on Internal Isabelle/Infrastructure \ + +text" Covering Parsers, Pretty-Printers and Presentation, and Kernel. " + +theory MyCommentedIsabelle + imports Main + +begin + +section "Isabelle/Pure bootstrap"; + text "The Fundamental Boot Order gives an Idea on Module Dependencies: " + text \ @{file "$ISABELLE_HOME/src/Pure/ROOT.ML"}\ + + text "It's even roughly commented ... " + +section{* Stuff - Interesting operators (just sample code) *} + +(* Vorbild *) +text \ @{footnote \sdf\ }\ + +subsection\ Global State Management\ +subsubsection\ Mechanism 1 : configuration flags of fixed type. 
\
+
+ML{*
+Config.get @{context} Thy_Output.quotes;
+Config.get @{context} Thy_Output.display;
+
+val C = Synchronized.var "Pretty.modes" "latEEex";
+(* Synchronized: a mechanism to bookkeep global
+   variables, with the synchronization mechanism included *)
+Synchronized.value C;
+(* from Thy_Output:
+fun output ctxt prts =
+  prts
+  |> Config.get ctxt quotes ? map Pretty.quote
+  |> (if Config.get ctxt display then
+        map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
+        #> space_implode "\\isasep\\isanewline%\n"
+        #> Latex.environment "isabelle"
+      else
+        map
+          ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
+           #> Output.output)
+        #> space_implode "\\isasep\\isanewline%\n"
+        #> enclose "\\isa{" "}");
+*)
+*}
+
+subsubsection\ Mechanism 2 : global arbitrary data structure that is attached to the global and
+              local Isabelle context $\theta$ \
+ML {*
+
+datatype X = mt
+val init = mt;
+val ext = I
+fun merge (X,Y) = mt
+
+structure Data = Generic_Data
+(
+  type T = X
+  val empty  = init
+  val extend = ext
+  val merge  = merge
+);
+
+*}
+
+
+(* General combinators (in Pure/General/basics.ML) *)
+ML{*
+op #>;   (* reversed function composition *)
+op |--;
+op --|;
+op --;
+op ?;
+*}
+
+
+(* Tokens and Bindings *)
+ML{*
+
+
+(* Core: Token.T *)
+
+(* Derived type : *)
+(*
+  type 'a parser = T list -> 'a * T list
+  type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
+*)
+
+Token.is_command;
+Token.content_of; (* the textual core of a token *)
+
+val H = @{binding here};  (* There are "bindings" consisting of a text-span and a position,
+                             where "positions" are absolute references to a file *)
+
+Binding.make;
+Binding.pos_of @{binding erzerzer};
+Position.here;
+
+
+*}
+
+(* Scanning and combinator parsing. *)
+ML\
+Scan.peek;
+Scan.option: ('a -> 'b * 'a) -> 'a -> 'b option * 'a;
+Scan.repeat: ('a -> 'b * 'a) -> 'a -> 'b list * 'a;
+Scan.lift;
+Scan.optional;
+Scan.lift (Parse.position Args.cartouche_input);
+
+(* "Parsers" are actually interpreters: an 'a parser is a function that parses
+   an input stream and evaluates it into an 'a.
+   Since the semantics of an Isabelle command is a transition => transition
+   or theory => theory function, i.e. a global system transition,
+   parsers of that type can be constructed and be bound as call-back functions
+   to a table in the Toplevel structure of Isar.
+
+   The type 'a parser is already defined in the structure Token.
+*)
+
+Parse.nat: int parser;
+Parse.int: int parser;
+Parse.enum_positions: string -> 'a parser -> ('a list * Position.T list) parser;
+Parse.enum: string -> 'a parser -> 'a list parser;
+
+Parse.enum;
+Parse.!!!;
+Parse.position: 'a parser -> ('a * Position.T) parser;
+
+(* Examples *)
+Parse.position Args.cartouche_input;
+
+
+(* More high-level, more Isar-specific parsers *)
+Args.name;
+Args.const;
+Args.cartouche_input: Input.source parser;
+Args.text_token: Token.T parser;
+
+val Z = let val attribute = Parse.position Parse.name --
+                            Scan.optional (Parse.$$$ "=" |-- Parse.!!!
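+                            (* an attribute is a name, optionally followed by
+                               "= value"; Z then parses a comma-separated list
+                               of such attributes, essentially the attributes
+                               parser of Isa_DOF.thy *)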
Parse.name) ""; + in (Scan.optional(Parse.$$$ "," |-- (Parse.enum "," attribute))) end ; +(* this leads to constructions like the following, where a parser for a *) +fn name => (Thy_Output.antiquotation name (Scan.lift (Parse.position Args.cartouche_input))); +\ + +(* Bindings *) +ML\val X = @{here};\ + + + + +(* Output: Very Low Level *) +ML\ +Output.output; (* output is the structure for the "hooks" with the target devices. *) +Output.output "bla_1:"; +\ + +ML\ +Thy_Output.verbatim_text; +Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string; +Thy_Output.antiquotation: + binding -> + 'a context_parser -> + ({context: Proof.context, source: Token.src, state: Toplevel.state} -> 'a -> string) -> + theory -> theory; +Thy_Output.output: Proof.context -> Pretty.T list -> string; +Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string; + + + +Thy_Output.output : Proof.context -> Pretty.T list -> string; +\ + + +(* Context and Theory: *) +ML{* +Context.theory_name; + +Theory.check; + +Context.map_theory; +(* Theory.map_thy; *) + +Theory.begin_theory; +Theory.check; +(* Outer_Syntax.pretty_command; not exported*) +Theory.setup; (* The thing to extend the table of "command"s with parser - callbacks. *) + +*} + + +(* Pretty.T, pretty-operations. *) +ML{* + +(* interesting piece for LaTeX Generation: +fun verbatim_text ctxt = + if Config.get ctxt display then + split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #> + Latex.output_ascii #> Latex.environment "isabellett" + else + split_lines #> + map (Latex.output_ascii #> enclose "\\isatt{" "}") #> + space_implode "\\isasep\\isanewline%\n"; + +(* From structure Thy_Output *) +fun pretty_const ctxt c = + let + val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) + handle TYPE (msg, _, _) => error msg; + val ([t'], _) = Variable.import_terms true [t] ctxt; + in pretty_term ctxt t' end; + + basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> + +*) + +Pretty.enclose : string -> string -> Pretty.T list -> Pretty.T; (* not to confuse with: String.enclose *) + +(* At times, checks where attached to Pretty - functions and exceptions used to + stop the execution/validation of a command *) +fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name); +Pretty.enclose; +Pretty.str; +Pretty.mark_str; +Pretty.text "bla_d"; + +Pretty.quote; (* Pretty.T transformation adding " " *) +Pretty.unformatted_string_of : Pretty.T -> string ; + +Latex.output_ascii; +Latex.environment "isa" "bg"; +Latex.output_ascii "a_b:c'é"; +(* Note: *) +space_implode "sd &e sf dfg" ["qs","er","alpa"]; + + +(* +fun pretty_command (cmd as (name, Command {comment, ...})) = + Pretty.block + (Pretty.marks_str + ([Active.make_markup Markup.sendbackN {implicit = true, properties = [Markup.padding_line]}, + command_markup false cmd], name) :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); +*) + + +*} + + +(* Markup Operations, and reporting. *) +ML{* +(* Markup.enclose; *) +Position.reports; (* ? ? ? I think this is the magic thing that sends reports to the GUI. 
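+                   A report is a (position, markup) pair; the PIDE document
+                   model turns such reports into highlighting, tooltips and
+                   hyperlinks. Compare Position.report in init_markup quoted
+                   below, which reports a single entity markup at its
+                   defining position.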
*) + +Markup.properties; +Properties.get; + + +fun theory_markup def name id pos = + if id = 0 then Markup.empty + else + Markup.properties (Position.entity_properties_of def id pos) + (Markup.entity Markup.theoryN name); +Markup.theoryN; + +(* From Theory: +fun check ctxt (name, pos) = + let + val thy = Proof_Context.theory_of ctxt; + val thy' = + Context.get_theory thy name + handle ERROR msg => + let + val completion = + Completion.make (name, pos) + (fn completed => + map Context.theory_name (ancestors_of thy) + |> filter completed + |> sort_strings + |> map (fn a => (a, (Markup.theoryN, a)))); + val report = Markup.markup_report (Completion.reported_text completion); + in error (msg ^ Position.here pos ^ report) end; + val _ = Context_Position.report ctxt pos (get_markup thy'); + in thy' end; + +fun init_markup (name, pos) thy = + let + val id = serial (); + val _ = Position.report pos (theory_markup true name id pos); + in map_thy (fn (_, _, axioms, defs, wrappers) => (pos, id, axioms, defs, wrappers)) thy end; + +fun get_markup thy = + let val {pos, id, ...} = rep_theory thy + in theory_markup false (Context.theory_name thy) id pos end; + + +*) +serial(); (* A global, lock-guarded seriel counter used to produce unique identifiers, + be it on the level of thy-internal states or as reference in markup in + PIDE *) +(* +fun theory_markup def thy_name id pos = + if id = 0 then Markup.empty + else + Markup.properties (Position.entity_properties_of def id pos) + (Markup.entity Markup.theoryN thy_name); + +fun get_markup thy = + let val {pos, id, ...} = rep_theory thy + in theory_markup false (Context.theory_name thy) id pos end; + + +fun init_markup (name, pos) thy = + let + val id = serial (); + val _ = Position.report pos (theory_markup true name id pos); + in map_thy (fn (_, _, axioms, defs, wrappers) => (pos, id, axioms, defs, wrappers)) thy end; + + +fun check ctxt (name, pos) = + let + val thy = Proof_Context.theory_of ctxt; + val thy' = + Context.get_theory thy name + handle ERROR msg => + let + val completion = + Completion.make (name, pos) + (fn completed => + map Context.theory_name (ancestors_of thy) + |> filter completed + |> sort_strings + |> map (fn a => (a, (Markup.theoryN, a)))); + val report = Markup.markup_report (Completion.reported_text completion); + in error (msg ^ Position.here pos ^ report) end; + val _ = Context_Position.report ctxt pos (get_markup thy'); + in thy' end; + + +*) +Markup.properties; (* read : add Properties.T items into Markup.T *) +Markup.entity; + +*} + + +ML{* +Toplevel.theory; +Toplevel.presentation_context_of; (* Toplevel is a kind of table with call-bacl functions *) +Proof_Context.consts_of; +Consts.the_const; (* T is a kind of signature ... 
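+                     presumably a table of all declared constants together
+                     with their most general types; the_const then looks a
+                     single name up in it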
*) +Variable.import_terms; +Vartab.update; + +fun control_antiquotation name s1 s2 = + Thy_Output.antiquotation name (Scan.lift Args.cartouche_input) + (fn {state, ...} => enclose s1 s2 o Thy_Output.output_text state {markdown = false}); + +Output.output; + +Proof_Context.read_const; +Syntax.read_input ; +Input.source_content; + +(* + basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> +*) +*} + +ML{* +Config.get @{context} Thy_Output.display; +Config.get @{context} Thy_Output.source; +Config.get @{context} Thy_Output.modes; +Thy_Output.document_command; +(* is: +fun document_command markdown (loc, txt) = + Toplevel.keep (fn state => + (case loc of + NONE => ignore (output_text state markdown txt) + | SOME (_, pos) => + error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o + Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt)); + +end; + +*) +Thy_Output.output_text; +(* is: +fun output_text state {markdown} source = + let + val is_reported = + (case try Toplevel.context_of state of + SOME ctxt => Context_Position.is_visible ctxt + | NONE => true); + + val pos = Input.pos_of source; + val syms = Input.source_explode source; + + val _ = + if is_reported then + Position.report pos (Markup.language_document (Input.is_delimited source)) + else (); + + val output_antiquotes = map (eval_antiquote state) #> implode; + + fun output_line line = + (if Markdown.line_is_item line then "\\item " else "") ^ + output_antiquotes (Markdown.line_content line); + + fun output_blocks blocks = space_implode "\n\n" (map output_block blocks) + and output_block (Markdown.Par lines) = cat_lines (map output_line lines) + | output_block (Markdown.List {kind, body, ...}) = + Latex.environment (Markdown.print_kind kind) (output_blocks body); + in + if Toplevel.is_skipped_proof state then "" + else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms + then + let + val ants = Antiquote.parse pos syms; + val reports = Antiquote.antiq_reports ants; + val blocks = Markdown.read_antiquotes ants; + val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else (); + in output_blocks blocks end + else + let + val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms); + val reports = Antiquote.antiq_reports ants; + val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else (); + in output_antiquotes ants end + end; +*) + +*} + + +ML{* Outer_Syntax.print_commands @{theory}; +Outer_Syntax.command : Outer_Syntax.command_keyword -> string -> + (Toplevel.transition -> Toplevel.transition) parser -> unit; +(* creates an implicit thy_setup with an entry for a call-back function, which happens + to be a parser that must have as side-effect a Toplevel-transition-transition. *) + + +(* not exported: Thy_Output.output_token; Ich glaub, da passierts ... 
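+   (in English: I think that is where it happens.) present_thy below folds
+   over the token stream of a theory, so output_token is plausibly where each
+   token gets rendered to LaTeX.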
*) +Thy_Output.present_thy; +*} + +ML{* Thy_Output.document_command {markdown = true} *} +(* Structures related to LaTeX Generation *) +ML{* Latex.output_ascii; + Latex.modes; + Latex.output_token +(* Hm, generierter output for +subsection*[Shaft_Encoder_characteristics]{ * Shaft Encoder Characteristics * } : + +\begin{isamarkuptext}% +\isa{{\isacharbackslash}label{\isacharbraceleft}general{\isacharunderscore}hyps{\isacharbraceright}}% +\end{isamarkuptext}\isamarkuptrue% +\isacommand{subsection{\isacharasterisk}}\isamarkupfalse% +{\isacharbrackleft}Shaft{\isacharunderscore}Encoder{\isacharunderscore}characteristics{\isacharbrackright}{\isacharverbatimopen}\ Shaft\ Encoder\ Characteristics\ {\isacharverbatimclose}% + +Generierter output for: text\\label{sec:Shaft-Encoder-characteristics}\ + +\begin{isamarkuptext}% +\label{sec:Shaft-Encoder-characteristics}% +\end{isamarkuptext}\isamarkuptrue% + + +*) + + +*} + +ML{* +Thy_Output.maybe_pretty_source : + (Proof.context -> 'a -> Pretty.T) -> Proof.context -> Token.src -> 'a list -> Pretty.T list; + +Thy_Output.output: Proof.context -> Pretty.T list -> string; + + (* nuescht besonderes *) + +fun document_antiq check_file ctxt (name, pos) = + let + (* val dir = master_directory (Proof_Context.theory_of ctxt); *) + (* val _ = check_path check_file ctxt dir (name, pos); *) + in + space_explode "/" name + |> map Latex.output_ascii + |> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}") + |> enclose "\\isatt{" "}" + end; + +*} + + +end \ No newline at end of file diff --git a/README.md b/README.md index 2e33bcc..df7af02 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,7 @@ # Isabelle_DOF + +--- Isa_DOF is currently not used. + it is based on an reflection approach (a la Frederic) + but was abandoned as too heavy for this little nut to crack + diff --git a/ROOT b/ROOT new file mode 100644 index 0000000..d912124 --- /dev/null +++ b/ROOT @@ -0,0 +1,60 @@ +(* + Bit + Bits + Boolean_Algebra + Code_Abstract_Nat + Code_Target_Nat + Code_Target_Int + Misc_Numeric + Misc_Typedef + Code_Target_Numeral + Bit_Representation + Bits_Bit + Phantom_Type + RegExp + Word_Miscellaneous + Bits_Int + Cardinality + Isa_MOF + Numeral_Type + Bool_List_Representation + Type_Length + Word + Isa_DOF + CENELEC_50126 +*) + +session "HOL-Analysis-AutoCorres" = "HOL-Analysis" + + theories [document=false] + "autocorres-1.3/autocorres/AutoCorres" + + +session "Odo" = "HOL-Analysis-AutoCorres" + + description {* The Toplevel Requirement Documents of the Odometrie Service *} + options [document = pdf,document_variants="document:outline=/proof,/ML",document_output=output,quick_and_dirty] + theories [document=false] + "ontology_support/CENELEC_50126" + "Real" + "~~/src/HOL/Word/Word" + "Monads" + "Assert" + theories + "Odo_ReqAna" + "Odo_Design" + document_files + "root.tex" + "root.bib" + "main.tex" + "titlepage.tex" + +(* examples of s.th. 
more complex: +session "HOL-TestGen" (main) = "HOL-TestGenLib" + + description {* HOL-TestGen *} + theories + "codegen_fsharp/Code_String_Fsharp" + "codegen_fsharp/Code_Char_chr_Fsharp" + "codegen_fsharp/Code_Integer_Fsharp" + "codegen_fsharp/Code_Char_Fsharp" + "Testing" + "IOCO" +*) diff --git a/RegExp.thy b/RegExp.thy new file mode 100644 index 0000000..7d6952c --- /dev/null +++ b/RegExp.thy @@ -0,0 +1,157 @@ +theory RegExp +imports Main +begin + +datatype 'a rexp = Empty ("<>") + | Atom 'a ("\_\" 65) + | Alt "('a rexp)" "('a rexp)" (infix "|" 55) + | Conc "('a rexp)" "('a rexp)" (infix ":" 60) + | Star "('a rexp)" + + +value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))" +text{* or better equivalently: *} +value "Star ((\CHR ''a''\ | \CHR ''b''\) : \CHR ''c''\)" + +text{* Let's try to prove something obvious : *} +lemma alt_commute : " ((A::'a rexp) | B) = (B | A)" (* use the type annotation to disambiguate *) +apply(case_tac A, simp_all) +apply(case_tac B, simp_all) +oops + +text{* This is simply FALSE. Why ??? *} + + +section{* Definition of a semantic function: the ``language'' of the regular expression *} + +text{* In the following, we give a semantics for our regular expressions, which so far have +just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'', +i.e. we give a direct meaning for regular expressions in some universe of ``denotations''. + +This universe of denotations is in our concrete case: *} + +type_synonym 'a lang = "'a list set" + +inductive_set star :: "'a lang \ 'a lang" + for A:: "'a lang" +where NilI : "[] : star A" + | ConsI : "[| a:A; as : star A |] ==> a@as : star A" + + +lemma NilI : "[] : star A" +by(rule NilI) + + +lemma ConsI : "a\A \ as \ star A \ a@as \ star A" +apply(rule ConsI) +apply(assumption) +by(assumption) + + +find_theorems (80) name:"Set." name:"eq" + +lemma epsilonExists: "star {[]} = {[]}" +apply(subst set_eq_iff) +apply(rule allI) +apply(rule iffI) +apply(rule_tac A="{[]}" in star.induct)back +apply(assumption) +apply simp +apply simp +by (simp add: star.NilI) + + +lemma epsilonExists': "star {[]} = {[]}" +apply(rule Set.set_eqI) +apply(rule iffI) +apply(erule star.induct) +apply(auto intro: NilI ) +done + + +lemma epsilonAlt: "star {} = {[]}" +apply(rule Set.set_eqI) +apply(rule iffI) +apply(erule star.induct, simp,simp) +apply(auto intro: NilI ) +done + + + + +text{* ... and of course, we have the consequence : *} + +lemma epsilon': "star (star {[]}) = {[]}" +by(simp add: epsilonExists) + +lemma epsilon'': "star (star ((star {[]}))) = {[]}" +by(simp add: epsilonExists) + + + +text{* Now the denotational semantics for regular expression can be defined on a post-card: *} + +fun L :: "'a rexp => 'a lang" +where L_Emp : "L Empty = {}" + |L_Atom: "L (\a\) = {[a]}" + |L_Un: "L (el | er) = (L el) \ (L er)" + |L_Conc: "L (el : er) = {xs@ys | xs ys. xs:L el \ ys:L er}" + |L_Star: "L (Star e) = star(L e)" + + +schematic_goal WeCompute: "L(Star ((\CHR ''a''\ | \CHR ''b''\) : \CHR ''c''\)) = ?X" +by simp + +thm WeCompute + + +text{* Well, an attempt to evaluate this turns out not to work too well ... *} + + +text{* Now let's reconsider our `obvious lemma' from above, this time by stating that + the alternative-operator produces \emph{semantically} equivalent ewpressions. 
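+      On the language level the equation does hold, since L maps the
+      alternative operator to set union, which is commutative; cf. the use of
+      inf_sup_aci in the proof below.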
*} + +lemma alt_commute : "L((A::'a rexp) | B) = L(B | A)" +apply(subst L_Un) +apply(subst L_Un) +apply(rule inf_sup_aci) +done + + +lemma alt_commute' : "L((A::'a rexp) | B) = L(B | A)" +by auto + + +lemma alt_commute'' : "L((A::'a rexp) | B) = L(B | A)" +using alt_commute' by blast + +lemma mt_seq : "L(Empty : Empty) = {}" +apply(subst L_Conc) +apply(subst L_Emp) +apply(subst L_Emp) +apply(subst Set.empty_iff) +apply(subst HOL.simp_thms(24)) +apply(subst Set.Collect_empty_eq) +apply(subst HOL.simp_thms(23)) +apply(rule allI) +apply(subst HOL.not_ex) +apply(subst HOL.not_ex) +apply(rule allI) +apply(rule allI) +apply(subst HOL.not_False_eq_True) +apply(rule TrueI) +done + +lemma mt_seq' : "L(Empty : Empty) = {}" +by simp + + +lemma eps : "L (Star Empty) = {[]}" +by (simp add: epsilonAlt) + + term "\X\" + +no_notation Atom ("\_\") + + +end \ No newline at end of file diff --git a/examples/simple/Example.thy b/examples/simple/Example.thy new file mode 100644 index 0000000..499426e --- /dev/null +++ b/examples/simple/Example.thy @@ -0,0 +1,60 @@ +theory Example + imports Isa_DOF + keywords "Term" :: diag +begin + +section{* Some tests ... *} + +declare_reference [lalala::requirement, alpha="main", beta=42] + +declare_reference [lalala::quod] (* shouldn't work *) + +declare_reference [blablabla::cid, alpha=beta, beta=gamma] + +paragraph*[sdf]{* just a paragraph *} + +subsection*[sdf]{* shouldn't work *} + +section*[sedf::requirement]{* works again *} + +section*[seedf::integration_test, dfg=34,fgdfg=zf]{* and another example with attribute setting *} + +section{* Text Antiquotation Infrastructure ... *} + +text{* @{docref lalala} *} + + + +text{* @{thm refl} @{file "MOF.sml"} @{value 3} @{const hd} @{theory List}} + @{term "3"} @{type bool} @{term [show_types] "f x = a + x"} *} + +text{* Here is a reference to @{docref sedf} *} + + + +section{* A Small Example for a Command Definition *} + +ML{* +val opt_modes = + Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.name --| @{keyword ")"})) []; + +val _ = + Outer_Syntax.command @{command_keyword Term} "read and print term" + (opt_modes -- Parse.term >> Isar_Cmd.print_term); + +*} + +lemma "True" by simp + +Term "a + b = b + a" + +term "a + b = b + a" + +section(in order){* sdfsdf*} (* undocumented trouvaille when analysing the code *) + + + + +end + + \ No newline at end of file