From de1870fbee8ca6c11f7a3b56f02722206a97af65 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 19 Feb 2023 20:01:01 +0000 Subject: [PATCH 01/27] Update to Isabelle devleopment version. --- .../TR_MyCommentedIsabelle.thy | 9 +++------ Isabelle_DOF/scala/dof_document_build.scala | 7 ++++--- Isabelle_DOF/thys/Isa_DOF.thy | 10 +++------- 3 files changed, 10 insertions(+), 16 deletions(-) diff --git a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy index 1f90b61..4d5ce9a 100644 --- a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy +++ b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy @@ -1144,8 +1144,7 @@ text\ necessary infrastructure --- i.e. the operations to pack and unpack theories and queries on it: -\<^item> \<^ML>\ Toplevel.theory_toplevel: theory -> Toplevel.state\ -\<^item> \<^ML>\ Toplevel.init_toplevel: unit -> Toplevel.state\ +\<^item> \<^ML>\ Toplevel.make_state: theory option -> Toplevel.state\ \<^item> \<^ML>\ Toplevel.is_toplevel: Toplevel.state -> bool\ \<^item> \<^ML>\ Toplevel.is_theory: Toplevel.state -> bool\ \<^item> \<^ML>\ Toplevel.is_proof: Toplevel.state -> bool\ @@ -1183,7 +1182,7 @@ text\ The extensibility of Isabelle as a system framework depends on a num \<^item> \<^ML>\Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition\ adjoins a theory transformer. \<^item> \<^ML>\Toplevel.generic_theory: (generic_theory -> generic_theory) -> Toplevel.transition -> Toplevel.transition\ - \<^item> \<^ML>\Toplevel.theory': (bool -> theory -> theory) -> Toplevel.presentation -> Toplevel.transition -> Toplevel.transition\ + \<^item> \<^ML>\Toplevel.theory': (bool -> theory -> theory) -> Toplevel.presentation option -> Toplevel.transition -> Toplevel.transition\ \<^item> \<^ML>\Toplevel.exit: Toplevel.transition -> Toplevel.transition\ \<^item> \<^ML>\Toplevel.ignored: Position.T -> Toplevel.transition\ \<^item> \<^ML>\Toplevel.present_local_theory: (xstring * Position.T) option -> @@ -1201,7 +1200,6 @@ text\ \<^item> \<^ML>\Document.state : unit -> Document.state\, giving the state as a "collection" of named nodes, each consisting of an editable list of commands, associated with asynchronous execution process, - \<^item> \<^ML>\Session.get_keywords : unit -> Keyword.keywords\, this looks to be session global, \<^item> \<^ML>\Thy_Header.get_keywords : theory -> Keyword.keywords\ this looks to be just theory global. @@ -1275,7 +1273,6 @@ subsection\Miscellaneous\ text\Here are a few queries relevant for the global config of the isar engine:\ ML\ Document.state();\ -ML\ Session.get_keywords(); (* this looks to be session global. *) \ ML\ Thy_Header.get_keywords @{theory};(* this looks to be really theory global. 
*) \ @@ -2153,7 +2150,7 @@ text\ \<^item>\<^ML>\Document_Output.output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text \ \<^item>\<^ML>\Document_Output.output_token: Proof.context -> Token.T -> Latex.text \ \<^item>\<^ML>\Document_Output.output_source: Proof.context -> string -> Latex.text \ -\<^item>\<^ML>\Document_Output.present_thy: Options.T -> theory -> Document_Output.segment list -> Latex.text \ +\<^item>\<^ML>\Document_Output.present_thy: Options.T -> Keyword.keywords -> string -> Document_Output.segment list -> Latex.text \ \<^item>\<^ML>\Document_Output.isabelle: Proof.context -> Latex.text -> Latex.text\ \<^item>\<^ML>\Document_Output.isabelle_typewriter: Proof.context -> Latex.text -> Latex.text\ diff --git a/Isabelle_DOF/scala/dof_document_build.scala b/Isabelle_DOF/scala/dof_document_build.scala index cd2063f..bbb22fa 100644 --- a/Isabelle_DOF/scala/dof_document_build.scala +++ b/Isabelle_DOF/scala/dof_document_build.scala @@ -42,7 +42,7 @@ object DOF_Document_Build def the_document_entry(context: Document_Build.Context, name: String): Export.Entry = { val entries = for { - node_name <- context.document_theories + node_name <- context.all_document_theories entry <- context.session_context.get(node_name.theory, name) } yield entry @@ -60,11 +60,12 @@ object DOF_Document_Build override def prepare_directory( context: Document_Build.Context, dir: Path, - doc: Document_Build.Document_Variant): Document_Build.Directory = + doc: Document_Build.Document_Variant, + verbose: Boolean): Document_Build.Directory = { val options = DOF.options(context.options) val latex_output = new Latex_Output(options) - val directory = context.prepare_directory(dir, doc, latex_output) + val directory = context.prepare_directory(dir, doc, latex_output, verbose) val isabelle_dof_dir = context.session_context.sessions_structure(DOF.session).dir diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index 143061e..b57da72 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -953,7 +953,7 @@ fun check_path check_file ctxt dir (name, pos) = val path = Path.append dir (Path.explode name) handle ERROR msg => err msg pos; val _ = Path.expand path handle ERROR msg => err msg pos; - val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); + val _ = Context_Position.report ctxt pos (Markup.path (File.symbolic_path path)); val _ = (case check_file of NONE => path @@ -1857,14 +1857,11 @@ let fun prin state _ = string_of_term string pos (Toplevel.context_of state) in Toplevel.theory(fn thy => - (print_item prin (string_list, string) (Toplevel.theory_toplevel thy); + (print_item prin (string_list, string) (Toplevel.make_state (SOME thy)); thy |> meta_args_exec meta_args_opt ) ) trans end -val _ = Toplevel.theory -val _ = Toplevel.theory_toplevel - (* setup ontology aware commands *) @@ -2146,7 +2143,7 @@ fun document_command (name, pos) descr mark cmd = Outer_Syntax.command (name, pos) descr (ODL_Meta_Args_Parser.attributes -- Parse.document_source >> (fn (meta_args, text) => Toplevel.theory' (fn _ => cmd meta_args) - (Toplevel.presentation_context #> document_output_reports name mark meta_args text #> SOME))); + (SOME (Toplevel.presentation_context #> document_output_reports name mark meta_args text)))); (* Core Command Definitions *) @@ -3198,7 +3195,6 @@ ML\ (String.concatWith ","); Token.content_of\ ML\ Document.state; -Session.get_keywords(); Parse.command; Parse.tags; \ From 
60b1c4f4d442527505a6c75f149a14f379a224db Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 19 Feb 2023 20:08:18 +0000 Subject: [PATCH 02/27] Update to Isabelle devleopment version. --- .woodpecker/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 78e8aab..60dc3a1 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -1,6 +1,6 @@ pipeline: build: - image: docker.io/logicalhacking/isabelle2022 + image: docker.io/logicalhacking/isabelle-dev commands: - ./.woodpecker/check_external_file_refs - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX @@ -18,7 +18,7 @@ pipeline: - cd ../.. - ln -s * latest archive: - image: docker.io/logicalhacking/isabelle2022 + image: docker.io/logicalhacking/isabelle-dev commands: - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX - mkdir -p $ARTIFACT_DIR From 873f5c79ab0278684a67363d5f085848a035e3ee Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Wed, 22 Feb 2023 11:05:05 +0000 Subject: [PATCH 03/27] API update to match development version of Isabelle. --- Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy b/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy index 5fa3d6c..79a24bf 100644 --- a/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy +++ b/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy @@ -50,7 +50,7 @@ fun gen_enriched_document_command2 name {body} cid_transform attr_transform mark xstring_opt:(xstring * Position.T) option), toks_list:Input.source list) : theory -> theory = - let val toplvl = Toplevel.theory_toplevel + let fun toplvl thy = Toplevel.make_state (SOME thy) (* as side-effect, generates markup *) fun check_n_tex_text thy toks = let val ctxt = Toplevel.presentation_context (toplvl thy); From f27150eb88365c8f53fc40516b58df8b5e875824 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Tue, 28 Feb 2023 08:34:29 +0000 Subject: [PATCH 04/27] Updated options to mark the use of the development version of Isabelle. --- Isabelle_DOF/scala/dof.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Isabelle_DOF/scala/dof.scala b/Isabelle_DOF/scala/dof.scala index 1ace3a8..c89ff0d 100644 --- a/Isabelle_DOF/scala/dof.scala +++ b/Isabelle_DOF/scala/dof.scala @@ -39,10 +39,10 @@ import isabelle._ object DOF { /** parameters **/ - val isabelle_version = "2022" - val isabelle_url = "https://isabelle.in.tum.de/website-Isabelle2022" + val isabelle_version = "" + val isabelle_url = "https://isabelle.sketis.net/devel/release_snapshot/" - val afp_version = "afp-2022-10-27" + val afp_version = "afp-devel" // Isabelle/DOF version: "Unreleased" for development, semantic version for releases val version = "Unreleased" @@ -55,7 +55,7 @@ object DOF { val generic_doi = "10.5281/zenodo.3370482" // Isabelle/DOF source repository - val url = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF" + val url = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/src/branch/Isabelle_dev" // Isabelle/DOF release artifacts val artifact_dir = "releases/Isabelle_DOF/Isabelle_DOF" From 4df233e9f430177274e0e119040d2084ec9a7f6c Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Thu, 27 Apr 2023 13:35:11 +0100 Subject: [PATCH 05/27] Updated image name. 
--- .woodpecker/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 10c6121..01041e5 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -1,6 +1,6 @@ pipeline: build: - image: docker.io/logicalhacking/isabelle-dev + image: docker.io/logicalhacking/isabelle_nightly:latest commands: - ./.woodpecker/check_dangling_theories - ./.woodpecker/check_external_file_refs From 02bf9620f6f3dc4161d3390c50ff5b8ba510f27d Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Thu, 27 Apr 2023 14:54:22 +0100 Subject: [PATCH 06/27] Changed registry. --- .woodpecker/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 01041e5..b96506f 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -1,6 +1,6 @@ pipeline: build: - image: docker.io/logicalhacking/isabelle_nightly:latest + image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest commands: - ./.woodpecker/check_dangling_theories - ./.woodpecker/check_external_file_refs @@ -20,7 +20,7 @@ pipeline: - cd ../.. - ln -s * latest archive: - image: docker.io/logicalhacking/isabelle-dev + image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest commands: - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX - mkdir -p $ARTIFACT_DIR From c3aaaf9ebbdb57ee8ca6d3f836d63143d0c956ad Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 28 Apr 2023 07:33:24 +0100 Subject: [PATCH 07/27] Force pull of container and print latest log from Isabelle repo. --- .woodpecker/build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index b96506f..c2b57dc 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -2,6 +2,7 @@ pipeline: build: image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest commands: + - hg log --limit 2 /home/root/isabelle - ./.woodpecker/check_dangling_theories - ./.woodpecker/check_external_file_refs - ./.woodpecker/check_quick_and_dirty @@ -21,6 +22,7 @@ pipeline: - ln -s * latest archive: image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest + pull: true commands: - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX - mkdir -p $ARTIFACT_DIR From 01bcc48c7907e7be853e829f12ff0747878975f5 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 28 Apr 2023 11:20:23 +0100 Subject: [PATCH 08/27] Fixing repo location in container (Fixes #26). --- .woodpecker/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index c2b57dc..2014aa8 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -2,7 +2,7 @@ pipeline: build: image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest commands: - - hg log --limit 2 /home/root/isabelle + - hg log --limit 2 /root/isabelle - ./.woodpecker/check_dangling_theories - ./.woodpecker/check_external_file_refs - ./.woodpecker/check_quick_and_dirty From 0c654e2634f1ef0a05f548a00273c66070a3efe3 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 28 Apr 2023 11:21:13 +0100 Subject: [PATCH 09/27] Pull image for build ... 
--- .woodpecker/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 2014aa8..50935c4 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -1,6 +1,7 @@ pipeline: build: image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest + pull: true commands: - hg log --limit 2 /root/isabelle - ./.woodpecker/check_dangling_theories @@ -22,7 +23,6 @@ pipeline: - ln -s * latest archive: image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest - pull: true commands: - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX - mkdir -p $ARTIFACT_DIR From 640a867f28b348ea40a9ed7c4427b0f11bdefbb5 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 28 Apr 2023 15:00:10 +0100 Subject: [PATCH 10/27] Port to Isabelle Nightly. --- Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy b/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy index 2d1e9ea..a86b020 100644 --- a/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy +++ b/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy @@ -43,7 +43,7 @@ fun check_path check_file ctxt dir (name, pos) = val path = Path.append dir (Path.explode name) handle ERROR msg => ISA_core.err msg pos; val _ = Path.expand path handle ERROR msg => ISA_core.err msg pos; - val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); + val _ = Context_Position.report ctxt pos (Markup.path (File.symbolic_path path)); val _ = (case check_file of NONE => path From 1869a96b2d426a47ad6d42f9fd061bcc53c756de Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 4 Jun 2023 12:01:38 +0200 Subject: [PATCH 11/27] API update. 
--- .../TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy index 92c1151..e1c9749 100644 --- a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy +++ b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy @@ -1873,18 +1873,17 @@ Common Stuff related to Inner Syntax Parsing \<^item>\<^ML>\Args.internal_typ : typ parser\ \<^item>\<^ML>\Args.internal_term: term parser\ \<^item>\<^ML>\Args.internal_fact: thm list parser\ -\<^item>\<^ML>\Args.internal_attribute: (morphism -> attribute) parser\ -\<^item>\<^ML>\Args.internal_declaration: declaration parser\ +\<^item>\<^ML>\Args.internal_attribute: attribute Morphism.entity parser\ \<^item>\<^ML>\Args.alt_name : string parser\ \<^item>\<^ML>\Args.liberal_name: string parser\ - Common Isar Syntax \<^item>\<^ML>\Args.named_source: (Token.T -> Token.src) -> Token.src parser\ \<^item>\<^ML>\Args.named_typ : (string -> typ) -> typ parser\ \<^item>\<^ML>\Args.named_term : (string -> term) -> term parser\ -\<^item>\<^ML>\Args.embedded_declaration: (Input.source -> declaration) -> declaration parser\ +\<^item>\<^ML>\Args.embedded_declaration: (Input.source -> Morphism.declaration_entity) -> + Morphism.declaration_entity parser\ \<^item>\<^ML>\Args.typ_abbrev : typ context_parser\ \<^item>\<^ML>\Args.typ: typ context_parser\ \<^item>\<^ML>\Args.term: term context_parser\ From b364880bfc86b356aa805eb688f6f93cfa8f09c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Tue, 21 Feb 2023 17:38:45 +0100 Subject: [PATCH 12/27] Polymorphic classes first draft --- Isabelle_DOF-Example-I/IsaDofApplications.thy | 4 +- Isabelle_DOF-Example-II/paper.thy | 38 +- .../Conceptual/Conceptual.thy | 2 +- Isabelle_DOF-Unit-Tests/Attributes.thy | 2 +- .../Concept_OntoReferencing.thy | 4 +- .../Concept_TermAntiquotations.thy | 12 +- .../Concept_TermEvaluation.thy | 10 +- Isabelle_DOF-Unit-Tests/ROOT | 1 + Isabelle_DOF-Unit-Tests/TestKit.thy | 49 +- .../Test_Polymorphic_Classes.thy | 682 ++++++++++++++++++ .../scholarly_paper/scholarly_paper.thy | 22 +- Isabelle_DOF/thys/Isa_DOF.thy | 678 +++++++++-------- Isabelle_DOF/thys/manual/M_04_RefMan.thy | 2 +- 13 files changed, 1168 insertions(+), 338 deletions(-) create mode 100644 Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy diff --git a/Isabelle_DOF-Example-I/IsaDofApplications.thy b/Isabelle_DOF-Example-I/IsaDofApplications.thy index cde82e9..098ebc7 100644 --- a/Isabelle_DOF-Example-I/IsaDofApplications.thy +++ b/Isabelle_DOF-Example-I/IsaDofApplications.thy @@ -184,7 +184,7 @@ scenarios from the point of view of the ontology modeling. In @{text_section (un we discuss the user-interaction generated from the ontological definitions. Finally, we draw conclusions and discuss related work in @{text_section (unchecked) \conclusion\}. \ -section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"] +section*[bgrnd::text_section,main_author="Some(@{author ''bu''}::author)"] \ Background: The Isabelle System \ text*[background::introduction, level="Some 1"]\ While Isabelle is widely perceived as an interactive theorem prover for HOL @@ -246,7 +246,7 @@ can be type-checked before being displayed and can be used for calculations befo typeset. 
When editing, Isabelle's PIDE offers auto-completion and error-messages while typing the above \<^emph>\semi-formal\ content.\ -section*[isadof::technical,main_author="Some(@{docitem ''adb''}::author)"]\ \<^isadof> \ +section*[isadof::technical,main_author="Some(@{author ''adb''}::author)"]\ \<^isadof> \ text\ An \<^isadof> document consists of three components: \<^item> the \<^emph>\ontology definition\ which is an Isabelle theory file with definitions diff --git a/Isabelle_DOF-Example-II/paper.thy b/Isabelle_DOF-Example-II/paper.thy index e54b5f2..9d25e96 100644 --- a/Isabelle_DOF-Example-II/paper.thy +++ b/Isabelle_DOF-Example-II/paper.thy @@ -54,7 +54,7 @@ abstract*[abs, keywordlist="[\Shallow Embedding\,\Process-Alg If you consider citing this paper, please refer to @{cite "HOL-CSP-iFM2020"}. \ text\\ -section*[introheader::introduction,main_author="Some(@{docitem ''bu''}::author)"]\ Introduction \ +section*[introheader::introduction,main_author="Some(@{author ''bu''}::author)"]\ Introduction \ text*[introtext::introduction, level="Some 1"]\ Communicating Sequential Processes (\<^csp>) is a language to specify and verify patterns of interaction of concurrent systems. Together with CCS and LOTOS, it belongs to the family of @@ -126,10 +126,10 @@ attempt to formalize denotational \<^csp> semantics covering a part of Bill Rosc omitted.\}. \ -section*["pre"::tc,main_author="Some(@{author \bu\}::author)"] +section*["pre"::technical,main_author="Some(@{author \bu\}::author)"] \Preliminaries\ -subsection*[cspsemantics::tc, main_author="Some(@{author ''bu''})"]\Denotational \<^csp> Semantics\ +subsection*[cspsemantics::technical, main_author="Some(@{author ''bu''})"]\Denotational \<^csp> Semantics\ text\ The denotational semantics (following @{cite "roscoe:csp:1998"}) comes in three layers: the \<^emph>\trace model\, the \<^emph>\(stable) failures model\ and the \<^emph>\failure/divergence model\. @@ -189,7 +189,7 @@ of @{cite "IsobeRoggenbach2010"} is restricted to a variant of the failures mode \ -subsection*["isabelleHol"::tc, main_author="Some(@{author ''bu''})"]\Isabelle/HOL\ +subsection*["isabelleHol"::technical, main_author="Some(@{author ''bu''})"]\Isabelle/HOL\ text\ Nowadays, Isabelle/HOL is one of the major interactive theory development environments @{cite "nipkow.ea:isabelle:2002"}. HOL stands for Higher-Order Logic, a logic based on simply-typed \\\-calculus extended by parametric polymorphism and Haskell-like type-classes. @@ -218,10 +218,10 @@ domain theory for a particular type-class \\::pcpo\, \<^ie> fixed-point induction and other (automated) proof infrastructure. Isabelle's type-inference can automatically infer, for example, that if \\::pcpo\, then \(\ \ \)::pcpo\. \ -section*["csphol"::tc,main_author="Some(@{author ''bu''}::author)", level="Some 2"] +section*["csphol"::technical,main_author="Some(@{author ''bu''}::author)", level="Some 2"] \Formalising Denotational \<^csp> Semantics in HOL \ -subsection*["processinv"::tc, main_author="Some(@{author ''bu''})"] +subsection*["processinv"::technical, main_author="Some(@{author ''bu''})"] \Process Invariant and Process Type\ text\ First, we need a slight revision of the concept of \<^emph>\trace\: if \\\ is the type of the atomic events (represented by a type variable), then @@ -272,7 +272,7 @@ but this can be constructed in a straight-forward manner. Suitable definitions f \\\, \\\ and \\\ lifting \fst\ and \snd\ on the new \'\ process\-type allows to derive the above properties for any \P::'\ process\. 
\ -subsection*["operator"::tc, main_author="Some(@{author ''lina''})"] +subsection*["operator"::technical, main_author="Some(@{author ''lina''})"] \\<^csp> Operators over the Process Type\ text\ Now, the operators of \<^csp> \Skip\, \Stop\, \_\_\, \_\_\, \_\_\,\_\_\_\ etc. for internal choice, external choice, prefix and parallel composition, can @@ -297,7 +297,7 @@ The definitional presentation of the \<^csp> process operators according to @{ci follows always this scheme. This part of the theory comprises around 2000 loc. \ -subsection*["orderings"::tc, main_author="Some(@{author ''bu''})"] +subsection*["orderings"::technical, main_author="Some(@{author ''bu''})"] \Refinement Orderings\ text\ \<^csp> is centered around the idea of process refinement; many critical properties, @@ -327,7 +327,7 @@ states, from which no internal progress is possible. \ -subsection*["fixpoint"::tc, main_author="Some(@{author ''lina''})"] +subsection*["fixpoint"::technical, main_author="Some(@{author ''lina''})"] \Process Ordering and HOLCF\ text\ For any denotational semantics, the fixed point theory giving semantics to systems of recursive equations is considered as keystone. Its prerequisite is a complete partial ordering @@ -394,7 +394,7 @@ Fixed-point inductions are the main proof weapon in verifications, together with and the \<^csp> laws. Denotational arguments can be hidden as they are not needed in practical verifications. \ -subsection*["law"::tc, main_author="Some(@{author ''lina''})"] +subsection*["law"::technical, main_author="Some(@{author ''lina''})"] \\<^csp> Rules: Improved Proofs and New Results\ @@ -436,11 +436,11 @@ cases to be considered as well as their complexity makes pen and paper proofs practically infeasible. \ -section*["newResults"::tc,main_author="Some(@{author ''safouan''}::author)", +section*["newResults"::technical,main_author="Some(@{author ''safouan''}::author)", main_author="Some(@{author ''lina''}::author)", level= "Some 3"] \Theoretical Results on Refinement\ text\\ -subsection*["adm"::tc,main_author="Some(@{author ''safouan''}::author)", +subsection*["adm"::technical,main_author="Some(@{author ''safouan''}::author)", main_author="Some(@{author ''lina''}::author)"] \Decomposition Rules\ text\ @@ -476,7 +476,7 @@ The failure and divergence projections of this operator are also interdependent, sequence operator. Hence, this operator is not monotonic with \\\<^sub>\\, \\\<^sub>\\ and \\\<^sub>\\, but monotonic when their combinations are considered. \ -subsection*["processes"::tc,main_author="Some(@{author ''safouan''}::author)", +subsection*["processes"::technical,main_author="Some(@{author ''safouan''}::author)", main_author="Some(@{author ''lina''}::author)"] \Reference Processes and their Properties\ text\ @@ -597,7 +597,7 @@ then it may still be livelock-free. % This makes sense since livelocks are worse \ -section*["advanced"::tc,main_author="Some(@{author ''safouan''}::author)",level="Some 3"] +section*["advanced"::technical,main_author="Some(@{author ''safouan''}::author)",level="Some 3"] \Advanced Verification Techniques\ text\ @@ -612,7 +612,7 @@ verification. In the latter case, we present an approach to a verification of a architecture, in this case a ring-structure of arbitrary size. 
\ -subsection*["illustration"::tc,main_author="Some(@{author ''safouan''}::author)", level="Some 3"] +subsection*["illustration"::technical,main_author="Some(@{author ''safouan''}::author)", level="Some 3"] \The General CopyBuffer Example\ text\ We consider the paradigmatic copy buffer example @{cite "Hoare:1985:CSP:3921" and "Roscoe:UCS:2010"} @@ -677,7 +677,7 @@ corollary deadlock_free COPY \ -subsection*["inductions"::tc,main_author="Some(@{author ''safouan''}::author)"] +subsection*["inductions"::technical,main_author="Some(@{author ''safouan''}::author)"] \New Fixed-Point Inductions\ text\ @@ -727,7 +727,7 @@ The astute reader may notice here that if the induction step is weakened (having the base steps require enforcement. \ -subsection*["norm"::tc,main_author="Some(@{author ''safouan''}::author)"] +subsection*["norm"::technical,main_author="Some(@{author ''safouan''}::author)"] \Normalization\ text\ Our framework can reason not only over infinite alphabets, but also over processes parameterized @@ -787,7 +787,7 @@ Summing up, our method consists of four stages: \ -subsection*["dining_philosophers"::tc,main_author="Some(@{author ''safouan''}::author)",level="Some 3"] +subsection*["dining_philosophers"::technical,main_author="Some(@{author ''safouan''}::author)",level="Some 3"] \Generalized Dining Philosophers\ text\ The dining philosophers problem is another paradigmatic example in the \<^csp> literature @@ -879,7 +879,7 @@ for a dozen of philosophers (on a usual machine) due to the exponential combinat Furthermore, our proof is fairly stable against modifications like adding non synchronized events like thinking or sitting down in contrast to model-checking techniques. \ -section*["relatedwork"::tc,main_author="Some(@{author ''lina''}::author)",level="Some 3"] +section*["relatedwork"::technical,main_author="Some(@{author ''lina''}::author)",level="Some 3"] \Related work\ text\ diff --git a/Isabelle_DOF-Ontologies/Conceptual/Conceptual.thy b/Isabelle_DOF-Ontologies/Conceptual/Conceptual.thy index 75bb658..c3ed081 100644 --- a/Isabelle_DOF-Ontologies/Conceptual/Conceptual.thy +++ b/Isabelle_DOF-Ontologies/Conceptual/Conceptual.thy @@ -131,7 +131,7 @@ type_synonym XX = B section\Examples of inheritance \ -doc_class C = XX + +doc_class C = B + z :: "A option" <= None (* A LINK, i.e. an attribute that has a type referring to a document class. Mathematical relations over document items can be modeled. *) diff --git a/Isabelle_DOF-Unit-Tests/Attributes.thy b/Isabelle_DOF-Unit-Tests/Attributes.thy index 2dab8f1..14ba9f3 100644 --- a/Isabelle_DOF-Unit-Tests/Attributes.thy +++ b/Isabelle_DOF-Unit-Tests/Attributes.thy @@ -160,7 +160,7 @@ ML\ @{docitem_attribute a2::omega}; type_synonym ALFACENTAURI = E -update_instance*[omega::ALFACENTAURI, x+="''inition''"] +update_instance*[omega::E, x+="''inition''"] ML\ val s = HOLogic.dest_string ( @{docitem_attribute x::omega}) \ diff --git a/Isabelle_DOF-Unit-Tests/Concept_OntoReferencing.thy b/Isabelle_DOF-Unit-Tests/Concept_OntoReferencing.thy index 16ab498..6d3ad8c 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_OntoReferencing.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_OntoReferencing.thy @@ -144,8 +144,8 @@ update_instance*[f::F,r:="[@{thm ''Concept_OntoReferencing.some_proof''}]"] text\ ..., mauris amet, id elit aliquam aptent id, ... 
@{docitem \a\} \ (*>*) text\Here we add and maintain a link that is actually modeled as m-to-n relation ...\ -update_instance*[f::F,b:="{(@{docitem \a\}::A,@{docitem \c1\}::C), - (@{docitem \a\}, @{docitem \c2\})}"] +update_instance*[f::F,b:="{(@{A \a\}::A,@{C \c1\}::C), + (@{A \a\}, @{C \c2\})}"] section\Closing the Monitor and testing the Results.\ diff --git a/Isabelle_DOF-Unit-Tests/Concept_TermAntiquotations.thy b/Isabelle_DOF-Unit-Tests/Concept_TermAntiquotations.thy index 2f1933f..ebd64c3 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_TermAntiquotations.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_TermAntiquotations.thy @@ -116,20 +116,22 @@ section\Putting everything together\ text\Major sample: test-item of doc-class \F\ with a relational link between class instances, and links to formal Isabelle items like \typ\, \term\ and \thm\. \ +declare[[ML_print_depth = 10000]] text*[xcv4::F, r="[@{thm ''HOL.refl''}, @{thm \Concept_TermAntiquotations.local_sample_lemma\}]", (* long names required *) - b="{(@{docitem ''xcv1''},@{docitem \xcv2\})}", (* notations \...\ vs. ''...'' *) + b="{(@{A ''xcv1''},@{C \xcv2\})}", (* notations \...\ vs. ''...'' *) s="[@{typ \int list\}]", properties = "[@{term \H \ H\}]" (* notation \...\ required for UTF8*) ]\Lorem ipsum ...\ - +declare[[ML_print_depth = 20]] text*[xcv5::G, g="@{thm \HOL.sym\}"]\Lorem ipsum ...\ text\... and here we add a relation between @{docitem \xcv3\} and @{docitem \xcv2\} into the relation \verb+b+ of @{docitem \xcv5\}. Note that in the link-relation, -a @{typ "C"}-type is required, but a @{typ "G"}-type is offered which is legal in -\verb+Isa_DOF+ because of the sub-class relation between those classes: \ -update_instance*[xcv4::F, b+="{(@{docitem ''xcv3''},@{docitem ''xcv5''})}"] +a @{typ "C"}-type is required, so if a @{typ "G"}-type is offered, it is considered illegal +in \verb+Isa_DOF+ despite the sub-class relation between those classes: \ +update_instance-assert-error[xcv4::F, b+="{(@{docitem ''xcv3''},@{docitem ''xcv5''})}"] +\Type unification failed\ text\And here is the results of some ML-term antiquotations:\ ML\ @{docitem_attribute b::xcv4} \ diff --git a/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy b/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy index c23203b..17e8b9d 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy @@ -187,7 +187,7 @@ to update the instance @{docitem \xcv4\}: \ update_instance-assert-error[xcv4::F, b+="{(@{A ''xcv3''},@{G ''xcv5''})}"] - \type of attribute: Conceptual.F.b does not fit to term\ + \Type unification failed: Clash of types\ section\\<^theory_text>\assert*\-Annotated assertion-commands\ @@ -225,11 +225,11 @@ text\... and here we reference @{A \assertionA\}.\ (*>*) assert*\evidence @{result \resultProof\} = evidence @{result \resultProof2\}\ -text\The optional evaluator of \value*\ and \assert*\ must be specified after the meta arguments:\ -value* [optional_test_A::A, x=6] [nbe] \filter (\\. A.x \ > 5) @{A_instances}\ +text\The optional evaluator of \value*\ and \assert*\ must be specified before the meta arguments:\ +value* [nbe] [optional_test_A::A, x=6] \filter (\\. 
A.x \ > 5) @{A_instances}\ -assert* [resultProof3::result, evidence = "proof", property="[@{thm \HOL.sym\}]"] [nbe] - \evidence @{result \resultProof3\} = evidence @{result \resultProof2\}\ +assert* [nbe] [resultProof3::result, evidence = "proof", property="[@{thm \HOL.sym\}]"] + \evidence @{result \resultProof3\} = evidence @{result \resultProof2\}\ text\ The evaluation of @{command "assert*"} can be disabled diff --git a/Isabelle_DOF-Unit-Tests/ROOT b/Isabelle_DOF-Unit-Tests/ROOT index 9fb9052..78d731e 100644 --- a/Isabelle_DOF-Unit-Tests/ROOT +++ b/Isabelle_DOF-Unit-Tests/ROOT @@ -16,6 +16,7 @@ session "Isabelle_DOF-Unit-Tests" = "Isabelle_DOF-Ontologies" + "Cenelec_Test" "OutOfOrderPresntn" "COL_Test" + "Test_Polymorphic_Classes" document_files "root.bib" "figures/A.png" diff --git a/Isabelle_DOF-Unit-Tests/TestKit.thy b/Isabelle_DOF-Unit-Tests/TestKit.thy index fa8d468..16c1c90 100644 --- a/Isabelle_DOF-Unit-Tests/TestKit.thy +++ b/Isabelle_DOF-Unit-Tests/TestKit.thy @@ -23,6 +23,8 @@ keywords "text-" "text-latex" :: document_body and "update_instance-assert-error" :: document_body and "declare_reference-assert-error" :: document_body and "value-assert-error" :: document_body + and "definition-assert-error" :: document_body + and "doc_class-assert-error" :: document_body begin @@ -152,20 +154,55 @@ val _ = val _ = - let fun pass_trans_to_value_cmd (args, (((name, modes), t),src)) trans = - (Value_Command.value_cmd {assert=false} args name modes t @{here} trans - handle ERROR msg => (if error_match src msg - then (writeln ("Correct error: "^msg^": reported.");trans) - else error"Wrong error reported")) + let fun pass_trans_to_value_cmd (args, (((name, modes), t),src)) trans = + let val pos = Toplevel.pos_of trans + in trans |> Toplevel.theory + (fn thy => Value_Command.value_cmd {assert=false} args name modes t pos thy + handle ERROR msg => (if error_match src msg + then (writeln ("Correct error: "^msg^": reported."); thy) + else error"Wrong error reported")) + end in Outer_Syntax.command \<^command_keyword>\value-assert-error\ "evaluate and print term" (ODL_Meta_Args_Parser.opt_attributes -- (Value_Command.opt_evaluator -- Value_Command.opt_modes -- Parse.term -- Parse.document_source) - >> (Toplevel.theory o pass_trans_to_value_cmd)) + >> (pass_trans_to_value_cmd)) end; +val _ = + let fun definition_cmd' meta_args_opt decl params prems spec src bool ctxt = + Local_Theory.background_theory (Value_Command.meta_args_exec meta_args_opt) ctxt + |> (fn ctxt => Definition_Star_Command.definition_cmd decl params prems spec bool ctxt + handle ERROR msg => if error_match src msg + then (writeln ("Correct error: "^msg^": reported.") + ; pair "Bound 0" @{thm refl} + |> pair (Bound 0) + |> rpair ctxt) + else error"Wrong error reported") + in + Outer_Syntax.local_theory' \<^command_keyword>\definition-assert-error\ "constant definition" + (ODL_Meta_Args_Parser.opt_attributes -- + (Scan.option Parse_Spec.constdecl -- (Parse_Spec.opt_thm_name ":" -- Parse.prop) -- + Parse_Spec.if_assumes -- Parse.for_fixes -- Parse.document_source) + >> (fn (meta_args_opt, ((((decl, spec), prems), params), src)) => + #2 oo definition_cmd' meta_args_opt decl params prems spec src)) + end; + + +val _ = + let fun add_doc_class_cmd' ((((overloaded, hdr), (parent, attrs)),((rejects,accept_rex),invars)), src) = + (fn thy => OntoParser.add_doc_class_cmd {overloaded = overloaded} hdr parent attrs rejects accept_rex invars thy + handle ERROR msg => (if error_match src msg + then (writeln ("Correct error: "^msg^": 
reported."); thy) + else error"Wrong error reported")) + in + Outer_Syntax.command \<^command_keyword>\doc_class-assert-error\ + "define document class" + ((OntoParser.parse_doc_class -- Parse.document_source) + >> (Toplevel.theory o add_doc_class_cmd')) + end val _ = Outer_Syntax.command ("text-latex", \<^here>) "formal comment (primary style)" diff --git a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy new file mode 100644 index 0000000..6791f38 --- /dev/null +++ b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy @@ -0,0 +1,682 @@ +theory Test_Polymorphic_Classes + imports Isabelle_DOF.Isa_DOF + TestKit +begin + +doc_class title = + short_title :: "string option" <= "None" + +doc_class Author = + email :: "string" <= "''''" + +datatype classification = SIL0 | SIL1 | SIL2 | SIL3 | SIL4 + +doc_class abstract = + keywordlist :: "string list" <= "[]" + safety_level :: "classification" <= "SIL3" + +doc_class text_section = + authored_by :: "Author set" <= "{}" + level :: "int option" <= "None" + +doc_class ('a::one, 'b, 'c) test0 = text_section + + testa :: "'a list" + testb :: "'b list" + testc :: "'c list" + +typ\('a, 'b, 'c) test0\ +typ\('a, 'b, 'c, 'd) test0_scheme\ + +find_consts name:"test0" +find_theorems name:"test0" + + +doc_class 'a test1 = text_section + + test1 :: "'a list" + invariant author_finite_test :: "finite (authored_by \)" + invariant force_level_test :: "(level \) \ None \ the (level \) > 1" + +find_consts name:"test1*inv" +find_theorems name:"test1*inv" + +text*[church::Author, email="\b\"]\\ +text\@{Author "church"}\ +value*\@{Author \church\}\ + +text\\<^value_>\@{Author \church\}\\ + +doc_class ('a, 'b) test2 = "'a test1" + + test2 :: "'b list" +type_synonym ('a, 'b) test2_syn = "('a, 'b) test2" + +find_theorems name:"test2" + +declare [[invariants_checking_with_tactics]] +text*[testtest::"('a, int) test2", level = "Some 2", authored_by = "{@{Author \church\}}", test2 = "[1]"]\\ +value*\test2 @{test2 \testtest\}\ + +text*[testtest2''::"(nat, int) test2", test1 = "[2::nat, 3]", test2 = "[4::int, 5]", level = "Some (2::int)"]\\ +value*\test1 @{test2 \testtest2''\}\ +declare [[invariants_checking_with_tactics = false]] + +ML\ +val t = Syntax.parse_term \<^context> "@{test2 \testtest\}" + +\ +ML\ +val t = \<^term>\test2.make 8142730 Test_Parametric_Classes_2_test2_authored_by_Attribute_Not_Initialized Test_Parametric_Classes_2_test2_level_Attribute_Not_Initialized Test_Parametric_Classes_2_test2_test1_Attribute_Not_Initialized + Test_Parametric_Classes_2_test2_test2_Attribute_Not_Initialized + \authored_by := bot, level := None\ \ +\ + +text\test2 = "[1::'a::one]" should be test2 = "[1::int]" because the type of testtest4 is ('a::one, int) test2:\ +text-assert-error[testtest4::"('a::one, int) test2", level = "Some 2", authored_by = "{@{Author \church\}}", test2 = "[1::'a::one]"]\\ + \Type unification failed\ +text\Indeed this definition fails:\ +definition-assert-error testtest2::"('a::one, int) test2" where "testtest2 \ + test2.make 11953346 + {@{Author \church\}} + (Some 2) + [] + [] + \authored_by := bot + , level := None, level := Some 2 + , authored_by := insert \Author.tag_attribute = 11953164, email = []\ bot + , test2.test2 := [1::('a::one)]\ " +\Type unification failed\ + +text\For now, no more support of type synonyms as parent:\ +doc_class ('a, 'b, 'c) A = +a :: "'a list" +b :: "'b list" +c :: "'c list" +type_synonym ('a, 'b, 'c) A_syn = "('a, 'b, 'c) A" + +doc_class-assert-error ('a, 'b, 'c, 'd) 
B = "('b, 'c, 'd) A_syn" + +d ::"'a::one list" <= "[1]" +\Undefined onto class: "A_syn"\ + + +declare[[invariants_checking_with_tactics]] + +definition* testauthor0 where "testauthor0 \ \Author.tag_attribute = 5, email = \test_author_email\\" +definition* testauthor :: "Author" where "testauthor \ \Author.tag_attribute = 5, email = \test_author_email\\" +definition* testauthor2 :: "Author" where "testauthor2 \ \Author.tag_attribute = 5, email = \test_author_email\\ \email := \test_author_email_2\ \" +definition* testauthor3 :: "Author" where "testauthor3 \ testauthor \email := \test_author_email_2\ \" + +ML\ +val ctxt = \<^context> +val input0 = Syntax.read_input "@{Author \church\}" +val source = Syntax.read_input "\<^term_>\@{Author \church\}\" +val input = source +val tt = Document_Output.output_document ctxt {markdown = false} input +\ + +doc_class ('a, 'b) elaborate1 = +a :: "'a list" +b :: "'b list" + +doc_class ('a, 'b) elaborate2 = +c :: "('a, 'b) elaborate1 list" + +doc_class ('a, 'b) elaborate3 = +d :: "('a, 'b) elaborate2 list" + +text*[test_elaborate1::"('a::one, 'b) elaborate1", a = "[1]"]\\ + +term*\@{elaborate1 \test_elaborate1\}\ +value* [nbe]\@{elaborate1 \test_elaborate1\}\ + + +text*[test_elaborate2::"('a::one, 'b) elaborate2", c = "[@{elaborate1 \test_elaborate1\}]"]\\ + +text*[test_elaborate3::"('a::one, 'b) elaborate3", d = "[@{elaborate2 \test_elaborate2\}]"]\\ + +term*\(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \test_elaborate3\})))\ +value*\(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \test_elaborate3\})))\ + + +text\ +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +So the following definition only works because the parameter of the class is also \'a\.\ +declare[[ML_print_depth = 10000]] +doc_class 'a elaborate4 = +d :: "'a::one list" <= "(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \test_elaborate3\})))" +declare[[ML_print_depth = 20]] + +declare[[ML_print_depth = 10000]] +text*[test_elaborate4::"'a::one elaborate4"]\\ +declare[[ML_print_depth = 20]] + + +text\Bug: +As the term antiquotation is considered as a ground term, +its type \<^typ>\'a::one list\ conflicts with the type of the attribute \<^typ>\int list\. +To support the instantiataion of the term antiquatation as an \<^typ>\int list\, +the term antiquatation should have the same behavior as a constant definition, +which is not the case for now.\ +declare[[ML_print_depth = 10000]] +doc_class-assert-error elaborate4' = +d :: "int list" <= "(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \test_elaborate3\})))" +\Type unification failed\ +declare[[ML_print_depth = 20]] + +text\The behavior we want to support: \ + +definition one_list :: "'a::one list" where "one_list \ [1]" + +text\the constant \<^const>\one_list\ can be instantiate as an \<^typ>\int list\:\ +doc_class elaborate4'' = +d :: "int list" <= "one_list" + +declare[[ML_print_depth = 10000]] +text*[test_elaborate4''::"elaborate4''"]\\ +declare[[ML_print_depth = 20]] + + +term*\concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\}))\ +value*\concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\}))\ + +text\ +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. 
+So the following definition only works because the parameter of the class is also \'a\.\ +declare[[ML_print_depth = 10000]] +doc_class 'a elaborate5 = +d :: "'a::one list" <= "concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\}))" +declare[[ML_print_depth = 20]] + +text\Bug: But when defining an instance, as we use a \'b\ variable to specify the type +of the instance (\<^typ>\'b::one elaborate5\, then the unification fails\ +declare[[ML_print_depth = 10000]] +text-assert-error[test_elaborate5::"'b::one elaborate5"]\\ +\Inconsistent sort constraints for type variable "'b"\ +declare[[ML_print_depth = 20]] + +text\Bug: +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +So it is not compatible with the type of the attribute \<^typ>\'a::numeral list\\ +doc_class-assert-error 'a elaborate5' = +d :: "'a::numeral list" <= "concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\}))" +\Sort constraint\ + +text\The behavior we want to support: \ + +text\the constant \<^const>\one_list\ can be instantiate as an \<^typ>\'a::numeral list\:\ +doc_class 'a elaborate5'' = +d :: "'a::numeral list" <= "one_list" + + +text*[test_elaborate1a::"('a::one, int) elaborate1", a = "[1]", b = "[2]"]\\ + +term*\@{elaborate1 \test_elaborate1a\}\ +value* [nbe]\@{elaborate1 \test_elaborate1a\}\ + +text*[test_elaborate2a::"('a::one, int) elaborate2", c = "[@{elaborate1 \test_elaborate1a\}]"]\\ + +text*[test_elaborate3a::"('a::one, int) elaborate3", d = "[@{elaborate2 \test_elaborate2a\}]"]\\ + +text\ +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +So the following definition only works because the parameter of the class is also \'a\.\ +definition* test_elaborate3_embedding ::"'a::one list" + where "test_elaborate3_embedding \ (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \test_elaborate3a\})))" + +text\Bug: +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +So it is not compatible with the specified type of the definition \<^typ>\int list\:\ +definition-assert-error test_elaborate3_embedding'::"int list" + where "test_elaborate3_embedding' \ (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \test_elaborate3a\})))" +\Type unification failed\ + +term*\@{elaborate1 \test_elaborate1a\}\ +value* [nbe]\@{elaborate1 \test_elaborate1a\}\ + + +record ('a, 'b) elaborate1' = +a :: "'a list" +b :: "'b list" + +record ('a, 'b) elaborate2' = +c :: "('a, 'b) elaborate1' list" + +record ('a, 'b) elaborate3' = +d :: "('a, 'b) elaborate2' list" + +doc_class 'a one = +a::"'a list" + +text*[test_one::"'a::one one", a = "[1]"]\\ + +value* [nbe] \@{one \test_one\}\ + +term*\a @{one \test_one\}\ + +text\Bug: +The term antiquotation is considered a ground term. +Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. 
+So it is not compatible with the specified type of the definition \<^typ>\('b::one, 'a::numeral) elaborate1'\ +because the term antiquotation can not be instantiate as a \<^typ>\'b::one list\ +and the \'a\ is checked against the \'a::numeral\ instance type parameter:\ +definition-assert-error test_elaborate1'::"('b::one, 'a::numeral) elaborate1'" + where "test_elaborate1' \ \ elaborate1'.a = a @{one \test_one\}, b = [2] \" +\Sort constraint\ + +text\This is the same behavior as the following:\ +definition-assert-error test_elaborate10::"('b::one, 'a::numeral) elaborate1'" + where "test_elaborate10 \ \ elaborate1'.a = [1::'a::one], b = [2] \" +\Sort constraint\ + +definition-assert-error test_elaborate11::"('b::one, 'c::numeral) elaborate1'" + where "test_elaborate11 \ \ elaborate1'.a = [1::'a::one], b = [2] \" +\Type unification failed\ + +text\So this works:\ +definition* test_elaborate1''::"('a::one, 'b::numeral) elaborate1'" + where "test_elaborate1'' \ \ elaborate1'.a = a @{one \test_one\}, b = [2] \" + +term \elaborate1'.a test_elaborate1''\ +value [nbe] \elaborate1'.a test_elaborate1''\ + +text\But if we embed the term antiquotation in a definition, +the type unification works:\ +definition* onedef where "onedef \ @{one \test_one\}" + +definition test_elaborate1'''::"('b::one, 'a::numeral) elaborate1'" + where "test_elaborate1''' \ \ elaborate1'.a = a onedef, b = [2] \" + +value [nbe] \elaborate1'.a test_elaborate1'''\ + + +definition test_elaborate2'::"(int, 'b::numeral) elaborate2'" + where "test_elaborate2' \ \ c = [test_elaborate1''] \" + +definition test_elaborate3'::"(int, 'b::numeral) elaborate3'" + where "test_elaborate3' \ \ d = [test_elaborate2'] \" + + +doc_class 'a test3' = +test3 :: "int" +test3' :: "'a list" + +text*[testtest30::"'a::one test3'", test3'="[1]"]\\ +text-assert-error[testtest30::"'a test3'", test3'="[1]"]\\ +\Type unification failed: Variable\ + +find_consts name:"test3'.test3" +definition testone :: "'a::one test3'" where "testone \ \tag_attribute = 5, test3 = 3, test3' = [1] \" +definition* testtwo :: "'a::one test3'" where "testtwo \ \tag_attribute = 5, test3 = 1, test3' = [1] \\ test3 := 1\" + +text*[testtest3'::"'a test3'", test3 = "1"]\\ + +declare [[show_sorts = false]] +definition* testtest30 :: "'a test3'" where "testtest30 \ \tag_attribute = 12, test3 = 2, test3' = [] \" +update_instance*[testtest3'::"'a test3'", test3 := "2"] + +ML\ +val t = @{value_ [nbe] \test3 @{test3' \testtest3'\}\} +val tt = HOLogic.dest_number t +\ + +text\@{value_ [] [nbe] \test3 @{test3' \testtest3'\}\}\ + +update_instance*[testtest3'::"'a test3'", test3 += "2"] + +ML\ +val t = @{value_ [nbe] \test3 @{test3' \testtest3'\}\} +val tt = HOLogic.dest_number t +\ + +value\test3 \ tag_attribute = 1, test3 = 2, test3' = [2::int, 3] \\ +value\test3 \ tag_attribute = 1, test3 = 2, test3' = [2::int, 3] \\ +find_consts name:"test3'.test3" + +ML\ +val test_value = @{value_ \@{test3' \testtest3'\}\} + +\ +declare [[show_sorts = false]] +update_instance*[testtest3'::"'a test3'", test3 += "3"] +declare [[show_sorts = false]] +value*\test3 @{test3' \testtest3'\}\ +value\test3 \ tag_attribute = 12, test3 = 5, test3' = AAAAAA\\ + +find_consts name:"test3'.test3" + +text*[testtest3''::"int test3'", test3 = "1"]\\ + +update_instance*[testtest3''::"int test3'", test3' += "[3]"] + +value*\test3' @{test3' \testtest3''\}\ + +update_instance*[testtest3''::"int test3'", test3' := "[3]"] + +value*\test3' @{test3' \testtest3''\}\ + +update_instance*[testtest3''::"int test3'", test3' += "[2,5]"] + 
+value*\test3' @{test3' \testtest3''\}\ + +definition testeq where "testeq \ \x. x" +find_consts name:"test3'.ma" + +text-assert-error[testtest3''::"int test3'", test3 = "1", test3' = "[3::'a::numeral]"]\\ + \Type unification failed\ + +text-assert-error[testtest3''::"int test3'", test3 = "1", test3' = "[3]"]\\ + \Duplicate instance declaration\ + + +declare[[ML_print_depth = 10000]] +definition-assert-error testest3''' :: "int test3'" + where "testest3''' \ \ tag_attribute = 12, test3 = 1, test3' = [2]\\ test3' := [3::'a::numeral]\" +\Type unification failed\ +declare[[ML_print_depth = 20]] + +value* \test3 @{test3' \testtest3''\}\ +value* \\ tag_attribute = 12, test3 = 1, test3' = [2]\\ test3' := [3::int]\\ +value* \test3 (\ tag_attribute = 12, test3 = 1, test3' = [2]\\ test3' := [3::int]\)\ +term*\@{test3' \testtest3''\}\ + +ML\val t = \<^term_>\test3 @{test3' \testtest3''\}\\ + +value\test3 \ tag_attribute = 12, test3 = 2, test3' = [2::int ,3]\\ + +find_consts name:"test3'.test3" +find_consts name:"Isabelle_DOF_doc_class_test3'" +update_instance*[testtest3''::"int test3'", test3 := "2"] +ML\ +val t = @{value_ [nbe] \test3 @{test3' \testtest3''\}\} +val tt = HOLogic.dest_number t |> snd +\ + +doc_class 'a testa = +a:: "'a set" +b:: "int set" + +text*[testtesta::"'a testa", b = "{2}"]\\ +update_instance*[testtesta::"'a testa", b += "{3}"] + +ML\ +val t = @{value_ [nbe] \b @{testa \testtesta\}\} +val tt = HOLogic.dest_set t |> map (HOLogic.dest_number #> snd) +\ + +update_instance-assert-error[testtesta::"'a::numeral testa", a := "{2::'a::numeral}"] +\incompatible classes:'a Test_Polymorphic_Classes.testa:'a::numeral Test_Polymorphic_Classes.testa\ + +text*[testtesta'::"'a::numeral testa", a = "{2}"]\\ + +update_instance*[testtesta'::"'a::numeral testa", a += "{3}"] + +ML\ +val t = @{value_ [nbe] \a @{testa \testtesta'\}\} +\ + +update_instance-assert-error[testtesta'::"'a::numeral testa", a += "{3::int}"] + \Type unification failed\ + +definition-assert-error testtesta'' :: "'a::numeral testa" + where "testtesta'' \ \tag_attribute = 5, a = {1}, b = {1} \\ a := {1::int}\" +\Type unification failed\ + +update_instance*[testtesta'::"'a::numeral testa", b := "{3::int}"] +ML\ +val t = @{value_ [nbe] \b @{testa \testtesta'\}\} +\ + +value* [nbe] \b @{testa \testtesta'\}\ + +definition testtesta'' :: "'a::numeral testa" + where "testtesta'' \ \tag_attribute = 5, a = {1}, b = {1} \\ b := {2::int}\" + +value [nbe]\b testtesta''\ + +doc_class 'a test3 = +test3 :: "'a list" +type_synonym 'a test3_syn = "'a test3" + +text*[testtest3::"int test3", test3 = "[1]"]\\ +update_instance*[testtest3::"int test3", test3 := "[2]"] +ML\ +val t = \<^term_>\test3 @{test3 \testtest3\}\ +val tt = \<^value_>\test3 @{test3 \testtest3\}\ |> HOLogic.dest_list |> map HOLogic.dest_number +\ + +update_instance*[testtest3::"int test3", test3 += "[3]"] +value*\test3 @{test3 \testtest3\}\ + + +doc_class ('a, 'b) test4 = "'a test3" + + test4 :: "'b list" + +definition-assert-error testtest0'::"('a::one, int) test4" where "testtest0' \ + test4.make 11953346 + [] [1::('a::one)]" +\Type unification failed\ + +definition-assert-error testtest0''::"('a, int) test4" where "testtest0'' \ + test4.make 11953346 + [1] Test_Parametric_Classes_2_test4_test4_Attribute_Not_Initialized" +\Type unification failed\ + +text\Must fail because the equivalent definition +\testtest0'\ below fails +due to the constraint in the where [1::('a::one)] is not an \<^typ>\int list\ +but an \<^typ>\'a::one list\ list \ 
+text-assert-error[testtest0::"('a::one, int) test4", test4 = "[1::'a::one]"]\\ +\Type unification failed\ +update_instance-assert-error[testtest0::"('a::one, int) test4"] + \Undefined instance: "testtest0"\ + +value-assert-error\@{test4 \testtest0\}\\Undefined instance: "testtest0"\ + +definition testtest0''::"('a, int) test4" where "testtest0'' \ + \ tag_attribute = 11953346, test3 = [], test4 = [1]\\test4 := [2]\" + +definition testtest0'''::"('a, int) test4" where "testtest0''' \ + \ tag_attribute = 11953346, test3 = [], test4 = [1]\\test4 := [2]\" + + +value [nbe] \test3 testtest0''\ + +type_synonym notion = string + +doc_class Introduction = text_section + + authored_by :: "Author set" <= "UNIV" + uses :: "notion set" + invariant author_finite :: "finite (authored_by \)" + and force_level :: "(level \) \ None \ the (level \) > 1" + +doc_class claim = Introduction + + based_on :: "notion list" + +doc_class technical = text_section + + formal_results :: "thm list" + +doc_class "definition" = technical + + is_formal :: "bool" + property :: "term list" <= "[]" + +datatype kind = expert_opinion | argument | "proof" + +doc_class result = technical + + evidence :: kind + property :: "thm list" <= "[]" + invariant has_property :: "evidence \ = proof \ property \ \ []" + +doc_class example = technical + + referring_to :: "(notion + definition) set" <= "{}" + +doc_class conclusion = text_section + + establish :: "(claim \ result) set" + invariant establish_defined :: "\ x. x \ Domain (establish \) + \ (\ y \ Range (establish \). (x, y) \ establish \)" + +text\Next we define some instances (docitems): \ + +declare[[invariants_checking_with_tactics = true]] + +text*[church1::Author, email="\church@lambda.org\"]\\ + +text*[resultProof::result, evidence = "proof", property="[@{thm \HOL.refl\}]"]\\ +text*[resultArgument::result, evidence = "argument"]\\ + +text\The invariants \<^theory_text>\author_finite\ and \<^theory_text>\establish_defined\ can not be checked directly + and need a little help. + We can set the \invariants_checking_with_tactics\ theory attribute to help the checking. 
+ It will enable a basic tactic, using unfold and auto:\ + +declare[[invariants_checking_with_tactics = true]] + +text*[curry::Author, email="\curry@lambda.org\"]\\ +text*[introduction2::Introduction, authored_by = "{@{Author \church\}}", level = "Some 2"]\\ +(* When not commented, should violated the invariant: +update_instance*[introduction2::Introduction + , authored_by := "{@{Author \church\}}" + , level := "Some 1"] +*) + +text*[introduction_test_parsed_elaborate::Introduction, authored_by = "authored_by @{Introduction \introduction2\}", level = "Some 2"]\\ +term*\authored_by @{Introduction \introduction_test_parsed_elaborate\}\ +value*\authored_by @{Introduction \introduction_test_parsed_elaborate\}\ +text*[introduction3::Introduction, authored_by = "{@{Author \church\}}", level = "Some 2"]\\ +text*[introduction4::Introduction, authored_by = "{@{Author \curry\}}", level = "Some 4"]\\ + +text*[resultProof2::result, evidence = "proof", property="[@{thm \HOL.sym\}]"]\\ + +text\Then we can evaluate expressions with instances:\ + +term*\authored_by @{Introduction \introduction2\} = authored_by @{Introduction \introduction3\}\ +value*\authored_by @{Introduction \introduction2\} = authored_by @{Introduction \introduction3\}\ +value*\authored_by @{Introduction \introduction2\} = authored_by @{Introduction \introduction4\}\ + +value*\@{Introduction \introduction2\}\ + +value*\{@{Author \curry\}} = {@{Author \church\}}\ + +term*\property @{result \resultProof\} = property @{result \resultProof2\}\ +value*\property @{result \resultProof\} = property @{result \resultProof2\}\ + +value*\evidence @{result \resultProof\} = evidence @{result \resultProof2\}\ + +declare[[invariants_checking_with_tactics = false]] + +declare[[invariants_strict_checking = false]] + +doc_class test_A = + level :: "int option" + x :: int + +doc_class test_B = + level :: "int option" + x :: "string" (* attributes live in their own name-space *) + y :: "string list" <= "[]" (* and can have arbitrary type constructors *) + (* LaTeX may have problems with this, though *) + +text\We may even use type-synonyms for class synonyms ...\ +type_synonym test_XX = test_B + +doc_class test_C0 = test_B + + z :: "test_A option" <= None (* A LINK, i.e. an attribute that has a type + referring to a document class. Mathematical + relations over document items can be modeled. *) + g :: "thm" (* a reference to the proxy-type 'thm' allowing + + to denote references to theorems inside attributes *) + + +doc_class test_C = test_B + + z :: "test_A option" <= None (* A LINK, i.e. an attribute that has a type + referring to a document class. Mathematical + relations over document items can be modeled. *) + g :: "thm" (* a reference to the proxy-type 'thm' allowing + + to denote references to theorems inside attributes *) + +datatype enum = X1 | X2 | X3 (* we add an enumeration type ... *) + + +doc_class test_D = test_B + + x :: "string" <= "\def \\" (* overriding default *) + a1 :: enum <= "X2" (* class - definitions may be mixed + with arbitrary HOL-commands, thus + also local definitions of enumerations *) + a2 :: int <= 0 + +doc_class test_E = test_D + + x :: "string" <= "''qed''" (* overriding default *) + +doc_class test_G = test_C + + g :: "thm" <= "@{thm \HOL.refl\}" (* warning overriding attribute expected*) + +doc_class 'a test_F = + properties :: "term list" + r :: "thm list" + u :: "file" + s :: "typ list" + b :: "(test_A \ 'a test_C_scheme) set" <= "{}" (* This is a relation link, roughly corresponding + to an association class. 
It can be used to track + claims to result - relations, for example.*) + b' :: "(test_A \ 'a test_C_scheme) list" <= "[]" + invariant br :: "r \ \ [] \ card(b \) \ 3" + and br':: "r \ \ [] \ length(b' \) \ 3" + and cr :: "properties \ \ []" + +lemma*[l::test_E] local_sample_lemma : + "@{thm \refl\} = @{thm ''refl''}" by simp + \ \un-evaluated references are similar to + uninterpreted constants. Not much is known + about them, but that doesn't mean that we + can't prove some basics over them...\ + +text*[xcv1::test_A, x=5]\Lorem ipsum ...\ +text*[xcv2::test_C, g="@{thm ''HOL.refl''}"]\Lorem ipsum ...\ +text*[xcv3::test_A, x=7]\Lorem ipsum ...\ + +text\Bug: For now, the implementation is no more compatible with the docitem term-antiquotation:\ +text-assert-error[xcv10::"unit test_F", r="[@{thm ''HOL.refl''}, + @{thm \local_sample_lemma\}]", (* long names required *) + b="{(@{docitem ''xcv1''},@{docitem \xcv2\})}", (* notations \...\ vs. ''...'' *) + s="[@{typ \int list\}]", + properties = "[@{term \H \ H\}]" (* notation \...\ required for UTF8*) +]\Lorem ipsum ...\\Type unification failed\ + +text*[xcv11::"unit test_F", r="[@{thm ''HOL.refl''}, + @{thm \local_sample_lemma\}]", (* long names required *) + b="{(@{test_A ''xcv1''},@{test_C \xcv2\})}", (* notations \...\ vs. ''...'' *) + s="[@{typ \int list\}]", + properties = "[@{term \H \ H\}]" (* notation \...\ required for UTF8*) +]\Lorem ipsum ...\ + +value*\b @{test_F \xcv11\}\ + +typ\unit test_F\ + +text*[xcv4::"unit test_F", r="[@{thm ''HOL.refl''}, + @{thm \local_sample_lemma\}]", (* long names required *) + b="{(@{test_A ''xcv1''},@{test_C \xcv2\})}", (* notations \...\ vs. ''...'' *) + s="[@{typ \int list\}]", + properties = "[@{term \H \ H\}]" (* notation \...\ required for UTF8*) +]\Lorem ipsum ...\ + +value*\b @{test_F \xcv4\}\ + +text*[xcv5::test_G, g="@{thm \HOL.sym\}"]\Lorem ipsum ...\ + +update_instance*[xcv4::"unit test_F", b+="{(@{test_A ''xcv3''},@{test_C ''xcv2''})}"] + +update_instance-assert-error[xcv4::"unit test_F", b+="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"] + \Type unification failed: Clash of types\ + + + +typ\unit test_G_ext\ +typ\\test_G.tag_attribute :: int\\ +text*[xcv6::"\test_G.tag_attribute :: int\ test_F", b="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"]\\ + +end \ No newline at end of file diff --git a/Isabelle_DOF/ontologies/scholarly_paper/scholarly_paper.thy b/Isabelle_DOF/ontologies/scholarly_paper/scholarly_paper.thy index 732200f..c62d28b 100644 --- a/Isabelle_DOF/ontologies/scholarly_paper/scholarly_paper.thy +++ b/Isabelle_DOF/ontologies/scholarly_paper/scholarly_paper.thy @@ -165,11 +165,11 @@ text\The intended use for the \doc_class\es \<^verbatim>\\math_example\ (or \<^verbatim>\math_ex\ for short) are \<^emph>\informal\ descriptions of semi-formal definitions (by inheritance). 
Math-Examples can be made referentiable triggering explicit, numbered presentations.\ -doc_class math_motivation = tc + +doc_class math_motivation = technical + referentiable :: bool <= False type_synonym math_mtv = math_motivation -doc_class math_explanation = tc + +doc_class math_explanation = technical + referentiable :: bool <= False type_synonym math_exp = math_explanation @@ -207,7 +207,7 @@ datatype math_content_class = text\Instances of the \doc_class\ \<^verbatim>\math_content\ are by definition @{term "semiformal"}; they may be non-referential, but in this case they will not have a @{term "short_name"}.\ -doc_class math_content = tc + +doc_class math_content = technical + referentiable :: bool <= False short_name :: string <= "''''" status :: status <= "semiformal" @@ -516,34 +516,34 @@ subsection\Content in Engineering/Tech Papers \ text\This section is currently experimental and not supported by the documentation generation backend.\ -doc_class engineering_content = tc + +doc_class engineering_content = technical + short_name :: string <= "''''" status :: status type_synonym eng_content = engineering_content -doc_class "experiment" = eng_content + +doc_class "experiment" = engineering_content + tag :: "string" <= "''''" -doc_class "evaluation" = eng_content + +doc_class "evaluation" = engineering_content + tag :: "string" <= "''''" -doc_class "data" = eng_content + +doc_class "data" = engineering_content + tag :: "string" <= "''''" -doc_class tech_definition = eng_content + +doc_class tech_definition = engineering_content + referentiable :: bool <= True tag :: "string" <= "''''" -doc_class tech_code = eng_content + +doc_class tech_code = engineering_content + referentiable :: bool <= True tag :: "string" <= "''''" -doc_class tech_example = eng_content + +doc_class tech_example = engineering_content + referentiable :: bool <= True tag :: "string" <= "''''" -doc_class eng_example = eng_content + +doc_class eng_example = engineering_content + referentiable :: bool <= True tag :: "string" <= "''''" diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index 7c4f238..efa7d84 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -93,7 +93,7 @@ structure DOF_core = struct datatype onto_class = Onto_Class of - {params : (string * sort) list, (*currently not used *) + {params : (string * sort) list, name : binding, virtual : {virtual : bool}, inherits_from : (typ list * string) option, (* imports *) @@ -115,33 +115,69 @@ struct val get_onto_classes = Onto_Classes.get o Proof_Context.theory_of + (* Get the Onto_Class object from its short name or long name *) fun get_onto_class_global name thy = Name_Space.check (Context.Theory thy) (get_onto_classes (Proof_Context.init_global thy)) (name, Position.none) |> #2 - fun resolve_syn thy name = - name |> Syntax.read_typ_global thy - |> Syntax.string_of_typ_global thy - |> YXML.parse_body |> XML.content_of + fun markup2string s = YXML.content_of s + |> Symbol.explode + |> List.filter (fn c => c <> Symbol.DEL) + |> String.concat - (* takes class synonyms into account *) + fun get_name name = + name |> markup2string + |> Symbol.explode_words |> split_last |> #2 + + (* Get the Onto_Class object. + The function does not use the abstract syntax of an onto_class. + and relies entirely on the associated record created with the onto_class and its type. + The function takes, for the name argument, a string that has the same form as + the input of an ML \<^typ>\\ anti-quotation to get a record type. 
+ To get the type of the record ('a, 'b) A, we'll use \<^typ>\('a, 'b) A\. + So the name argument will be "('a, 'b) A" *) + (* does not take class synonyms into account *) fun get_onto_class_global' name thy = - let val name' = name |> resolve_syn thy + let val name' = name |> get_name in Name_Space.check (Context.Theory thy) (get_onto_classes (Proof_Context.init_global thy)) (name', Position.none) |> #2 end + (* Get the full-qualified name of an onto_class from its short name or long name *) fun get_onto_class_name_global name thy = Name_Space.check (Context.Theory thy) (get_onto_classes (Proof_Context.init_global thy)) (name, Position.none) |> #1 - (* takes class synonyms into account *) + (* Get the full-qualified name of an onto-class. + The function does not use the abstract syntax of an onto_class. + and relies entirely on the associated record created with the onto_class and its type. + The function takes, for the name argument, a string that has the same form as + the input of an ML \<^typ>\\ anti-quotation to get a record type. + To get the type of the record ('a, 'b) A, we'll use \<^typ>\('a, 'b) A\. + So the name argument will be "('a, 'b) A". *) + (* does not take class synonyms into account *) fun get_onto_class_name_global' name thy = - let val name' = name |> resolve_syn thy + let val name' = name |> get_name in Name_Space.check (Context.Theory thy) (get_onto_classes (Proof_Context.init_global thy)) (name', Position.none) |> #1 end + (* Get the full-qualified name of an onto-class, + its string representation with arguments and sorts, and its type + from its short or long name. + To get the type of the record ('a::one, 'b) A, we'll use \<^typ>\('a::one, 'b) A\. + So the name argument will be "('a::one, 'b) A" *) + (* does not takes class synonyms into account *) + fun get_onto_class_cid thy name = + let val cid_typ = name |> Syntax.read_typ_global thy + val (args, name') = name |> Symbol.explode_words |> split_last + val name'' = (name', Position.none) |> Name_Space.check (Context.Theory thy) + (get_onto_classes (Proof_Context.init_global thy)) + |> fst + val args_cid = append args [name''] |> space_implode Symbol.space + in ((name'', args_cid), cid_typ) + end + fun check_onto_class ctxt = #1 o Name_Space.check (Context.Proof ctxt) (get_onto_classes ctxt); @@ -198,7 +234,7 @@ struct rejectS , rex, invs) => (params, name, virtual, inherits_from, attribute_decl, rejectS , rex, f invs)) - fun rep_onto_class cid thy = get_onto_class_global' cid thy |> (fn Onto_Class rep => rep) + fun rep_onto_class cid thy = get_onto_class_global cid thy |> (fn Onto_Class rep => rep) fun params_of cid = #params o rep_onto_class cid fun name_of cid = #name o rep_onto_class cid @@ -228,16 +264,18 @@ struct val default_cid = "text" (* the top (default) document class: everything is a text.*) +(* Due to the implementation, is_subclass0 is _NOT_ compatible with DOF classes type synonyms, + so they must be resolved to full class names before use. 
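+   For illustration only (the synonym is the one declared in the unit tests above): a synonym such as
+   test_XX, declared there for test_B, has to be replaced by the fully qualified class name of test_B
+   first; only proper class names are registered in the onto-class name space consulted below.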
*) fun is_subclass0 s t ctxt = let fun get id = if id = default_cid then default_cid - else check_onto_class ctxt (id, Position.none) + else get_onto_class_name_global id (Proof_Context.theory_of ctxt) val s' = get s val t' = get t fun father_is_sub s = case get_onto_class_global s (Proof_Context.theory_of ctxt) of - (Onto_Class {inherits_from=NONE, ...}) => s' = t' - | (Onto_Class {inherits_from=SOME (_,s''), ...}) => - s'' = t' orelse father_is_sub s'' + (Onto_Class {inherits_from=NONE, ...}) => s = t' + | (Onto_Class {inherits_from=SOME (_, s''), ...}) => + s'' = t' orelse father_is_sub s'' val s'_defined = s' |> Name_Space.declared (get_onto_classes ctxt |> Name_Space.space_of_table) in s' = t' orelse @@ -694,9 +732,7 @@ fun check_reject_atom term = fun define_doc_class_global (params', binding) parent fields rexp reject_Atoms invs virtual thy = (* This operation is executed in a context where the record has already been defined, but its conversion into a class is not yet done. *) - let (* takes class synonyms into account *) - val parent' = Option.map (apsnd (fn x => get_onto_class_name_global' x thy)) parent - val rejectS = map (Syntax.read_term_global thy) reject_Atoms; + let val rejectS = map (Syntax.read_term_global thy) reject_Atoms; val _ = map (check_reject_atom) rejectS; val reg_exps = map (Syntax.read_term_global thy) rexp; val _ = map check_regexps reg_exps @@ -707,7 +743,7 @@ fun define_doc_class_global (params', binding) parent fields rexp reject_Atoms i |> (fn pos => error("invariant labels must be unique"^ Position.here pos)) else () val invs' = map (apsnd(Syntax.read_term_global thy)) invs - in thy |> add_onto_class binding (make_onto_class (params', binding, virtual, parent', fields + in thy |> add_onto_class binding (make_onto_class (params', binding, virtual, parent, fields , rejectS, reg_exps, invs')) end @@ -739,17 +775,15 @@ fun define_object_global {define = define} ((oid, pos), instance) thy = add_instance binding instance thy end - fun get_attributes_local cid ctxt = if cid = default_cid then [] else let val cid_long = get_onto_class_name_global cid (Proof_Context.theory_of ctxt) in - case get_onto_class_global cid (Proof_Context.theory_of ctxt) of + case get_onto_class_global cid_long (Proof_Context.theory_of ctxt) of Onto_Class {inherits_from=NONE, attribute_decl = X, ...} => [(cid_long,X)] - | Onto_Class {inherits_from=SOME(_,father), - attribute_decl = X, ...} => - get_attributes_local father ctxt @ [(cid_long,X)] + | Onto_Class {inherits_from=SOME(_, father), attribute_decl = X, ...} => + get_attributes_local father ctxt @ [(cid_long,X)] end fun get_attributes cid thy = get_attributes_local cid (Proof_Context.init_global thy) @@ -780,8 +814,8 @@ fun get_attribute_info_local (*long*)cid attr ctxt : attributes_info option= typ = ty}) end -fun get_attribute_info (*long*)cid attr thy = - get_attribute_info_local cid attr (Proof_Context.init_global thy) +fun get_attribute_info (*long*)cid attr thy = + get_attribute_info_local cid attr (Proof_Context.init_global thy) fun get_attribute_defaults (* long*)cid thy = let val attrS = flat(map snd (get_attributes cid thy)) @@ -870,6 +904,25 @@ fun transduce_term_global {mk_elaboration=mk_elaboration} (term,pos) thy = | T t = t in T term end +(* Elaborate an Isabelle_DOF term-antiquotation from an only parsed-term (not checked) *) +fun parsed_elaborate ctxt pos (Const(s,ty) $ t) = + if is_ISA s + then Syntax.check_term ctxt (Const(s, ty) $ t) + |> (fn t => transduce_term_global {mk_elaboration=true} (t , pos) + 
(Proof_Context.theory_of ctxt)) + else (Const(s,ty) $ (parsed_elaborate ctxt pos t)) + | parsed_elaborate ctxt pos (t1 $ t2) = parsed_elaborate ctxt pos (t1) $ parsed_elaborate ctxt pos (t2) + | parsed_elaborate ctxt pos (Const(s,ty)) = + if is_ISA s + then Syntax.check_term ctxt (Const(s, ty)) + |> (fn t => transduce_term_global {mk_elaboration=true} (t , pos) + (Proof_Context.theory_of ctxt)) + else Const(s,ty) + | parsed_elaborate ctxt pos (Abs(s,ty,t)) = Abs (s,ty,parsed_elaborate ctxt pos t) + | parsed_elaborate _ _ t = t + +fun elaborate_term' ctxt parsed_term = parsed_elaborate ctxt Position.none parsed_term + fun elaborate_term ctxt term = transduce_term_global {mk_elaboration=true} (term , Position.none) (Proof_Context.theory_of ctxt) @@ -878,6 +931,39 @@ fun check_term ctxt term = transduce_term_global {mk_elaboration=false} (term , Position.none) (Proof_Context.theory_of ctxt) +(* Check an Isabelle_DOF term-antiquotation from an only parsed-term (not checked) *) +fun parsed_check ctxt pos (Const(s,ty) $ t) = + if is_ISA s + then let val _ = Syntax.check_term ctxt (Const(s, ty) $ t) + |> (fn t => transduce_term_global {mk_elaboration=false} (t , pos) + (Proof_Context.theory_of ctxt)) + in (Const(s,ty) $ (parsed_check ctxt pos t)) end + else (Const(s,ty) $ (parsed_check ctxt pos t)) + | parsed_check ctxt pos (t1 $ t2) = parsed_check ctxt pos (t1) $ parsed_check ctxt pos (t2) + | parsed_check ctxt pos (Const(s,ty)) = + if is_ISA s + then let val _ = Syntax.check_term ctxt (Const(s, ty)) + |> (fn t => transduce_term_global {mk_elaboration=false} (t , pos) + (Proof_Context.theory_of ctxt)) + in Const(s,ty) end + else Const(s,ty) + | parsed_check ctxt pos (Abs(s,ty,t)) = Abs (s,ty,parsed_check ctxt pos t) + | parsed_check _ _ t = t + +fun check_term' ctxt parsed_term = parsed_check ctxt Position.none parsed_term + +fun prep_decls prep_var raw_vars ctxt = + let + val (vars, ctxt') = fold_map prep_var raw_vars ctxt; + val (xs, ctxt'') = ctxt' + |> Context_Position.set_visible false + |> Proof_Context.add_fixes vars + ||> Context_Position.restore_visible ctxt'; + val _ = + Context_Position.reports ctxt'' + (map (Binding.pos_of o #1) vars ~~ + map (Variable.markup_entity_def ctxt'' ##> Properties.remove Markup.kindN) xs); + in ((vars, xs), ctxt'') end; fun print_doc_class_tree ctxt P T = let val classes = Name_Space.dest_table (get_onto_classes ctxt) @@ -1104,12 +1190,13 @@ fun check_instance thy (term, _, pos) s = |> Long_Name.qualify qual fun check thy (name, _) = let - val object_cid = DOF_core.cid_of name thy + val name' = DOF_core.cid_of name thy + |> DOF_core.get_onto_class_cid thy |> (fst o fst) fun check' (class_name, object_cid) = if class_name = object_cid then DOF_core.value_of name thy else err (name ^ " is not an instance of " ^ class_name) pos - in check' (class_name, object_cid) end; + in check' (class_name, name') end; in ML_isa_check_generic check thy (term, pos) end fun ML_isa_id _ (term,_) = SOME term @@ -1152,12 +1239,11 @@ fun rm_mixfix name mixfix thy = |> Named_Target.theory_map) end -fun elaborate_instance thy _ _ term_option pos = +fun elaborate_instance thy _ _ term_option _ = case term_option of NONE => error ("Malformed term annotation") | SOME term => let val instance_name = HOLogic.dest_string term - val value = DOF_core.value_of instance_name thy - in DOF_core.transduce_term_global {mk_elaboration=true} (value, pos) thy + in DOF_core.value_of instance_name thy end (* @@ -1165,14 +1251,15 @@ fun elaborate_instance thy _ _ term_option pos = because the class 
name is already bound to "doc_class Regular_Exp.rexp" constant by add_doc_class_cmd function *) -fun declare_ISA_class_accessor_and_check_instance (doc_class_name, bind_pos) thy = +fun declare_ISA_class_accessor_and_check_instance (params, doc_class_name, bind_pos) thy = let val bname = Long_Name.base_name doc_class_name val bname' = prefix DOF_core.doc_class_prefix bname val bind = bname' |> pair bind_pos |> swap |> Binding.make val bind' = prefix DOF_core.long_doc_class_prefix bname |> pair bind_pos |> swap |> Binding.make - val const_typ = \<^typ>\string\ --> Syntax.read_typ (Proof_Context.init_global thy) doc_class_name + val typ = Type (doc_class_name, map TFree params) + val const_typ = \<^typ>\string\ --> typ fun mixfix_enclose name = name |> enclose "@{" " _}" val mixfix = clean_mixfix bname |> mixfix_enclose val mixfix' = clean_mixfix doc_class_name |> mixfix_enclose @@ -1184,6 +1271,7 @@ fun declare_ISA_class_accessor_and_check_instance (doc_class_name, bind_pos) thy |> Sign.add_consts [(bind', const_typ, Mixfix.mixfix mixfix')] |> DOF_core.add_isa_transformer bind' ((check_instance, elaborate_instance) |> DOF_core.make_isa_transformer) + end fun elaborate_instances_list thy isa_name _ _ _ = @@ -1204,23 +1292,23 @@ fun elaborate_instances_list thy isa_name _ _ _ = in HOLogic.mk_list class_typ values end -fun declare_class_instances_annotation (doc_class_name, bind_pos) thy = +fun declare_class_instances_annotation (params, doc_class_name, bind_pos) thy = let val bname = Long_Name.base_name doc_class_name val bname' = prefix DOF_core.doc_class_prefix bname |> suffix instances_of_suffixN val bind = bname' |> pair bind_pos |> swap |> Binding.make val bind' = prefix DOF_core.long_doc_class_prefix bname |> suffix instances_of_suffixN |> pair bind_pos |> swap |> Binding.make - val class_typ = doc_class_name |> Proof_Context.read_typ (Proof_Context.init_global thy) + val typ = Type (doc_class_name, map TFree params) fun mixfix_enclose name = name |> enclose "@{" "}" val mixfix = clean_mixfix (bname ^ instances_of_suffixN) |> mixfix_enclose val mixfix' = clean_mixfix (doc_class_name ^ instances_of_suffixN) |> mixfix_enclose in thy |> rm_mixfix bname' mixfix - |> Sign.add_consts [(bind, \<^Type>\list class_typ\, Mixfix.mixfix mixfix)] + |> Sign.add_consts [(bind, \<^Type>\list typ\, Mixfix.mixfix mixfix)] |> DOF_core.add_isa_transformer bind ((check_identity, elaborate_instances_list) |> DOF_core.make_isa_transformer) - |> Sign.add_consts [(bind', \<^Type>\list class_typ\, Mixfix.mixfix mixfix')] + |> Sign.add_consts [(bind', \<^Type>\list typ\, Mixfix.mixfix mixfix')] |> DOF_core.add_isa_transformer bind' ((check_identity, elaborate_instances_list) |> DOF_core.make_isa_transformer) end @@ -1323,10 +1411,9 @@ ML\ structure ODL_Meta_Args_Parser = struct - -type meta_args_t = (((string * Position.T) * - (string * Position.T) option) - * ((string * Position.T) * string) list) +type meta_args_t = ((string * Position.T) * + (string * Position.T) option) + * ((string * Position.T) * string) list val empty_meta_args = ((("", Position.none), NONE), []) @@ -1373,6 +1460,7 @@ val attributes_upd = (Scan.optional(Parse.$$$ "," -- improper |-- (Parse.enum "," (improper |-- attribute_upd)))) [])) --| Parse.$$$ "]") --| improper + end (* structure ODL_Meta_Args_Parser *) \ @@ -1435,14 +1523,20 @@ fun value_select name ctxt = then default_value ctxt else Name_Space.get (Evaluators.get (Proof_Context.theory_of ctxt)) name ctxt; -fun value_select' raw_name ctxt = - if raw_name = "" - then 
(DOF_core.elaborate_term ctxt) #> default_value ctxt - else (DOF_core.elaborate_term ctxt) - #> (let val name = intern_evaluator (Proof_Context.theory_of ctxt) raw_name in - Name_Space.get (Evaluators.get (Proof_Context.theory_of ctxt)) name ctxt end); +fun value_select' raw_name ctxt raw_t = + let val thy = Proof_Context.theory_of ctxt + val t = Syntax.parse_term ctxt raw_t; + val t' = DOF_core.elaborate_term' ctxt t + val t'' = Syntax.check_term ctxt t' + in + if raw_name = "" + then t'' |> default_value ctxt + else let val name = intern_evaluator thy raw_name + in t'' |> Name_Space.get (Evaluators.get thy) name ctxt + end + end -val value = value_select' "" +val value = value_select "" val value_without_elaboration = value_select "" @@ -1459,13 +1553,12 @@ fun cid_2_cidType cid_long thy = in fold (fn x => fn y => Type(ty_name x,[y])) (fathers cid_long) \<^Type>\unit\ end -fun create_default_object thy class_name = +fun create_default_object thy oid class_name typ = let val purified_class_name = String.translate (fn #"." => "_" | x => String.implode [x]) class_name - val make_const = Syntax.read_term_global thy (Long_Name.qualify class_name makeN); - fun attr_to_free (binding, typ, _) = Free (purified_class_name ^ "_" - ^ (Binding.name_of binding) - ^ "_Attribute_Not_Initialized", typ) + fun attr_to_s (binding, _, _) = purified_class_name ^ "_" + ^ (Binding.name_of binding) + ^ "_Attribute_Not_Initialized" val class_list = DOF_core.get_attributes class_name thy fun attrs_filter [] = [] | attrs_filter (x::xs) = @@ -1479,44 +1572,51 @@ fun create_default_object thy class_name = else is_duplicated y xs end in (cid, filter_out (fn y => is_duplicated y xs) ys)::attrs_filter xs end val class_list' = rev (attrs_filter (rev class_list)) - val tag_attr = HOLogic.mk_number \<^Type>\int\ - fun add_tag_to_attrs_free' tag_attr thy (cid, filtered_attr_list) = + val tag_attr_s = serial () |> string_of_int + fun trans_attr thy trans tag_attr (cid, filtered_attr_list) = if DOF_core.virtual_of cid thy |> #virtual - then (tag_attr (serial ()))::(map (attr_to_free) filtered_attr_list) - else (map (attr_to_free) filtered_attr_list) - val class_list'' = flat (map (add_tag_to_attrs_free' tag_attr thy) class_list') - in list_comb (make_const, (tag_attr (serial()))::class_list'') end + then (tag_attr)::(map (trans) filtered_attr_list) + else (map (trans) filtered_attr_list) + val test_class = class_list' |> map (trans_attr thy (attr_to_s) tag_attr_s) + |> flat + |> cons tag_attr_s + val term = test_class |> cons (Long_Name.qualify class_name makeN) |> space_implode Symbol.space + val ctxt = Proof_Context.init_global thy + val term' = term |> Syntax.parse_term ctxt |> DOF_core.elaborate_term' ctxt + (* rename oid to be compatible with binding naming policy *) + val clean_oid = translate_string (fn ":" => "_" | "-" => "_" | c => c); + val oid' = clean_oid oid + val parsed_prop = Const (\<^const_name>\Pure.eq\, dummyT) $ Free (oid', dummyT) $ term' + val raw_vars = [(Binding.name oid', SOME typ, NoSyn)] + val (_, vars_ctxt) = DOF_core.prep_decls Proof_Context.cert_var raw_vars ctxt + val concl = Syntax.check_prop vars_ctxt parsed_prop + in Logic.dest_equals concl |> snd end -fun check_classref {is_monitor=is_monitor} (SOME(cid,pos)) thy = - let - val cid_long = DOF_core.get_onto_class_name_global' cid thy - val DOF_core.Onto_Class {rex, ...} = DOF_core.get_onto_class_global cid_long thy - val _ = if is_monitor andalso (null rex orelse cid_long= DOF_core.default_cid ) - then error("should be monitor class!") - else 
() - val ctxt = Proof_Context.init_global thy - val onto_classes = DOF_core.get_onto_classes ctxt - val markup = DOF_core.get_onto_class_name_global cid_long thy - |> Name_Space.markup (Name_Space.space_of_table onto_classes) - val _ = Context_Position.report ctxt pos markup; - in (cid_long, pos) - end - | check_classref _ NONE _ = (DOF_core.default_cid, Position.none) +fun check_classref {is_monitor=is_monitor} (SOME (cid, pos)) thy = + let + val ctxt = Proof_Context.init_global thy + val name_cid_typ = DOF_core.get_onto_class_cid thy cid + val cid_long = name_cid_typ |> (fst o fst) + val rex = DOF_core.rex_of cid_long thy + val _ = if is_monitor andalso (null rex orelse cid_long= DOF_core.default_cid ) + then error("should be monitor class!") + else () + val onto_classes = DOF_core.get_onto_classes ctxt + val markup = DOF_core.get_onto_class_name_global cid_long thy + |> Name_Space.markup (Name_Space.space_of_table onto_classes) + val _ = Context_Position.report ctxt pos markup; + in (name_cid_typ, pos) + end + | check_classref _ NONE _ = pair DOF_core.default_cid DOF_core.default_cid + |> rpair \<^typ>\unit\ + |> rpair Position.none - -fun generalize_typ n = Term.map_type_tfree (fn (str,sort)=> Term.TVar((str,n),sort)); -fun infer_type thy term = hd (Type_Infer_Context.infer_types (Proof_Context.init_global thy) [term]) - - -fun calc_update_term {mk_elaboration=mk_elaboration} thy cid_long +fun calc_update_term {mk_elaboration=mk_elaboration} thy (name, typ) (S:(string * Position.T * string * term)list) term = - let val cid_ty = cid_2_cidType cid_long thy + let val cid_long = name + val cid_ty = typ val ctxt = Proof_Context.init_global thy - val generalize_term = Term.map_types (generalize_typ 0) - fun toString t = Syntax.string_of_term ctxt t - fun instantiate_term S t = - Term_Subst.map_types_same (Term_Subst.instantiateT (TVars.make S)) (t) fun read_assn (lhs, pos:Position.T, opr, rhs) term = let fun get_class_name parent_cid attribute_name pos = @@ -1532,8 +1632,7 @@ fun calc_update_term {mk_elaboration=mk_elaboration} thy cid_long NONE => ISA_core.err ("Attribute " ^ attribute_name ^ " not defined for class: " ^ cid_long) pos - | SOME (_, parent_name) => - get_class_name parent_name attribute_name pos + | SOME (_, parent_name) => get_class_name parent_name attribute_name pos end val _ = if mk_elaboration then @@ -1544,7 +1643,7 @@ fun calc_update_term {mk_elaboration=mk_elaboration} thy cid_long in Context_Position.report ctxt pos markup end else () val info_opt = DOF_core.get_attribute_info cid_long (Long_Name.base_name lhs) thy - val (ln,lnt,lnu,lnut) = case info_opt of + val (ln,lnt,lnu,_) = case info_opt of NONE => error ("unknown attribute >" ^((Long_Name.base_name lhs)) ^"< in class: "^cid_long) @@ -1552,28 +1651,30 @@ fun calc_update_term {mk_elaboration=mk_elaboration} thy cid_long long_name ^ Record.updateN, (typ --> typ) --> cid_ty --> cid_ty) - val tyenv = Sign.typ_match thy ((generalize_typ 0)(type_of rhs), lnt) (Vartab.empty) - handle Type.TYPE_MATCH => (error ("type of attribute: " ^ ln - ^ " does not fit to term: " - ^ toString rhs)); - val tyenv' = (map (fn (s,(t,u)) => ((s,t),u)) (Vartab.dest tyenv)) val _ = if Long_Name.base_name lhs = lhs orelse ln = lhs then () else error("illegal notation for attribute of "^cid_long) fun join (ttt as \<^Type>\int\) = \<^Const>\Groups.plus ttt\ - |join (ttt as \<^Type>\set _\) = \<^Const>\Lattices.sup ttt\ - |join \<^Type>\list A\ = \<^Const>\List.append A\ + |join (ttt as \<^Type>\set _\) = \<^Const>\Lattices.sup dummyT\ + 
|join \<^Type>\list A\ = \<^Const>\List.append dummyT\ |join _ = error("implicit fusion operation not defined for attribute: "^ lhs) (* could be extended to bool, map, multisets, ... *) - val rhs' = instantiate_term tyenv' (generalize_term rhs) - val rhs'' = DOF_core.transduce_term_global {mk_elaboration=mk_elaboration} - (rhs',pos) thy in case opr of - "=" => Const(lnu,lnut) $ Abs ("uu_", lnt, rhs'') $ term - | ":=" => Const(lnu,lnut) $ Abs ("uu_", lnt, rhs'') $ term - | "+=" => Const(lnu,lnut) $ Abs ("uu_", lnt, join lnt $ (Bound 0) $ rhs'') $ term + "=" => Const(lnu, dummyT) $ Abs ("uu_", dummyT, rhs) $ term + | ":=" => Const(lnu, dummyT) $ Abs ("uu_", dummyT, rhs) $ term + | "+=" => Const(lnu, dummyT) $ Abs ("uu_u", dummyT, join lnt $ (Bound 0) $ rhs) $ term | _ => error "corrupted syntax - oops - this should not occur" - end - in Sign.certify_term thy (fold read_assn S term) end + end + val t = fold read_assn S term + val t' = if mk_elaboration + then DOF_core.elaborate_term' ctxt t + else DOF_core.check_term' ctxt t + in if t = term + then Sign.certify_term thy t' + else + let + val concl = Syntax.check_term ctxt t'; + in Sign.certify_term thy concl end + end fun msg thy txt pos = if Config.get_global thy DOF_core.strict_monitor_checking then ISA_core.err txt pos @@ -1584,9 +1685,9 @@ fun msg_intro get n oid cid = ("accepts clause " ^ Int.toString n ^ get (" not enabled for", " rejected") ^ " doc_class: " ^ cid) -fun register_oid_cid_in_open_monitors oid _ cid_pos thy = - let val cid_long= fst cid_pos - val pos' = snd cid_pos +fun register_oid_cid_in_open_monitors oid _ (name, pos') thy = + let + val cid_long= name fun is_enabled (n, monitor_info) = if exists (DOF_core.is_subclass_global thy cid_long) (DOF_core.alphabet_of n thy) @@ -1642,11 +1743,11 @@ fun register_oid_cid_in_open_monitors oid _ cid_pos thy = val trace_attr = if defined then trace_attr_t cid_long oid else [] - fun cid_of oid = let val DOF_core.Instance params = DOF_core.get_instance_global oid thy - in #cid params end + fun mon_cid oid = DOF_core.cid_of oid thy |> DOF_core.get_onto_class_cid thy + |> (fn ((name, _), typ) => (name, typ)) val ctxt = Proof_Context.init_global thy fun def_trans_value oid = - (#1 o (calc_update_term {mk_elaboration=true} thy (cid_of oid) trace_attr)) + (#1 o (calc_update_term {mk_elaboration=true} thy (mon_cid oid) trace_attr)) #> value ctxt val _ = if null enabled_monitors then () @@ -1671,7 +1772,7 @@ fun register_oid_cid_in_open_monitors oid _ cid_pos thy = fun update_trace mon_oid = if Config.get_global thy DOF_core.object_value_debug then let fun def_trans_input_term oid = - #1 o (calc_update_term {mk_elaboration=false} thy (cid_of oid) trace_attr) + #1 o (calc_update_term {mk_elaboration=false} thy (mon_cid oid) trace_attr) in DOF_core.map_input_term_value mon_oid (def_trans_input_term mon_oid) (def_trans_value mon_oid) end else DOF_core.map_value mon_oid (def_trans_value mon_oid) @@ -1686,13 +1787,14 @@ fun register_oid_cid_in_open_monitors oid _ cid_pos thy = fun check_invariants thy (oid, _) = let val docitem_value = DOF_core.value_of oid thy - val cid = DOF_core.cid_of oid thy + val name = DOF_core.cid_of oid thy + |> DOF_core.get_onto_class_cid thy |> (fst o fst) fun get_all_invariants cid thy = - case DOF_core.get_onto_class_global cid thy of - DOF_core.Onto_Class {inherits_from=NONE, invs, ...} => single (cid, invs) - | DOF_core.Onto_Class {inherits_from=SOME(_,father), invs, ...} => - (cid, invs) :: (get_all_invariants father thy) - val cids_invariants = get_all_invariants 
cid thy + case DOF_core.get_onto_class_global cid thy of + DOF_core.Onto_Class {inherits_from=NONE, invs, ...} => single (cid, invs) + | DOF_core.Onto_Class {inherits_from=SOME(_, father), invs, ...} => + (cid, invs) :: get_all_invariants father thy + val cids_invariants = get_all_invariants name thy val inv_and_apply_list = let fun mk_inv_and_apply cid_invs value thy = let val (cid_long, invs) = cid_invs @@ -1725,7 +1827,7 @@ fun check_invariants thy (oid, _) = val thms = Proof_Context.get_thms ctxt (inv_name ^ def_suffixN) (* Get the make definition (def(1) of the record) *) val thms' = - (Proof_Context.get_thms ctxt (Long_Name.append cid defsN)) @ thms + (Proof_Context.get_thms ctxt (Long_Name.append name defsN)) @ thms val _ = Goal.prove ctxt [] [] prop_term (K ((unfold_tac ctxt thms') THEN (auto_tac ctxt))) |> Thm.close_derivation \<^here> @@ -1766,13 +1868,15 @@ fun check_invariants thy (oid, _) = fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oid pos cid_pos doc_attrs thy = let - val cid_pos' = check_classref is_monitor cid_pos thy + val (((name, args_cid), typ), pos') = check_classref is_monitor cid_pos thy + val cid_pos' = (name, pos') val cid_long = fst cid_pos' - val default_cid = cid_long = DOF_core.default_cid - val vcid = case cid_pos of NONE => NONE - | SOME (cid,_) => if DOF_core.virtual_of cid thy |> #virtual - then SOME (DOF_core.get_onto_class_name_global' cid thy) - else NONE + val default_cid = args_cid = DOF_core.default_cid + val vcid = if default_cid + then NONE + else if DOF_core.virtual_of cid_long thy |> #virtual + then SOME args_cid + else NONE val value_terms = if default_cid then let val undefined_value = Free ("Undefined_Value", \<^Type>\unit\) @@ -1782,19 +1886,19 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi without using the burden of ontology classes. 
ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) else let - val defaults_init = create_default_object thy cid_long - fun conv (na, _(*ty*), term) =(Binding.name_of na, Binding.pos_of na, "=", term); + fun conv_attrs ((lhs, pos), rhs) = (YXML.content_of lhs,pos,"=", Syntax.parse_term (Proof_Context.init_global thy) rhs) + val assns' = map conv_attrs doc_attrs + val defaults_init = create_default_object thy oid cid_long typ + fun conv (na, _(*ty*), parsed_term) =(Binding.name_of na, Binding.pos_of na, "=", parsed_term); val S = map conv (DOF_core.get_attribute_defaults cid_long thy); val (defaults, _(*ty*), _) = calc_update_term {mk_elaboration=false} - thy cid_long S defaults_init; - fun conv_attrs ((lhs, pos), rhs) = (YXML.content_of lhs,pos,"=", Syntax.read_term_global thy rhs) - val assns' = map conv_attrs doc_attrs + thy (name, typ) S defaults_init; val (value_term', _(*ty*), _) = calc_update_term {mk_elaboration=true} - thy cid_long assns' defaults + thy (name, typ) assns' defaults in if Config.get_global thy DOF_core.object_value_debug then let val (input_term, _(*ty*), _) = calc_update_term {mk_elaboration=false} - thy cid_long assns' defaults + thy (name, typ) assns' defaults in (input_term, value_term') end else (\<^term>\()\, value_term') end in thy |> DOF_core.define_object_global @@ -1802,8 +1906,8 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi (false, fst value_terms, (snd value_terms) |> value (Proof_Context.init_global thy), - is_inline, cid_long, vcid)) - |> register_oid_cid_in_open_monitors oid pos cid_pos' + is_inline, args_cid, vcid)) + |> register_oid_cid_in_open_monitors oid pos (name, pos') |> (fn thy => if (* declare_reference* without arguments is not checked against invariants *) thy |> DOF_core.defined_of oid |> not @@ -1827,7 +1931,7 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi end (* structure Docitem_Parser *) -fun meta_args_exec (meta_args as (((oid, pos), cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = +fun meta_args_exec (meta_args as (((oid, pos), cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = thy |> (if meta_args = ODL_Meta_Args_Parser.empty_meta_args then (K thy) else Docitem_Parser.create_and_check_docitem @@ -1838,10 +1942,10 @@ fun value_cmd {assert=assert} meta_args_opt raw_name modes raw_t pos thy = let val thy' = meta_args_exec meta_args_opt thy val name = intern_evaluator thy' raw_name; - val t = Syntax.read_term_global thy' raw_t; - val term' = DOF_core.transduce_term_global {mk_elaboration=true} (t , pos) - (thy'); - val t' = value_select name (Proof_Context.init_global thy') term'; + val t = Syntax.parse_term (Proof_Context.init_global thy') raw_t; + val term' = DOF_core.parsed_elaborate (Proof_Context.init_global thy') pos t + val term'' = Syntax.check_term (Proof_Context.init_global thy') term' + val t' = value_select name (Proof_Context.init_global thy') term''; val ty' = Term.type_of t'; val ty' = if assert then case ty' of @@ -1866,7 +1970,7 @@ val opt_modes = val opt_evaluator = Scan.optional (\<^keyword>\[\ |-- Parse.name --| \<^keyword>\]\) ""; -fun pass_trans_to_value_cmd meta_args_opt ((name, modes), t) trans = +fun pass_trans_to_value_cmd (((name, meta_args_opt), modes), t) trans = let val pos = Toplevel.pos_of trans in trans |> Toplevel.theory (value_cmd {assert=false} meta_args_opt name modes t pos) @@ -1910,7 +2014,7 @@ val (disable_assert_evaluation, disable_assert_evaluation_setup) val _ = Theory.setup 
disable_assert_evaluation_setup -fun pass_trans_to_assert_value_cmd meta_args_opt ((name, modes), t) trans = +fun pass_trans_to_assert_value_cmd (((name, meta_args_opt), modes), t) trans = let val pos = Toplevel.pos_of trans in trans |> Toplevel.theory @@ -1923,6 +2027,7 @@ in trans end (* setup ontology aware commands *) + val _ = Outer_Syntax.command \<^command_keyword>\term*\ "read and print term" (ODL_Meta_Args_Parser.opt_attributes -- (opt_modes -- Parse.term) @@ -1930,31 +2035,31 @@ val _ = val _ = Outer_Syntax.command \<^command_keyword>\value*\ "evaluate and print term" - (ODL_Meta_Args_Parser.opt_attributes -- (opt_evaluator -- opt_modes -- Parse.term) - >> (uncurry pass_trans_to_value_cmd)); - + ((opt_evaluator -- ODL_Meta_Args_Parser.opt_attributes -- opt_modes -- Parse.term) + >> (pass_trans_to_value_cmd)); + val _ = Outer_Syntax.command \<^command_keyword>\assert*\ "evaluate and assert term" - (ODL_Meta_Args_Parser.opt_attributes -- (opt_evaluator -- opt_modes -- Parse.term) - >> uncurry pass_trans_to_assert_value_cmd); + ((opt_evaluator -- ODL_Meta_Args_Parser.opt_attributes -- opt_modes -- Parse.term) + >> pass_trans_to_assert_value_cmd); (* setup ontology - aware text and ML antiquotations. Due to lexical restrictions, we can not declare them as value* or term*, although we will refer to them this way in papers. *) -local +local fun pretty_term_style ctxt (style: term -> term, t) = - Document_Output.pretty_term ctxt (style (DOF_core.check_term ctxt t)); + Document_Output.pretty_term ctxt (style (DOF_core.check_term ctxt t)) fun print_term ctxt t = ML_Syntax.print_term (DOF_core.check_term (Context.proof_of ctxt) t) in val _ = Theory.setup (Document_Output.antiquotation_pretty_source_embedded \<^binding>\value_\ - (Scan.lift opt_evaluator -- Term_Style.parse -- Args.term) + (Scan.lift opt_evaluator -- Term_Style.parse -- Scan.lift Parse.term) (fn ctxt => fn ((name, style), t) => Document_Output.pretty_term ctxt (style (value_select' name ctxt t))) #> ML_Antiquotation.inline_embedded \<^binding>\value_\ - ((Scan.lift opt_evaluator -- Args.term) + ((Scan.lift (opt_evaluator -- Parse.term)) #> (fn ((name, t),(ctxt, ts)) => (((value_select' name (Context.proof_of ctxt) t) |> (ML_Syntax.atomic o (print_term ctxt))), (ctxt, ts)))) @@ -1980,53 +2085,64 @@ struct fun update_instance_command (((oid, pos), cid_pos), doc_attrs: (((string*Position.T)*string)*string)list) thy : theory = - let val cid = let val DOF_core.Instance {cid,...} = - DOF_core.get_instance_global oid thy - val ctxt = Proof_Context.init_global thy - val instances = DOF_core.get_instances ctxt - val markup = DOF_core.get_instance_name_global oid thy - |> Name_Space.markup (Name_Space.space_of_table instances) - val _ = Context_Position.report ctxt pos markup; - in cid end - val cid_pos' = Value_Command.Docitem_Parser.check_classref {is_monitor = false} - cid_pos thy - val cid_long = fst cid_pos' - val _ = if cid_long = DOF_core.default_cid orelse cid = cid_long - then () - else error("incompatible classes:"^cid^":"^cid_long) - - fun conv_attrs (((lhs, pos), opn), rhs) = ((YXML.content_of lhs),pos,opn, - Syntax.read_term_global thy rhs) - val assns' = map conv_attrs doc_attrs - val def_trans_value = - #1 o (Value_Command.Docitem_Parser.calc_update_term {mk_elaboration=true} - thy cid_long assns') - #> Value_Command.value (Proof_Context.init_global thy) - in - thy |> (if Config.get_global thy DOF_core.object_value_debug - then let val def_trans_input_term = - #1 o (Value_Command.Docitem_Parser.calc_update_term - 
{mk_elaboration=false} thy cid_long assns') - in DOF_core.map_input_term_value oid - def_trans_input_term def_trans_value end - else DOF_core.map_value oid def_trans_value) - |> tap (DOF_core.check_ml_invs cid_long oid {is_monitor=false}) - |> (fn thy => if Config.get_global thy DOF_core.invariants_checking - then Value_Command.Docitem_Parser.check_invariants thy (oid, pos) - else thy) - end + let val cid = let val DOF_core.Instance {cid,...} = + DOF_core.get_instance_global oid thy + val ctxt = Proof_Context.init_global thy + val instances = DOF_core.get_instances ctxt + val markup = DOF_core.get_instance_name_global oid thy + |> Name_Space.markup (Name_Space.space_of_table instances) + val _ = Context_Position.report ctxt pos markup; + in cid end + val default_cid = cid = DOF_core.default_cid + val (((name, cid'), typ), pos') = Value_Command.Docitem_Parser.check_classref {is_monitor = false} + cid_pos thy + val cid_pos' = (name, pos') + val cid_long = fst cid_pos' + val _ = if cid' = DOF_core.default_cid orelse cid = cid' + then () + else error("incompatible classes:"^cid^":"^cid') + fun conv_attrs (((lhs, pos), opn), rhs) = ((YXML.content_of lhs),pos,opn, + Syntax.parse_term (Proof_Context.init_global thy) rhs) + val assns' = map conv_attrs doc_attrs + val def_trans_value = + #1 o (Value_Command.Docitem_Parser.calc_update_term {mk_elaboration=true} + thy (name, typ) assns') + #> Value_Command.value (Proof_Context.init_global thy) + in + thy |> (if Config.get_global thy DOF_core.object_value_debug + then let val def_trans_input_term = + #1 o (Value_Command.Docitem_Parser.calc_update_term + {mk_elaboration=false} thy (name, typ) assns') + in DOF_core.map_input_term_value oid + def_trans_input_term def_trans_value end + else DOF_core.map_value oid def_trans_value) + |> tap (DOF_core.check_ml_invs cid_long oid {is_monitor=false}) + (* Bypass checking of high-level invariants when the class default_cid = "text", + the top (default) document class. + We want the class default_cid to stay abstract + and not have the capability to be defined with attribute, invariants, etc. + Hence this bypass handles docitem without a class associated, + for example when you just want a document element to be referentiable + without using the burden of ontology classes. 
+ ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) + |> (fn thy => if default_cid then thy + else if Config.get_global thy DOF_core.invariants_checking + then Value_Command.Docitem_Parser.check_invariants thy (oid, pos) + else thy) + end (* General criticism : attributes like "level" were treated here in the kernel instead of dragging them out into the COL -- bu *) -fun open_monitor_command ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = - let fun o_m_c oid pos cid_pos doc_attrs thy = + +fun open_monitor_command ((((oid, pos), raw_parent_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = + let fun o_m_c oid pos params_cid_pos doc_attrs thy = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor=true} (* this is a monitor *) {is_inline=false} (* monitors are always inline *) {define=true} - oid pos cid_pos doc_attrs thy + oid pos params_cid_pos doc_attrs thy fun compute_enabled_set cid thy = let val DOF_core.Onto_Class X = DOF_core.get_onto_class_global' cid thy @@ -2034,19 +2150,23 @@ fun open_monitor_command ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Par val aalph = RegExpInterface.alphabet (#rex X) in (aalph, ralph, map (RegExpInterface.rexp_term2da thy aalph)(#rex X)) end fun create_monitor_entry oid thy = - let val cid = case cid_pos of - NONE => ISA_core.err ("You must specified a monitor class.") pos - | SOME (cid, _) => cid - val (accS, rejectS, aS) = compute_enabled_set cid thy - in DOF_core.add_monitor_info (Binding.make (oid, pos)) - (DOF_core.make_monitor_info (accS, rejectS, aS)) thy - end + let val cid = case raw_parent_pos of + NONE => ISA_core.err ("You must specified a monitor class.") pos + | SOME (raw_parent, _) => + DOF_core.markup2string raw_parent + |> DOF_core.get_onto_class_cid thy |> (fst o fst) + val (accS, rejectS, aS) = compute_enabled_set cid thy + in DOF_core.add_monitor_info (Binding.make (oid, pos)) + (DOF_core.make_monitor_info (accS, rejectS, aS)) thy + end in - thy |> o_m_c oid pos cid_pos doc_attrs |> create_monitor_entry oid + thy + |> o_m_c oid pos raw_parent_pos doc_attrs + |> create_monitor_entry oid end; -fun close_monitor_command (args as (((oid, pos), _), +fun close_monitor_command (args as (((oid, pos), cid_pos), _: (((string*Position.T)*string)*string)list)) thy = let fun check_if_final aS = let val i = (find_index (not o RegExpInterface.final) aS) + 1 in if i >= 1 @@ -2085,9 +2205,14 @@ fun meta_args_2_latex thy sem_attrs transform_attr let val l = DOF_core.get_instance_name_global lab thy |> DOF_core.output_name |> enclose "{" "}" |> prefix "label = " - val cid_long = case cid_opt of - NONE => DOF_core.cid_of lab thy - | SOME(cid,_) => DOF_core.get_onto_class_name_global' cid thy + val ((cid_long, _), _) = case cid_opt of + NONE => let val DOF_core.Instance cid = + DOF_core.get_instance_global lab thy + in pair (cid |> #cid) (cid |> #cid) + |> rpair \<^typ>\unit\ end + + | SOME(cid,_) => DOF_core.get_onto_class_cid thy cid + val cid_txt = cid_long |> DOF_core.output_name |> enclose "{" "}" |> prefix "type = " @@ -2123,15 +2248,16 @@ fun meta_args_2_latex thy sem_attrs transform_attr in str_of_term end - fun toLong n = #long_name(the(DOF_core.get_attribute_info cid_long (markup2string n) thy)) - + fun toLong n = #long_name(the(DOF_core.get_attribute_info cid_long (markup2string n) thy)) val ctxt = Proof_Context.init_global thy val actual_args = map (fn ((lhs,_),rhs) => (toLong lhs, ltx_of_markup ctxt rhs)) - attr_list - val default_args = map (fn (b,_,t) => (toLong (Long_Name.base_name ( 
Sign.full_name thy b)), - ltx_of_term ctxt true t)) - (DOF_core.get_attribute_defaults cid_long thy) - + attr_list + val default_args = + (DOF_core.get_attribute_defaults cid_long thy) + |> map (fn (b,_, parsed_term) => + (toLong (Long_Name.base_name ( Sign.full_name thy b)) + , ltx_of_term ctxt true (Syntax.check_term ctxt parsed_term))) + val default_args_filtered = filter (fn (a,_) => not (exists (fn b => b = a) (map (fn (c,_) => c) actual_args))) default_args val transformed_args = (actual_args@default_args_filtered) @@ -2160,6 +2286,7 @@ fun meta_args_2_latex thy sem_attrs transform_attr Latex.string (enclose "[" "]" (String.concat [ label_and_type, ", args={", (commas str_args), "}"])) end + (* level-attribute information management *) fun gen_enriched_document_cmd {inline} cid_transform attr_transform ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) : theory -> theory = @@ -2203,7 +2330,7 @@ fun document_command (name, pos) descr mark cmd sem_attrs transform_attr = (fn (meta_args, text) => Toplevel.theory' (fn _ => cmd meta_args) ((Toplevel.presentation_context - #> document_output_reports name mark sem_attrs transform_attr meta_args text + #> document_output_reports name mark sem_attrs transform_attr meta_args text #> SOME): Toplevel.state -> Latex.text option)) ); fun onto_macro_cmd_output_reports output_cmd (meta_args, text) ctxt = @@ -2235,17 +2362,16 @@ val _ = (ODL_Meta_Args_Parser.attributes_upd >> (Toplevel.theory o close_monitor_command)); - val _ = Outer_Syntax.command \<^command_keyword>\update_instance*\ "update meta-attributes of an instance of a document class" (ODL_Meta_Args_Parser.attributes_upd - >> (Toplevel.theory o update_instance_command)); + >> (Toplevel.theory o update_instance_command)); val _ = document_command \<^command_keyword>\text*\ "formal comment (primary style)" {markdown = true, body = true} (gen_enriched_document_cmd {inline=true} I I) [] I; - + (* This is just a stub at present *) val _ = @@ -2391,8 +2517,7 @@ fun meta_args_exec (meta_args as (((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Ar then (K ctxt) else Context.map_theory (Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} - {define = true} oid pos (I cid_pos) (I doc_attrs)) -) + {define = true} oid pos (I cid_pos) (I doc_attrs))) val attributes_opt = Scan.option ODL_Meta_Args_Parser.attributes @@ -2427,19 +2552,6 @@ fun get_positions ctxt x = | get _ _ = []; in get [] end; -fun prep_decls prep_var raw_vars ctxt = - let - val (vars, ctxt') = fold_map prep_var raw_vars ctxt; - val (xs, ctxt'') = ctxt' - |> Context_Position.set_visible false - |> Proof_Context.add_fixes vars - ||> Context_Position.restore_visible ctxt'; - val _ = - Context_Position.reports ctxt'' - (map (Binding.pos_of o #1) vars ~~ - map (Variable.markup_entity_def ctxt'' ##> Properties.remove Markup.kindN) xs); - in ((vars, xs), ctxt'') end; - fun dummy_frees ctxt xs tss = let val names = @@ -2450,19 +2562,18 @@ fun dummy_frees ctxt xs tss = fun prep_spec_open prep_var parse_prop raw_vars raw_params raw_prems raw_concl ctxt = let - val ((vars, xs), vars_ctxt) = prep_decls prep_var raw_vars ctxt; + val ((vars, xs), vars_ctxt) = DOF_core.prep_decls prep_var raw_vars ctxt; val (ys, params_ctxt) = vars_ctxt |> fold_map prep_var raw_params |-> Proof_Context.add_fixes; - val props = map (parse_prop params_ctxt) (raw_concl :: raw_prems) |> singleton (dummy_frees params_ctxt (xs @ ys)); - val concl :: prems = Syntax.check_props params_ctxt props; + val props' = props |> 
map (DOF_core.elaborate_term' ctxt) + val concl :: prems = Syntax.check_props params_ctxt props'; val spec = Logic.list_implies (prems, concl); - val spec' = DOF_core.elaborate_term ctxt spec - val spec_ctxt = Variable.declare_term spec' params_ctxt; + val spec_ctxt = Variable.declare_term spec params_ctxt; fun get_pos x = maps (get_positions spec_ctxt x) props; - in ((vars, xs, get_pos, spec'), spec_ctxt) end; + in ((vars, xs, get_pos, spec), spec_ctxt) end; val read_spec_open = prep_spec_open Proof_Context.read_var Syntax.parse_prop; @@ -2650,7 +2761,8 @@ val _ = theorem \<^command_keyword>\proposition*\ false "propositio val _ = theorem \<^command_keyword>\schematic_goal*\ true "schematic goal"; in end \ - + + section\ Syntax for Ontological Antiquotations (the '' View'' Part II) \ ML\ @@ -2679,7 +2791,10 @@ fun check_and_mark ctxt cid_decl ({strict_checking = strict}) {inline=inline_req val markup = name' |> Name_Space.markup (Name_Space.space_of_table instances) (* this sends a report for a ref application to the PIDE interface ... *) val _ = Context_Position.report ctxt pos markup; - val _ = if not(DOF_core.is_subclass ctxt cid cid_decl) + val cid' = if cid = DOF_core.default_cid + then cid + else DOF_core.get_onto_class_cid thy cid |> (fst o fst) + val _ = if not(DOF_core.is_subclass ctxt cid' cid_decl) then error("reference ontologically inconsistent: " ^ name ^ " is an instance of " ^ cid ^ " and " ^ cid @@ -2729,7 +2844,6 @@ fun docitem_antiquotation bind cid = Document_Output.antiquotation_raw bind docitem_antiquotation_parser (pretty_docitem_antiquotation_generic cid) - fun check_and_mark_term ctxt oid = let val ctxt' = Context.proof_of ctxt @@ -2905,28 +3019,22 @@ fun read_parent NONE ctxt = (NONE, ctxt) | T => error ("Bad parent record specification: " ^ Syntax.string_of_typ ctxt T)); - - fun read_fields raw_fields ctxt = let - val Ts = Syntax.read_typs ctxt (map (fn ((_, raw_T, _),_) => raw_T) raw_fields); - val terms = map ((Option.map (Syntax.read_term ctxt)) o snd) raw_fields - fun test t1 t2 = Sign.typ_instance (Proof_Context.theory_of ctxt) - (t1, Value_Command.Docitem_Parser.generalize_typ 0 t2) - fun check_default (ty,SOME trm) = - let val ty' = (type_of trm) - in if test ty ty' - then () - else error("type mismatch:"^ - (Syntax.string_of_typ ctxt ty')^":"^ - (Syntax.string_of_typ ctxt ty)) - end - (* BAD STYLE : better would be catching exn. *) - |check_default (_,_) = () - val fields = map2 (fn ((x, _, mx),_) => fn T => (x, T, mx)) raw_fields Ts; - val _ = map check_default (Ts ~~ terms) (* checking types conform to defaults *) - val ctxt' = fold Variable.declare_typ Ts ctxt; - in (fields, terms, ctxt') end; + val fields' = raw_fields |> map (apfst (fn (b, ty, mx) => (b, Syntax.read_typ ctxt ty, mx))) + |> map (apsnd (Option.map (Syntax.parse_term ctxt #> DOF_core.elaborate_term' ctxt))) + fun check_default ctxt ((b, ty, _), SOME trm) = + let val parsed_prop = Const (\<^const_name>\Pure.eq\, dummyT) $ Free (Binding.name_of b, dummyT) $ trm + val raw_vars = [(b, SOME ty, NoSyn)] + val (_, vars_ctxt) = DOF_core.prep_decls Proof_Context.cert_var raw_vars ctxt + (* check typ unification *) + val _ = Syntax.check_prop vars_ctxt parsed_prop + in () end + (* BAD STYLE : better would be catching exn. 
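+     A sketch of that alternative (assumption: the ill-typed default surfaces as the usual ERROR
+     exception of Syntax.check_prop; the message text is only illustrative):
+       val _ = Syntax.check_prop vars_ctxt parsed_prop
+               handle ERROR msg =>
+                 error ("ill-typed default for " ^ Binding.name_of b ^ ": " ^ msg)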
*) + | check_default _ (_, NONE)= () + val _ = map (check_default ctxt) fields' + val ctxt' = fields' |> map (#2 o fst) |> (fn Ts => fold Variable.declare_typ Ts ctxt); + in (map fst fields', map snd fields', ctxt') end; fun def_cmd (decl, spec, prems, params) lthy = let @@ -2944,10 +3052,8 @@ fun define_cond bind f_sty read_cond (ctxt:local_theory) = val args = (SOME(bind,NONE,NoSyn), (Binding.empty_atts,eq),[],[]) in def_cmd args ctxt end -fun define_inv cid_long (bind, inv) thy = +fun define_inv (params, cid_long) (bind, inv) thy = let val inv_term = Syntax.read_term (Proof_Context.init_global thy) inv - (* Rewrite selectors types to allow invariants on attributes of the superclasses - using the polymorphic type of the class *) fun update_attribute_type thy class_scheme_ty cid_long (Const (s, Type (st,[ty, ty'])) $ t) = let @@ -2957,7 +3063,7 @@ fun define_inv cid_long (bind, inv) thy = NONE => Const (s, Type(st,[ty, ty'])) $ (update_attribute_type thy class_scheme_ty cid_long t) | SOME _ => if DOF_core.is_subclass_global thy cid_long cid - then let val Type(_, tys') = ty + then let val Type(st', tys') = ty in if tys' = [\<^typ>\unit\] then Const (s, Type(st,[ty, ty'])) $ (update_attribute_type thy class_scheme_ty cid_long t) @@ -2976,31 +3082,34 @@ fun define_inv cid_long (bind, inv) thy = then Free (s, class_scheme_ty) else Free (s, ty) | update_attribute_type _ _ _ t = t - val inv_ty = Syntax.read_typ (Proof_Context.init_global thy) - (Name.aT ^ " " ^ cid_long ^ schemeN) + val zeta = (singleton (Name.variant_list (map #1 params)) "'z", \<^sort>\type\) + val typ = Type (cid_long ^ schemeN, map TFree (params @ [zeta])) (* Update the type of each attribute update function to match the type of the current class. *) - val inv_term' = update_attribute_type thy inv_ty cid_long inv_term - val eq_inv_ty = inv_ty --> HOLogic.boolT - val abs_term = Term.lambda (Free (instance_placeholderN, inv_ty)) inv_term' + val inv_term' = update_attribute_type thy typ cid_long inv_term + val eq_inv_ty = typ --> HOLogic.boolT + val abs_term = Term.lambda (Free (instance_placeholderN, typ)) inv_term' in thy |> Named_Target.theory_map (define_cond bind eq_inv_ty abs_term) end fun add_doc_class_cmd overloaded (raw_params, binding) raw_parent raw_fieldsNdefaults reject_Atoms regexps invariants thy = - let + let val bind_pos = Binding.pos_of binding val name = Binding.name_of binding val ctxt = Proof_Context.init_global thy; val params = map (apsnd (Typedecl.read_constraint ctxt)) raw_params; val ctxt1 = fold (Variable.declare_typ o TFree) params ctxt; - fun cid thy = (* takes class synonyms into account *) - DOF_core.get_onto_class_name_global' name thy + fun markup2string s = String.concat (List.filter (fn c => c <> Symbol.DEL) + (Symbol.explode (YXML.content_of s))) + val name' = + case raw_parent of + NONE => DOF_core.default_cid + | SOME s => markup2string s |> (fn s => DOF_core.get_onto_class_name_global' s thy) + fun cid thy = DOF_core.get_onto_class_name_global name thy val (parent, ctxt2) = read_parent raw_parent ctxt1; - (* takes class synonyms into account *) - val parent' = parent |> Option.map (fn (x, y) => (x, DOF_core.get_onto_class_name_global' y thy)) - val parent_cid_long = if is_some parent' - then parent' |> the |> snd - else DOF_core.default_cid + val parent' = case parent of + NONE => NONE + | SOME (Ts, _) => SOME (Ts, name') val raw_fieldsNdefaults' = filter (fn((bi,_,_),_) => Binding.name_of bi <> traceN) raw_fieldsNdefaults val _ = if length raw_fieldsNdefaults' <> length 
raw_fieldsNdefaults @@ -3008,22 +3117,21 @@ fun add_doc_class_cmd overloaded (raw_params, binding) else () val raw_fieldsNdefaults'' = if null regexps then raw_fieldsNdefaults' - else trace_attr_ts::raw_fieldsNdefaults' - val (fields, terms, ctxt3) = read_fields raw_fieldsNdefaults'' ctxt2; - val fieldsNterms = (map (fn (a,b,_) => (a,b)) fields) ~~ terms + else trace_attr_ts::raw_fieldsNdefaults' + val (fields, parsed_terms, ctxt3) = read_fields raw_fieldsNdefaults'' ctxt2; + val fieldsNterms = (map (fn (a,b,_) => (a,b)) fields) ~~ parsed_terms val fieldsNterms' = map (fn ((x,y),z) => (x,y,z)) fieldsNterms val params' = map (Proof_Context.check_tfree ctxt3) params; fun check_n_filter thy (bind,ty,mf) = - case DOF_core.get_attribute_info parent_cid_long (Binding.name_of bind) thy of - NONE => SOME(bind,ty,mf) - | SOME{def_occurrence,long_name,typ,...} - => if ty = typ - then (warning("overriding attribute:" - ^ long_name - ^ " in doc class:" - ^ def_occurrence); - NONE) - else error("no overloading allowed.") + case DOF_core.get_attribute_info name' (Binding.name_of bind) thy of + NONE => SOME(bind,ty,mf) + | SOME{def_occurrence,long_name,typ,...} => + if ty = typ + then (warning("overriding attribute:" + ^ long_name + ^ " in doc class:" + ^ def_occurrence); NONE) + else error("no overloading allowed.") val record_fields = map_filter (check_n_filter thy) fields fun mk_eq_pair name = (Free (name, doc_class_rexp_T), doc_class_rexp_t name) |> mk_meta_eq @@ -3048,12 +3156,12 @@ fun add_doc_class_cmd overloaded (raw_params, binding) else add [DOF_core.tag_attr] invariants' {virtual=true}) |> (fn thy => OntoLinkParser.docitem_antiquotation binding (cid thy) thy) (* defines the ontology-checked text antiquotation to this document class *) - |> (fn thy => fold(define_inv (cid thy)) (invariants') thy) + |> (fn thy => fold(define_inv (params', (cid thy))) (invariants') thy) (* The function declare_ISA_class_accessor_and_check_instance uses a prefix because the class name is already bound to "doc_class Regular_Exp.rexp" constant by add_doc_class_cmd function *) - |> (fn thy => ISA_core.declare_ISA_class_accessor_and_check_instance (cid thy, bind_pos) thy) - |> (fn thy => ISA_core.declare_class_instances_annotation (cid thy, bind_pos) thy) + |> (fn thy => ISA_core.declare_ISA_class_accessor_and_check_instance (params', cid thy, bind_pos) thy) + |> (fn thy => ISA_core.declare_class_instances_annotation (params', cid thy, bind_pos) thy) end; diff --git a/Isabelle_DOF/thys/manual/M_04_RefMan.thy b/Isabelle_DOF/thys/manual/M_04_RefMan.thy index 1f78055..292b7fe 100644 --- a/Isabelle_DOF/thys/manual/M_04_RefMan.thy +++ b/Isabelle_DOF/thys/manual/M_04_RefMan.thy @@ -474,7 +474,7 @@ text\ \<^rail>\ (@@{command "term*"} ('[' meta_args ']')? '\' HOL_term '\' | (@@{command "value*"} - | @@{command "assert*"}) \ ('[' meta_args ']')? ('[' evaluator ']')? '\' HOL_term '\' + | @@{command "assert*"}) \ ('[' evaluator ']')? ('[' meta_args ']')? '\' HOL_term '\' | (@@{command "definition*"}) ('[' meta_args ']')? ('... 
see ref manual') | (@@{command "lemma*"} | @@{command "theorem*"} | @@{command "corollary*"} From 9812bc05171782a796d1213f5c6cb46303914ea3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 10:11:48 +0200 Subject: [PATCH 13/27] Use binding for instances name --- Isabelle_DOF-Example-I/IsaDofApplications.thy | 8 +- .../CENELEC_50128/mini_odo/mini_odo.thy | 20 ++-- .../TR_MyCommentedIsabelle.thy | 2 +- Isabelle_DOF-Unit-Tests/Latex_Tests.thy | 4 +- Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy | 4 +- Isabelle_DOF-Unit-Tests/TestKit.thy | 9 +- Isabelle_DOF/thys/Isa_COL.thy | 34 ++++--- Isabelle_DOF/thys/Isa_DOF.thy | 98 ++++++++++--------- Isabelle_DOF/thys/manual/M_02_Background.thy | 4 +- Isabelle_DOF/thys/manual/M_03_GuidedTour.thy | 6 +- Isabelle_DOF/thys/manual/M_04_RefMan.thy | 56 +++++------ .../thys/manual/M_05_Implementation.thy | 6 +- 12 files changed, 130 insertions(+), 121 deletions(-) diff --git a/Isabelle_DOF-Example-I/IsaDofApplications.thy b/Isabelle_DOF-Example-I/IsaDofApplications.thy index 098ebc7..914243b 100644 --- a/Isabelle_DOF-Example-I/IsaDofApplications.thy +++ b/Isabelle_DOF-Example-I/IsaDofApplications.thy @@ -635,20 +635,20 @@ text\ We present a selection of interaction scenarios @{example \sc and @{example \cenelec_onto\} with Isabelle/PIDE instrumented by \<^isadof>. \ (*<*) -declare_reference*["text-elements"::float] +declare_reference*["text_elements"::float] declare_reference*["hyperlinks"::float] (*>*) subsection*[scholar_pide::example]\ A Scholarly Paper \ -text\ In @{float (unchecked) "text-elements"}~(a) -and @{float (unchecked) "text-elements"}~(b)we show how +text\ In @{float (unchecked) "text_elements"}~(a) +and @{float (unchecked) "text_elements"}~(b)we show how hovering over links permits to explore its meta-information. Clicking on a document class identifier permits to hyperlink into the corresponding class definition (@{float (unchecked) "hyperlinks"}~(a)); hovering over an attribute-definition (which is qualified in order to disambiguate; @{float (unchecked) "hyperlinks"}~(b)). \ -text*["text-elements"::float, +text*["text_elements"::float, main_caption="\Exploring text elements.\"] \ @{fig_content (width=53, height=5, caption="Exploring a reference of a text element.") "figures/Dogfood-II-bgnd1.png" diff --git a/Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/mini_odo.thy b/Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/mini_odo.thy index 3c2cb4d..e36acc4 100644 --- a/Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/mini_odo.thy +++ b/Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/mini_odo.thy @@ -102,13 +102,13 @@ text\ functioning of the system and for its integration into the system as a whole. In particular, we need to make the following assumptions explicit: \<^vs>\-0.3cm\\ -text*["perfect-wheel"::assumption] +text*["perfect_wheel"::assumption] \\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\-0.3cm\\ -text*["no-slip"::assumption] +text*["no_slip"::assumption] \\<^item> the slip between the trains wheel and the track negligible. 
\<^vs>\-0.3cm\\ -text*["constant-teeth-dist"::assumption] +text*["constant_teeth_dist"::assumption] \\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\-0.3cm\\ -text*["constant-sampling-rate"::assumption] +text*["constant_sampling_rate"::assumption] \\<^item> the sampling rate of positions is a given constant.\ text\ @@ -126,13 +126,13 @@ text\ subsection\Capturing ``System Architecture.''\ -figure*["three-phase"::figure,relative_width="70",file_src="''figures/three-phase-odo.pdf''"] +figure*["three_phase"::figure,relative_width="70",file_src="''figures/three-phase-odo.pdf''"] \An odometer with three sensors \C1\, \C2\, and \C3\.\ text\ The requirements analysis also contains a document \<^doc_class>\SYSAD\ (\<^typ>\system_architecture_description\) that contains technical drawing of the odometer, - a timing diagram (see \<^figure>\three-phase\), and tables describing the encoding of the position + a timing diagram (see \<^figure>\three_phase\), and tables describing the encoding of the position for the possible signal transitions of the sensors \C1\, \C2\, and \C3\. \ @@ -146,7 +146,7 @@ text\ sub-system configuration. \ (*<*) -declare_reference*["df-numerics-encshaft"::figure] +declare_reference*["df_numerics_encshaft"::figure] (*>*) subsection\Capturing ``Required Performances.''\ text\ @@ -160,9 +160,9 @@ text\ The requirement analysis document describes the physical environment, the architecture of the measuring device, and the required format and precision of the measurements of the odometry - function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\ + function as represented (see @{figure (unchecked) "df_numerics_encshaft"}).\ -figure*["df-numerics-encshaft"::figure,relative_width="76",file_src="''figures/df-numerics-encshaft.png''"] +figure*["df_numerics_encshaft"::figure,relative_width="76",file_src="''figures/df-numerics-encshaft.png''"] \Real distance vs. discrete distance vs. shaft-encoder sequence\ @@ -215,7 +215,7 @@ text\ concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of classical Calculus. \SOME\ is the Hilbert choice operator from HOL; the definitions of the model parameters admit all possible positive values as uninterpreted constants. Our - \<^assumption>\perfect-wheel\ is translated into a calculation of the circumference of the + \<^assumption>\perfect_wheel\ is translated into a calculation of the circumference of the wheel, while \\s\<^sub>r\<^sub>e\<^sub>s\, the resolution of the odometer, can be calculated from the these parameters. 
HOL-Analysis permits to formalize the fundamental physical observables: \ diff --git a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy index a80b722..e9dd5fb 100644 --- a/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy +++ b/Isabelle_DOF-Examples-Extra/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy @@ -807,7 +807,7 @@ text\ They reflect the Pure logic depicted in a number of presentations s Notated as logical inference rules, these operations were presented as follows: \ -text*["text-elements"::float, +text*["text_elements"::float, main_caption="\Kernel Inference Rules.\"] \ @{fig_content (width=48, caption="Pure Kernel Inference Rules I.") "figures/pure-inferences-I.pdf" diff --git a/Isabelle_DOF-Unit-Tests/Latex_Tests.thy b/Isabelle_DOF-Unit-Tests/Latex_Tests.thy index 4a31928..7210ca5 100644 --- a/Isabelle_DOF-Unit-Tests/Latex_Tests.thy +++ b/Isabelle_DOF-Unit-Tests/Latex_Tests.thy @@ -390,11 +390,11 @@ text-latex\ ML\ fun gen_enriched_document_command3 name {body} cid_transform attr_transform markdown - (((((oid,pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, + ((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, xstring_opt:(xstring * Position.T) option), toks:Input.source list) = gen_enriched_document_command2 name {body=body} cid_transform attr_transform markdown - (((((oid,pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, + ((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, xstring_opt:(xstring * Position.T) option), toks) \ \Hack : drop second and thrd args.\ diff --git a/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy b/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy index 511466a..c4dc9a5 100644 --- a/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy +++ b/Isabelle_DOF-Unit-Tests/OutOfOrderPresntn.thy @@ -382,11 +382,11 @@ text-latex\ ML\ fun gen_enriched_document_command3 name {body} cid_transform attr_transform markdown - (((((oid,pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, + ((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, xstring_opt:(xstring * Position.T) option), toks:Input.source list) = gen_enriched_document_command2 name {body=body} cid_transform attr_transform markdown - (((((oid,pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, + ((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t, xstring_opt:(xstring * Position.T) option), toks) \ \Hack : drop second and thrd args.\ diff --git a/Isabelle_DOF-Unit-Tests/TestKit.thy b/Isabelle_DOF-Unit-Tests/TestKit.thy index 16c1c90..276c08e 100644 --- a/Isabelle_DOF-Unit-Tests/TestKit.thy +++ b/Isabelle_DOF-Unit-Tests/TestKit.thy @@ -38,7 +38,8 @@ fun gen_enriched_document_command2 name {body} cid_transform attr_transform mark toks_list:Input.source list) : theory -> theory = let val toplvl = Toplevel.theory_toplevel - val (((oid,pos),cid_pos), doc_attrs) = meta_args + val ((binding,cid_pos), doc_attrs) = meta_args + val oid = Binding.name_of binding val oid' = if meta_args = ODL_Meta_Args_Parser.empty_meta_args then "output" else oid @@ -74,7 +75,7 @@ fun gen_enriched_document_command2 name {body} cid_transform attr_transform mark else Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = false} {define = true} - oid pos (cid_transform cid_pos) (attr_transform 
doc_attrs)) + binding (cid_transform cid_pos) (attr_transform doc_attrs)) (* ... generating the level-attribute syntax *) in handle_margs_opt #> (fn thy => (app (check_n_tex_text thy) toks_list; thy)) end; @@ -139,10 +140,10 @@ val _ = >> (Toplevel.theory o update_instance_command)); val _ = - let fun create_and_check_docitem ((((oid, pos),cid_pos),doc_attrs),src) thy = + let fun create_and_check_docitem (((binding,cid_pos),doc_attrs),src) thy = (Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline=true} - {define = false} oid pos (cid_pos) (doc_attrs) thy) + {define = false} binding (cid_pos) (doc_attrs) thy) handle ERROR msg => (if error_match src msg then (writeln ("Correct error: "^msg^": reported.");thy) else error"Wrong error reported") diff --git a/Isabelle_DOF/thys/Isa_COL.thy b/Isabelle_DOF/thys/Isa_COL.thy index 3e224b5..a5e007f 100644 --- a/Isabelle_DOF/thys/Isa_COL.thy +++ b/Isabelle_DOF/thys/Isa_COL.thy @@ -429,11 +429,11 @@ fun convert_src_from_margs ctxt (X, (((str,_),value)::R)) = fun float_command (name, pos) descr cid = let fun set_default_class NONE = SOME(cid,pos) |set_default_class (SOME X) = SOME X - fun create_instance ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = + fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} - {define = true} oid pos (set_default_class cid_pos) doc_attrs + {define = true} binding (set_default_class cid_pos) doc_attrs fun generate_fig_ltx_ctxt ctxt cap_src oid body = Latex.macro0 "centering" @ body @@ -441,25 +441,31 @@ fun float_command (name, pos) descr cid = @ Latex.macro "label" (DOF_core.get_instance_name_global oid (Proof_Context.theory_of ctxt) |> DOF_core.output_name |> Latex.string) - fun parse_and_tex (margs as (((oid, _),_), _), cap_src) ctxt = - (convert_src_from_margs ctxt margs) - |> pair (upd_caption (K Input.empty) #> convert_meta_args ctxt margs) - |> fig_content ctxt - |> generate_fig_ltx_ctxt ctxt cap_src oid - |> (Latex.environment ("figure") ) + fun parse_and_tex (margs as ((binding,_), _), cap_src) ctxt = + let val oid = Binding.name_of binding + in + (convert_src_from_margs ctxt margs) + |> pair (upd_caption (K Input.empty) #> convert_meta_args ctxt margs) + |> fig_content ctxt + |> generate_fig_ltx_ctxt ctxt cap_src oid + |> (Latex.environment ("figure") ) + end in Monitor_Command_Parser.onto_macro_cmd_command (name, pos) descr create_instance parse_and_tex end fun listing_command (name, pos) descr cid = let fun set_default_class NONE = SOME(cid,pos) |set_default_class (SOME X) = SOME X - fun create_instance ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = + fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} - {define = true} oid pos (set_default_class cid_pos) doc_attrs - fun parse_and_tex (margs as (((_, pos),_), _), _) _ = - ISA_core.err ("Not yet implemented.\n Please use text*[oid::listing]\\ instead.") pos + {define = true} binding (set_default_class cid_pos) doc_attrs + fun parse_and_tex (margs as ((binding,_), _), _) _ = + let val pos = Binding.pos_of binding + in + ISA_core.err ("Not yet implemented.\n Please use text*[oid::listing]\\ instead.") pos + end in Monitor_Command_Parser.onto_macro_cmd_command (name, pos) descr create_instance parse_and_tex end @@ -858,11 
+864,11 @@ fun convert_meta_args ctxt (X, (((str,_),value) :: R)) = fun frame_command (name, pos) descr cid = let fun set_default_class NONE = SOME(cid,pos) |set_default_class (SOME X) = SOME X - fun create_instance ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = + fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} - {define = true} oid pos (set_default_class cid_pos) doc_attrs + {define = true} binding (set_default_class cid_pos) doc_attrs fun titles_src ctxt frametitle framesubtitle src = Latex.string "{" @ Document_Output.output_document ctxt {markdown = false} frametitle diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index efa7d84..ba42a64 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -747,11 +747,10 @@ fun define_doc_class_global (params', binding) parent fields rexp reject_Atoms i , rejectS, reg_exps, invs')) end -fun define_object_global {define = define} ((oid, pos), instance) thy = +fun define_object_global {define = define} (binding, instance) thy = let - val binding = if Long_Name.is_qualified oid - then Binding.make (Long_Name.base_name oid, pos) - else Binding.make (oid, pos) + val oid = Binding.name_of binding + val pos = Binding.pos_of binding val _ = if oid = undefined_instance then error (oid ^ ": This name is reserved by the implementation" ^ Position.here pos) else () @@ -1411,11 +1410,10 @@ ML\ structure ODL_Meta_Args_Parser = struct -type meta_args_t = ((string * Position.T) * - (string * Position.T) option) - * ((string * Position.T) * string) list +type meta_args_t = (binding * (string * Position.T) option) + * ((string * Position.T) * string) list -val empty_meta_args = ((("", Position.none), NONE), []) +val empty_meta_args = ((Binding.empty, NONE), []) val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore); val improper = Scan.many is_improper; (* parses white-space and comments *) @@ -1435,7 +1433,7 @@ val attribute_upd : (((string * Position.T) * string) * string) parser = : (((string * Position.T) * string) * string) parser; val reference = - Parse.position Parse.name + Parse.binding --| improper -- Scan.option (Parse.$$$ "::" -- improper @@ -1553,7 +1551,7 @@ fun cid_2_cidType cid_long thy = in fold (fn x => fn y => Type(ty_name x,[y])) (fathers cid_long) \<^Type>\unit\ end -fun create_default_object thy oid class_name typ = +fun create_default_object thy binding class_name typ = let val purified_class_name = String.translate (fn #"." 
=> "_" | x => String.implode [x]) class_name fun attr_to_s (binding, _, _) = purified_class_name ^ "_" @@ -1583,11 +1581,8 @@ fun create_default_object thy oid class_name typ = val term = test_class |> cons (Long_Name.qualify class_name makeN) |> space_implode Symbol.space val ctxt = Proof_Context.init_global thy val term' = term |> Syntax.parse_term ctxt |> DOF_core.elaborate_term' ctxt - (* rename oid to be compatible with binding naming policy *) - val clean_oid = translate_string (fn ":" => "_" | "-" => "_" | c => c); - val oid' = clean_oid oid - val parsed_prop = Const (\<^const_name>\Pure.eq\, dummyT) $ Free (oid', dummyT) $ term' - val raw_vars = [(Binding.name oid', SOME typ, NoSyn)] + val parsed_prop = Const (\<^const_name>\Pure.eq\, dummyT) $ Free (Binding.name_of binding, dummyT) $ term' + val raw_vars = [(binding, SOME typ, NoSyn)] val (_, vars_ctxt) = DOF_core.prep_decls Proof_Context.cert_var raw_vars ctxt val concl = Syntax.check_prop vars_ctxt parsed_prop in Logic.dest_equals concl |> snd end @@ -1685,8 +1680,9 @@ fun msg_intro get n oid cid = ("accepts clause " ^ Int.toString n ^ get (" not enabled for", " rejected") ^ " doc_class: " ^ cid) -fun register_oid_cid_in_open_monitors oid _ (name, pos') thy = +fun register_oid_cid_in_open_monitors binding (name, pos') thy = let + val oid = Binding.name_of binding val cid_long= name fun is_enabled (n, monitor_info) = if exists (DOF_core.is_subclass_global thy cid_long) @@ -1784,8 +1780,9 @@ fun register_oid_cid_in_open_monitors oid _ (name, pos') thy = DOF_core.Monitor_Info.map (fold update_info delta_autoS) end -fun check_invariants thy (oid, _) = +fun check_invariants thy binding = let + val oid = Binding.name_of binding val docitem_value = DOF_core.value_of oid thy val name = DOF_core.cid_of oid thy |> DOF_core.get_onto_class_cid thy |> (fst o fst) @@ -1866,8 +1863,9 @@ fun check_invariants thy (oid, _) = in thy end -fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oid pos cid_pos doc_attrs thy = +fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} binding cid_pos doc_attrs thy = let + val oid = Binding.name_of binding val (((name, args_cid), typ), pos') = check_classref is_monitor cid_pos thy val cid_pos' = (name, pos') val cid_long = fst cid_pos' @@ -1888,7 +1886,7 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi else let fun conv_attrs ((lhs, pos), rhs) = (YXML.content_of lhs,pos,"=", Syntax.parse_term (Proof_Context.init_global thy) rhs) val assns' = map conv_attrs doc_attrs - val defaults_init = create_default_object thy oid cid_long typ + val defaults_init = create_default_object thy binding cid_long typ fun conv (na, _(*ty*), parsed_term) =(Binding.name_of na, Binding.pos_of na, "=", parsed_term); val S = map conv (DOF_core.get_attribute_defaults cid_long thy); val (defaults, _(*ty*), _) = calc_update_term {mk_elaboration=false} @@ -1902,12 +1900,12 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi in (input_term, value_term') end else (\<^term>\()\, value_term') end in thy |> DOF_core.define_object_global - {define = define} ((oid, pos), DOF_core.make_instance + {define = define} (binding, DOF_core.make_instance (false, fst value_terms, (snd value_terms) |> value (Proof_Context.init_global thy), is_inline, args_cid, vcid)) - |> register_oid_cid_in_open_monitors oid pos (name, pos') + |> register_oid_cid_in_open_monitors binding (name, pos') |> (fn thy => if (* declare_reference* without arguments 
is not checked against invariants *) thy |> DOF_core.defined_of oid |> not @@ -1925,18 +1923,18 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} oi ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) |> (fn thy => if default_cid then thy else if Config.get_global thy DOF_core.invariants_checking - then check_invariants thy (oid, pos) else thy)) + then check_invariants thy binding else thy)) end end (* structure Docitem_Parser *) -fun meta_args_exec (meta_args as (((oid, pos), cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = +fun meta_args_exec (meta_args as ((binding, cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = thy |> (if meta_args = ODL_Meta_Args_Parser.empty_meta_args then (K thy) else Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} {define = true} - oid pos (I cid_pos) (I doc_attrs)) + binding (I cid_pos) (I doc_attrs)) fun value_cmd {assert=assert} meta_args_opt raw_name modes raw_t pos thy = let @@ -2082,16 +2080,17 @@ end; (* structure Value_Command *) structure Monitor_Command_Parser = struct -fun update_instance_command (((oid, pos), cid_pos), +fun update_instance_command ((binding, cid_pos), doc_attrs: (((string*Position.T)*string)*string)list) thy : theory = - let val cid = let val DOF_core.Instance {cid,...} = + let val oid = Binding.name_of binding + val cid = let val DOF_core.Instance {cid,...} = DOF_core.get_instance_global oid thy val ctxt = Proof_Context.init_global thy val instances = DOF_core.get_instances ctxt val markup = DOF_core.get_instance_name_global oid thy |> Name_Space.markup (Name_Space.space_of_table instances) - val _ = Context_Position.report ctxt pos markup; + val _ = Context_Position.report ctxt (Binding.pos_of binding) markup; in cid end val default_cid = cid = DOF_core.default_cid val (((name, cid'), typ), pos') = Value_Command.Docitem_Parser.check_classref {is_monitor = false} @@ -2127,7 +2126,7 @@ fun update_instance_command (((oid, pos), cid_pos), ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) |> (fn thy => if default_cid then thy else if Config.get_global thy DOF_core.invariants_checking - then Value_Command.Docitem_Parser.check_invariants thy (oid, pos) + then Value_Command.Docitem_Parser.check_invariants thy binding else thy) end @@ -2136,39 +2135,41 @@ fun update_instance_command (((oid, pos), cid_pos), them out into the COL -- bu *) -fun open_monitor_command ((((oid, pos), raw_parent_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = - let fun o_m_c oid pos params_cid_pos doc_attrs thy = +fun open_monitor_command (((binding, raw_parent_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) thy = + let fun o_m_c binding params_cid_pos doc_attrs thy = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor=true} (* this is a monitor *) {is_inline=false} (* monitors are always inline *) {define=true} - oid pos params_cid_pos doc_attrs thy + binding params_cid_pos doc_attrs thy fun compute_enabled_set cid thy = let val DOF_core.Onto_Class X = DOF_core.get_onto_class_global' cid thy val ralph = RegExpInterface.alphabet (#rejectS X) val aalph = RegExpInterface.alphabet (#rex X) in (aalph, ralph, map (RegExpInterface.rexp_term2da thy aalph)(#rex X)) end - fun create_monitor_entry oid thy = + fun create_monitor_entry thy = let val cid = case raw_parent_pos of - NONE => ISA_core.err ("You must specified a monitor class.") pos + NONE => ISA_core.err ("You must specified a monitor class.") (Binding.pos_of binding) | SOME (raw_parent, _) => 
DOF_core.markup2string raw_parent |> DOF_core.get_onto_class_cid thy |> (fst o fst) val (accS, rejectS, aS) = compute_enabled_set cid thy - in DOF_core.add_monitor_info (Binding.make (oid, pos)) + in DOF_core.add_monitor_info binding (DOF_core.make_monitor_info (accS, rejectS, aS)) thy end in thy - |> o_m_c oid pos raw_parent_pos doc_attrs - |> create_monitor_entry oid + |> o_m_c binding raw_parent_pos doc_attrs + |> create_monitor_entry end; -fun close_monitor_command (args as (((oid, pos), cid_pos), +fun close_monitor_command (args as ((binding, cid_pos), _: (((string*Position.T)*string)*string)list)) thy = - let fun check_if_final aS = let val i = (find_index (not o RegExpInterface.final) aS) + 1 + let val oid = Binding.name_of binding + val pos = Binding.pos_of binding + fun check_if_final aS = let val i = (find_index (not o RegExpInterface.final) aS) + 1 in if i >= 1 then Value_Command.Docitem_Parser.msg thy @@ -2192,17 +2193,18 @@ fun close_monitor_command (args as (((oid, pos), cid_pos), |> update_instance_command args |> tap (DOF_core.check_ml_invs cid_long oid {is_monitor=true}) |> (fn thy => if Config.get_global thy DOF_core.invariants_checking - then Value_Command.Docitem_Parser.check_invariants thy (oid, pos) + then Value_Command.Docitem_Parser.check_invariants thy binding else thy) |> delete_monitor_entry end fun meta_args_2_latex thy sem_attrs transform_attr - ((((lab, pos), cid_opt), attr_list) : ODL_Meta_Args_Parser.meta_args_t) = + (((binding, cid_opt), attr_list) : ODL_Meta_Args_Parser.meta_args_t) = (* for the moment naive, i.e. without textual normalization of attribute names and adapted term printing *) - let val l = DOF_core.get_instance_name_global lab thy |> DOF_core.output_name + let val lab = Binding.name_of binding + val l = DOF_core.get_instance_name_global lab thy |> DOF_core.output_name |> enclose "{" "}" |> prefix "label = " val ((cid_long, _), _) = case cid_opt of @@ -2272,7 +2274,7 @@ fun meta_args_2_latex thy sem_attrs transform_attr then (name, DOF_core.output_name value') else ISA_core.err ("Bad name of semantic attribute" ^ name ^ ": " ^ value - ^ ". Name must be ASCII") pos + ^ ". 
Name must be ASCII") (Binding.pos_of binding) else (name, value') end) in update_sem_std_attrs attrs attrs'' end val updated_sem_std_attrs = update_sem_std_attrs sem_attrs transformed_args @@ -2289,14 +2291,14 @@ fun meta_args_2_latex thy sem_attrs transform_attr (* level-attribute information management *) fun gen_enriched_document_cmd {inline} cid_transform attr_transform - ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) : theory -> theory = + (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) : theory -> theory = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = inline} - {define = true} oid pos (cid_transform cid_pos) (attr_transform doc_attrs); + {define = true} binding (cid_transform cid_pos) (attr_transform doc_attrs); fun gen_enriched_document_cmd' {inline} cid_transform attr_transform - ((((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) : theory -> theory = + (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) : theory -> theory = Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = inline} - {define = false} oid pos (cid_transform cid_pos) (attr_transform doc_attrs); + {define = false} binding (cid_transform cid_pos) (attr_transform doc_attrs); (* markup reports and document output *) @@ -2512,12 +2514,12 @@ ML\ structure ML_star_Command = struct -fun meta_args_exec (meta_args as (((oid, pos),cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) ctxt = +fun meta_args_exec (meta_args as ((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) ctxt = ctxt |> (if meta_args = ODL_Meta_Args_Parser.empty_meta_args then (K ctxt) else Context.map_theory (Value_Command.Docitem_Parser.create_and_check_docitem {is_monitor = false} {is_inline = true} - {define = true} oid pos (I cid_pos) (I doc_attrs))) + {define = true} binding (I cid_pos) (I doc_attrs))) val attributes_opt = Scan.option ODL_Meta_Args_Parser.attributes diff --git a/Isabelle_DOF/thys/manual/M_02_Background.thy b/Isabelle_DOF/thys/manual/M_02_Background.thy index 69cd237..68cdbf5 100644 --- a/Isabelle_DOF/thys/manual/M_02_Background.thy +++ b/Isabelle_DOF/thys/manual/M_02_Background.thy @@ -208,13 +208,13 @@ text\ in many features over-accomplishes the required features of \<^dof>. \ -figure*["fig:dof-ide",relative_width="95",file_src="''figures/cicm2018-combined.png''"]\ +figure*["fig_dof_ide",relative_width="95",file_src="''figures/cicm2018-combined.png''"]\ The \<^isadof> IDE (left) and the corresponding PDF (right), showing the first page of~@{cite "brucker.ea:isabelle-ontologies:2018"}.\ text\ We call the present implementation of \<^dof> on the Isabelle platform \<^isadof> . - @{figure "fig:dof-ide"} shows a screen-shot of an introductory paper on + @{figure "fig_dof_ide"} shows a screen-shot of an introductory paper on \<^isadof>~@{cite "brucker.ea:isabelle-ontologies:2018"}: the \<^isadof> PIDE can be seen on the left, while the generated presentation in PDF is shown on the right. diff --git a/Isabelle_DOF/thys/manual/M_03_GuidedTour.thy b/Isabelle_DOF/thys/manual/M_03_GuidedTour.thy index 1b0e167..859bd38 100644 --- a/Isabelle_DOF/thys/manual/M_03_GuidedTour.thy +++ b/Isabelle_DOF/thys/manual/M_03_GuidedTour.thy @@ -477,7 +477,7 @@ on the level of generated \<^verbatim>\.aux\-files, which are not n error-message and compiling it with a consistent bibtex usually makes disappear this behavior. 
\ -subsection*["using-term-aq"::technical, main_author = "Some @{author ''bu''}"] +subsection*["using_term_aq"::technical, main_author = "Some @{author ''bu''}"] \Using Term-Antiquotations\ text\The present version of \<^isadof> is the first version that supports the novel feature of @@ -577,11 +577,11 @@ term antiquotations: \ (*<*) -declare_reference*["subsec:onto-term-ctxt"::technical] +declare_reference*["subsec_onto_term_ctxt"::technical] (*>*) text\They are text-contexts equivalents to the \<^theory_text>\term*\ and \<^theory_text>\value*\ commands - for term-contexts introduced in @{technical (unchecked) \subsec:onto-term-ctxt\}\ + for term-contexts introduced in @{technical (unchecked) \subsec_onto_term_ctxt\}\ subsection\A Technical Report with Tight Checking\ text\An example of tight checking is a small programming manual to document programming trick diff --git a/Isabelle_DOF/thys/manual/M_04_RefMan.thy b/Isabelle_DOF/thys/manual/M_04_RefMan.thy index 292b7fe..7c5d09c 100644 --- a/Isabelle_DOF/thys/manual/M_04_RefMan.thy +++ b/Isabelle_DOF/thys/manual/M_04_RefMan.thy @@ -164,7 +164,7 @@ text\ to and between ontological concepts. \ -subsection*["odl-manual0"::technical]\Some Isabelle/HOL Specification Constructs Revisited\ +subsection*["odl_manual0"::technical]\Some Isabelle/HOL Specification Constructs Revisited\ text\ As ODL is an extension of Isabelle/HOL, document class definitions can therefore be arbitrarily mixed with standard HOL specification constructs. To make this manual self-contained, we present @@ -231,7 +231,7 @@ corresponding type-name \<^boxed_theory_text>\0.foo\ is not. For th definition of a \<^boxed_theory_text>\doc_class\ reject problematic lexical overlaps.\ -subsection*["odl-manual1"::technical]\Defining Document Classes\ +subsection*["odl_manual1"::technical]\Defining Document Classes\ text\ A document class\<^bindex>\document class\ can be defined using the @{command "doc_class"} keyword: \<^item> \class_id\:\<^bindex>\class\_id@\class_id\\ a type-\name\ that has been introduced @@ -350,7 +350,7 @@ layout; these commands have to be wrapped into text\ \<^item> \obj_id\:\<^index>\obj\_id@\obj_id\\ (or \<^emph>\oid\\<^index>\oid!oid@\see obj_id\\ for short) a \<^emph>\name\ - as specified in @{technical \odl-manual0\}. + as specified in @{technical \odl_manual0\}. \<^item> \meta_args\ : \<^rail>\obj_id ('::' class_id) ((',' attribute '=' HOL_term) *) \ \<^item> \<^emph>\evaluator\: from @{cite "wenzel:isabelle-isar:2020"}, evaluation is tried first using ML, @@ -465,10 +465,10 @@ text*[b::B'_test']\\ term*\@{B'_test' \b\}\ -declare_reference*["text-elements-expls"::technical] +declare_reference*["text_elements_expls"::technical] (*>*) -subsection*["subsec:onto-term-ctxt"::technical]\Ontological Term-Contexts and their Management\ +subsection*["subsec_onto_term_ctxt"::technical]\Ontological Term-Contexts and their Management\ text\ \<^item> \annotated_term_element\ \<^rail>\ @@ -503,9 +503,9 @@ for example). With the exception of the @{command "term*"}-command, the term-ant This expansion happens \<^emph>\before\ evaluation of the term, thus permitting executable HOL-functions to interact with meta-objects. The @{command "assert*"}-command allows for logical statements to be checked in the global context -(see @{technical (unchecked) \text-elements-expls\}). +(see @{technical (unchecked) \text_elements_expls\}). 
% TODO: -% Section reference @{docitem (unchecked) \text-elements-expls\} has not the right number +% Section reference @{docitem (unchecked) \text_elements_expls\} has not the right number This is particularly useful to explore formal definitions wrt. their border cases. For @{command "assert*"}, the evaluation of the term can be disabled with the \<^boxed_theory_text>\disable_assert_evaluation\ theory attribute: @@ -558,7 +558,7 @@ of this meta-object. The latter leads to a failure of the entire command. \ (*<*) -declare_reference*["sec:advanced"::technical] +declare_reference*["sec_advanced"::technical] (*>*) subsection\Status and Query Commands\ @@ -586,7 +586,7 @@ text\ The raw term will be available in the \input_term\ field of \<^theory_text>\print_doc_items\ output and, \<^item> \<^theory_text>\check_doc_global\ checks if all declared object references have been defined, all monitors are in a final state, and checks the final invariant - on all objects (cf. @{technical (unchecked) \sec:advanced\}) + on all objects (cf. @{technical (unchecked) \sec_advanced\}) \ subsection\Macros\ @@ -738,7 +738,7 @@ text\The command syntax follows the implicit convention to add a ``*'' to distinguish them from the (similar) standard Isabelle text-commands which are not ontology-aware.\ -subsection*["text-elements"::technical]\The Ontology \<^verbatim>\scholarly_paper\\ +subsection*["text_elements"::technical]\The Ontology \<^verbatim>\scholarly_paper\\ (*<*) ML\val toLaTeX = String.translate (fn c => if c = #"_" then "\\_" else String.implode[c])\ ML\writeln (DOF_core.print_doc_class_tree @@ -821,9 +821,9 @@ or \text*[\::example, main_author = "Some(@{author \bu\})"] \ \ \\} where \<^boxed_theory_text>\"''bu''"\ is a string presentation of the reference to the author -text element (see below in @{docitem (unchecked) \text-elements-expls\}). +text element (see below in @{docitem (unchecked) \text_elements_expls\}). % TODO: -% Section reference @{docitem (unchecked) \text-elements-expls\} has not the right number +% Section reference @{docitem (unchecked) \text_elements_expls\} has not the right number \ text\Some of these concepts were supported as command-abbreviations leading to the extension @@ -866,7 +866,7 @@ of Isabelle is its ability to handle both, and to establish links between both w Therefore, the formal assertion command has been integrated to capture some form of formal content.\ -subsubsection*["text-elements-expls"::example]\Examples\ +subsubsection*["text_elements_expls"::example]\Examples\ text\ While the default user interface for class definitions via the @@ -1018,9 +1018,9 @@ schemata: -section*["sec:advanced"::technical]\Advanced ODL Concepts\ +section*["sec_advanced"::technical]\Advanced ODL Concepts\ -subsection*["sec:example"::technical]\Example\ +subsection*["sec_example"::technical]\Example\ text\We assume in this section the following local ontology: @{boxed_theory_text [display]\ @@ -1089,11 +1089,11 @@ text\ \ (*<*) -declare_reference*["sec:monitors"::technical] -declare_reference*["sec:low_level_inv"::technical] +declare_reference*["sec_monitors"::technical] +declare_reference*["sec_low_level_inv"::technical] (*>*) -subsection*["sec:class_inv"::technical]\ODL Class Invariants\ +subsection*["sec_class_inv"::technical]\ODL Class Invariants\ text\ Ontological classes as described so far are too liberal in many situations. @@ -1144,7 +1144,7 @@ text\ Hence, the \<^boxed_theory_text>\inv1\ invariant is checked when the instance \<^boxed_theory_text>\testinv2\ is defined. 
- Now let's add some invariants to our example in \<^technical>\sec:example\. + Now let's add some invariants to our example in \<^technical>\sec_example\. For example, one would like to express that any instance of a \<^boxed_theory_text>\result\ class finally has a non-empty property list, if its \<^boxed_theory_text>\kind\ is \<^boxed_theory_text>\proof\, or that @@ -1178,22 +1178,22 @@ text\ declare[[invariants_checking_with_tactics = true]]\} There are still some limitations with this high-level syntax. For now, the high-level syntax does not support the checking of - specific monitor behaviors (see @{technical (unchecked) "sec:monitors"}). + specific monitor behaviors (see @{technical (unchecked) "sec_monitors"}). For example, one would like to delay a final error message till the closing of a monitor. For this use-case you can use low-level class invariants - (see @{technical (unchecked) "sec:low_level_inv"}). + (see @{technical (unchecked) "sec_low_level_inv"}). Also, for now, term-antiquotations can not be used in an invariant formula. \ -subsection*["sec:low_level_inv"::technical]\ODL Low-level Class Invariants\ +subsection*["sec_low_level_inv"::technical]\ODL Low-level Class Invariants\ text\ If one want to go over the limitations of the actual high-level syntax of the invariant, one can define a function using SML. A formulation, in SML, of the class-invariant \<^boxed_theory_text>\has_property\ - in \<^technical>\sec:class_inv\, defined in the supposedly \Low_Level_Syntax_Invariants\ theory + in \<^technical>\sec_class_inv\, defined in the supposedly \Low_Level_Syntax_Invariants\ theory (note the long name of the class), is straight-forward: @@ -1222,7 +1222,7 @@ val _ = Theory.setup (DOF_core.make_ml_invariant (check_result_inv, cid_long) \<^boxed_theory_text>\oid\ is bound to a variable here and can therefore not be statically expanded. \ -subsection*["sec:monitors"::technical]\ODL Monitors\ +subsection*["sec_monitors"::technical]\ODL Monitors\ text\ We call a document class with an \accepts_clause\ a \<^emph>\monitor\.\<^bindex>\monitor\ Syntactically, an \accepts_clause\\<^index>\accepts\_clause@\accepts_clause\\ contains a regular expression over class identifiers. @@ -1291,18 +1291,18 @@ text\ sections. For now, the high-level syntax of invariants does not support the checking of specific monitor behaviors like the one just described and you must use - the low-level class invariants (see @{technical "sec:low_level_inv"}). + the low-level class invariants (see @{technical "sec_low_level_inv"}). Low-level invariants checking can be set up to be triggered when opening a monitor, when closing a monitor, or both by using the \<^ML>\DOF_core.add_opening_ml_invariant\, \<^ML>\DOF_core.add_closing_ml_invariant\, or \<^ML>\DOF_core.add_ml_invariant\ commands respectively, to add the invariants to the theory context - (See @{technical "sec:low_level_inv"} for an example). + (See @{technical "sec_low_level_inv"} for an example). 
\ -subsection*["sec:queries_on_instances"::technical]\Queries On Instances\ +subsection*["sec_queries_on_instances"::technical]\Queries On Instances\ text\ Any class definition generates term antiquotations checking a class instance or @@ -1414,7 +1414,7 @@ text\ \ -section*["document-templates"::technical]\Defining Document Templates\ +section*["document_templates"::technical]\Defining Document Templates\ subsection\The Core Template\ text\ diff --git a/Isabelle_DOF/thys/manual/M_05_Implementation.thy b/Isabelle_DOF/thys/manual/M_05_Implementation.thy index fc1207a..b5dadd0 100644 --- a/Isabelle_DOF/thys/manual/M_05_Implementation.thy +++ b/Isabelle_DOF/thys/manual/M_05_Implementation.thy @@ -188,7 +188,7 @@ text\ section\Programming Class Invariants\ text\ - See \<^technical>\sec:low_level_inv\. + See \<^technical>\sec_low_level_inv\. \ section\Implementing Monitors\ @@ -203,7 +203,7 @@ text\ val next : automaton -> env -> cid -> automaton\} where \<^boxed_sml>\env\ is basically a map between internal automaton states and class-id's (\<^boxed_sml>\cid\'s). An automaton is said to be \<^emph>\enabled\ for a class-id, - iff it either occurs in its accept-set or its reject-set (see @{docitem "sec:monitors"}). During + iff it either occurs in its accept-set or its reject-set (see @{docitem "sec_monitors"}). During top-down document validation, whenever a text-element is encountered, it is checked if a monitor is \emph{enabled} for this class; in this case, the \<^boxed_sml>\next\-operation is executed. The transformed automaton recognizing the suffix is stored in \<^boxed_sml>\docobj_tab\ if @@ -228,7 +228,7 @@ text\ \expandafter\providekeycommand\csname isaDof.#1\endcsname}%\} The \<^LaTeX>-generator of \<^isadof> maps each \<^boxed_theory_text>\doc_item\ to an \<^LaTeX>-environment (recall - @{docitem "text-elements"}). As generic \<^boxed_theory_text>\doc_item\s are derived from the text element, + @{docitem "text_elements"}). As generic \<^boxed_theory_text>\doc_item\s are derived from the text element, the environment \inlineltx|isamarkuptext*| builds the core of \<^isadof>'s \<^LaTeX> implementation. \ From a78397693e6eab5da7352a54909d7adc962388f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 14:32:03 +0200 Subject: [PATCH 14/27] Update instances term antiquotation in manual --- Isabelle_DOF/thys/manual/M_04_RefMan.thy | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Isabelle_DOF/thys/manual/M_04_RefMan.thy b/Isabelle_DOF/thys/manual/M_04_RefMan.thy index 7c5d09c..d820b39 100644 --- a/Isabelle_DOF/thys/manual/M_04_RefMan.thy +++ b/Isabelle_DOF/thys/manual/M_04_RefMan.thy @@ -98,7 +98,7 @@ text\ section\The Ontology Definition Language (ODL)\ text\ ODL shares some similarities with meta-modeling languages such as UML class - models: It builds upon concepts like class, inheritance, class-instances, attributes, references + models: It builds upon concepts like class, inheritance, class_instances, attributes, references to instances, and class-invariants. Some concepts like advanced type-checking, referencing to formal entities of Isabelle, and monitors are due to its specific application in the Isabelle context. 
Conceptually, ontologies specified in ODL consist of: @@ -1315,19 +1315,19 @@ text\ or to get the list of the authors of the instances of \introduction\, it suffices to treat this meta-data as usual: @{theory_text [display,indent=5, margin=70] \ -value*\map (result.property) @{result-instances}\ -value*\map (text_section.authored_by) @{introduction-instances}\ +value*\map (result.property) @{result_instances}\ +value*\map (text_section.authored_by) @{introduction_instances}\ \} In order to get the list of the instances of the class \myresult\ whose \evidence\ is a \proof\, one can use the command: @{theory_text [display,indent=5, margin=70] \ -value*\filter (\\. result.evidence \ = proof) @{result-instances}\ +value*\filter (\\. result.evidence \ = proof) @{result_instances}\ \} The list of the instances of the class \introduction\ whose \level\ > 1, can be filtered by: @{theory_text [display,indent=5, margin=70] \ value*\filter (\\. the (text_section.level \) > 1) - @{introduction-instances}\ + @{introduction_instances}\ \} \ From b447a480fbd2f8290941722b1456fb8b3eb8ebc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 15:04:39 +0200 Subject: [PATCH 15/27] Fix manual latex compilation --- Isabelle_DOF/thys/manual/M_04_RefMan.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Isabelle_DOF/thys/manual/M_04_RefMan.thy b/Isabelle_DOF/thys/manual/M_04_RefMan.thy index d820b39..27d3313 100644 --- a/Isabelle_DOF/thys/manual/M_04_RefMan.thy +++ b/Isabelle_DOF/thys/manual/M_04_RefMan.thy @@ -98,7 +98,7 @@ text\ section\The Ontology Definition Language (ODL)\ text\ ODL shares some similarities with meta-modeling languages such as UML class - models: It builds upon concepts like class, inheritance, class_instances, attributes, references + models: It builds upon concepts like class, inheritance, class-instances, attributes, references to instances, and class-invariants. Some concepts like advanced type-checking, referencing to formal entities of Isabelle, and monitors are due to its specific application in the Isabelle context. Conceptually, ontologies specified in ODL consist of: From 8d6c8929e278d3d3c0acff65a09513cd5e4a345c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 16:14:25 +0200 Subject: [PATCH 16/27] Fix typos --- .../Test_Polymorphic_Classes.thy | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy index 6791f38..8f46155 100644 --- a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy +++ b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy @@ -138,7 +138,7 @@ value*\(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 text\ The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So the following definition only works because the parameter of the class is also \'a\.\ declare[[ML_print_depth = 10000]] doc_class 'a elaborate4 = @@ -153,8 +153,8 @@ declare[[ML_print_depth = 20]] text\Bug: As the term antiquotation is considered as a ground term, its type \<^typ>\'a::one list\ conflicts with the type of the attribute \<^typ>\int list\. 
-To support the instantiataion of the term antiquatation as an \<^typ>\int list\, -the term antiquatation should have the same behavior as a constant definition, +To support the instantiation of the term antiquotation as an \<^typ>\int list\, +the term antiquotation should have the same behavior as a constant definition, which is not the case for now.\ declare[[ML_print_depth = 10000]] doc_class-assert-error elaborate4' = @@ -180,7 +180,7 @@ value*\concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\ The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So the following definition only works because the parameter of the class is also \'a\.\ declare[[ML_print_depth = 10000]] doc_class 'a elaborate5 = @@ -196,7 +196,7 @@ declare[[ML_print_depth = 20]] text\Bug: The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So it is not compatible with the type of the attribute \<^typ>\'a::numeral list\\ doc_class-assert-error 'a elaborate5' = d :: "'a::numeral list" <= "concat (map a (elaborate2.c @{elaborate2 \test_elaborate2\}))" @@ -220,14 +220,14 @@ text*[test_elaborate3a::"('a::one, int) elaborate3", d = "[@{elaborate2 \t text\ The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So the following definition only works because the parameter of the class is also \'a\.\ definition* test_elaborate3_embedding ::"'a::one list" where "test_elaborate3_embedding \ (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \test_elaborate3a\})))" text\Bug: The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So it is not compatible with the specified type of the definition \<^typ>\int list\:\ definition-assert-error test_elaborate3_embedding'::"int list" where "test_elaborate3_embedding' \ (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \test_elaborate3a\})))" @@ -258,7 +258,7 @@ term*\a @{one \test_one\}\ text\Bug: The term antiquotation is considered a ground term. -Then its type here is \<^typ>\'a::one list\ whith \'a\ a fixed-type variable. +Then its type here is \<^typ>\'a::one list\ with \'a\ a fixed-type variable. So it is not compatible with the specified type of the definition \<^typ>\('b::one, 'a::numeral) elaborate1'\ because the term antiquotation can not be instantiate as a \<^typ>\'b::one list\ and the \'a\ is checked against the \'a::numeral\ instance type parameter:\ From 43aad517b9427239fa6774e9e99a1cc2a851e492 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 16:25:25 +0200 Subject: [PATCH 17/27] Add basic explanation for lemma*, etc. Add basic explanation how to use lemma*, etc. 
with term antiquotations of polymorphic class instances --- Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy index 8f46155..69d6590 100644 --- a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy +++ b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy @@ -679,4 +679,13 @@ typ\unit test_G_ext\ typ\\test_G.tag_attribute :: int\\ text*[xcv6::"\test_G.tag_attribute :: int\ test_F", b="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"]\\ + +text\\lemma*\, etc. do not support well polymorphic classes. +For now only embedded term-antiquotation in a definition could work:\ +definition* testtest_level where "testtest_level \ the (text_section.level @{test2 \testtest2''\})" +lemma*[e5::E] testtest : "xx + testtest_level = yy + testtest_level \ xx = yy" by simp + +text\Indeed this fails:\ +(*lemma*[e6::E] testtest : "xx + the (level @{test2 \testtest2''\}) = yy + the (level @{test2 \testtest2''\}) \ xx = yy" by simp*) + end \ No newline at end of file From ca7cdec9b49722d882cf146f24b19fb2058df099 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Thu, 20 Jul 2023 16:31:08 +0200 Subject: [PATCH 18/27] Fix typos --- Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy index 69d6590..f6f255a 100644 --- a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy +++ b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy @@ -680,7 +680,7 @@ typ\\test_G.tag_attribute :: int\\ text*[xcv6::"\test_G.tag_attribute :: int\ test_F", b="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"]\\ -text\\lemma*\, etc. do not support well polymorphic classes. +text\\lemma*\, etc. do not support well polymorphic classes term antiquotations. For now only embedded term-antiquotation in a definition could work:\ definition* testtest_level where "testtest_level \ the (text_section.level @{test2 \testtest2''\})" lemma*[e5::E] testtest : "xx + testtest_level = yy + testtest_level \ xx = yy" by simp From 5a8e79fb7e2953bb8f9e046f467b2fc9725af247 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 4 Aug 2023 04:37:14 +0100 Subject: [PATCH 19/27] Moved default value for title into template, as some LaTeX classes do not allow for a pre-set title. 
--- .../document-templates/root-beamer-UNSUPPORTED.tex | 1 + .../document-templates/root-beamerposter-UNSUPPORTED.tex | 1 + .../document-templates/root-eptcs-UNSUPPORTED.tex | 1 + .../document-templates/root-lipics-v2021-UNSUPPORTED.tex | 1 + .../document-templates/root-sn-article-UNSUPPORTED.tex | 1 + .../document-templates/root-svjour3-UNSUPPORTED.tex | 1 + Isabelle_DOF/latex/document-templates/root-lncs.tex | 2 +- Isabelle_DOF/latex/document-templates/root-scrartcl.tex | 1 + Isabelle_DOF/latex/document-templates/root-scrreprt-modern.tex | 1 + Isabelle_DOF/latex/document-templates/root-scrreprt.tex | 1 + Isabelle_DOF/latex/styles/DOF-core.sty | 2 +- 11 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Isabelle_DOF-Ontologies/document-templates/root-beamer-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-beamer-UNSUPPORTED.tex index 6306e5a..c90ab0b 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-beamer-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-beamer-UNSUPPORTED.tex @@ -21,6 +21,7 @@ \RequirePackage{ifvtex} \documentclass[16x9,9pt]{beamer} \PassOptionsToPackage{force}{DOF-scholarly_paper} +\title{No Title Given} \usepackage{DOF-core} \usepackage{textcomp} diff --git a/Isabelle_DOF-Ontologies/document-templates/root-beamerposter-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-beamerposter-UNSUPPORTED.tex index 83457c1..783d279 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-beamerposter-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-beamerposter-UNSUPPORTED.tex @@ -21,6 +21,7 @@ \RequirePackage{ifvtex} \documentclass[]{beamer} \PassOptionsToPackage{force}{DOF-scholarly_paper} +\title{No Title Given} \usepackage{beamerposter} \usepackage{DOF-core} diff --git a/Isabelle_DOF-Ontologies/document-templates/root-eptcs-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-eptcs-UNSUPPORTED.tex index 8c0906b..77c4dc3 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-eptcs-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-eptcs-UNSUPPORTED.tex @@ -23,6 +23,7 @@ %% preamble.tex. 
\documentclass[submission,copyright,creativecommons]{eptcs} +\title{No Title Given} \usepackage{DOF-core} \bibliographystyle{eptcs}% the mandatory bibstyle diff --git a/Isabelle_DOF-Ontologies/document-templates/root-lipics-v2021-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-lipics-v2021-UNSUPPORTED.tex index b59198e..9c43b1c 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-lipics-v2021-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-lipics-v2021-UNSUPPORTED.tex @@ -25,6 +25,7 @@ \documentclass[a4paper,UKenglish,cleveref, autoref,thm-restate]{lipics-v2021} \bibliographystyle{plainurl}% the mandatory bibstyle +\title{No Title Given} \usepackage[numbers, sort&compress, sectionbib]{natbib} \usepackage{DOF-core} diff --git a/Isabelle_DOF-Ontologies/document-templates/root-sn-article-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-sn-article-UNSUPPORTED.tex index f1d34eb..1f64772 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-sn-article-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-sn-article-UNSUPPORTED.tex @@ -21,6 +21,7 @@ \documentclass[iicol]{sn-jnl} \PassOptionsToPackage{force}{DOF-scholarly_paper} +\title{No Title Given} \usepackage{DOF-core} \bibliographystyle{sn-basic} \let\proof\relax diff --git a/Isabelle_DOF-Ontologies/document-templates/root-svjour3-UNSUPPORTED.tex b/Isabelle_DOF-Ontologies/document-templates/root-svjour3-UNSUPPORTED.tex index 8ee88a3..03f09c3 100644 --- a/Isabelle_DOF-Ontologies/document-templates/root-svjour3-UNSUPPORTED.tex +++ b/Isabelle_DOF-Ontologies/document-templates/root-svjour3-UNSUPPORTED.tex @@ -23,6 +23,7 @@ \RequirePackage{fix-cm} \documentclass[]{svjour3} +\title{No Title Given} \usepackage{DOF-core} \usepackage{mathptmx} \bibliographystyle{abbrvnat} diff --git a/Isabelle_DOF/latex/document-templates/root-lncs.tex b/Isabelle_DOF/latex/document-templates/root-lncs.tex index c2827a0..115ed04 100644 --- a/Isabelle_DOF/latex/document-templates/root-lncs.tex +++ b/Isabelle_DOF/latex/document-templates/root-lncs.tex @@ -23,7 +23,7 @@ \documentclass{llncs} \usepackage{DOF-core} \bibliographystyle{splncs04} - +\title{No Title Given} \usepackage{hyperref} \setcounter{tocdepth}{3} \hypersetup{% diff --git a/Isabelle_DOF/latex/document-templates/root-scrartcl.tex b/Isabelle_DOF/latex/document-templates/root-scrartcl.tex index a6a737f..a006b9c 100644 --- a/Isabelle_DOF/latex/document-templates/root-scrartcl.tex +++ b/Isabelle_DOF/latex/document-templates/root-scrartcl.tex @@ -22,6 +22,7 @@ \RequirePackage{ifvtex} \documentclass[abstract=true,fontsize=11pt,DIV=12,paper=a4]{scrartcl} +\title{No Title Given} \usepackage{DOF-core} \usepackage{textcomp} diff --git a/Isabelle_DOF/latex/document-templates/root-scrreprt-modern.tex b/Isabelle_DOF/latex/document-templates/root-scrreprt-modern.tex index 75ac822..81b7186 100644 --- a/Isabelle_DOF/latex/document-templates/root-scrreprt-modern.tex +++ b/Isabelle_DOF/latex/document-templates/root-scrreprt-modern.tex @@ -24,6 +24,7 @@ \RequirePackage{ifvtex} \documentclass[fontsize=11pt,paper=a4,open=right,twoside,abstract=true]{scrreprt} +\title{No Title Given} \usepackage{textcomp} \bibliographystyle{abbrvnat} diff --git a/Isabelle_DOF/latex/document-templates/root-scrreprt.tex b/Isabelle_DOF/latex/document-templates/root-scrreprt.tex index e8c0141..bb5d07b 100644 --- a/Isabelle_DOF/latex/document-templates/root-scrreprt.tex +++ b/Isabelle_DOF/latex/document-templates/root-scrreprt.tex @@ -21,6 +21,7 @@ 
\RequirePackage{ifvtex} \documentclass[fontsize=11pt,paper=a4,open=right,twoside,abstract=true]{scrreprt} +\title{No Title Given} \usepackage{DOF-core} diff --git a/Isabelle_DOF/latex/styles/DOF-core.sty b/Isabelle_DOF/latex/styles/DOF-core.sty index 0bfa98c..fc467de 100644 --- a/Isabelle_DOF/latex/styles/DOF-core.sty +++ b/Isabelle_DOF/latex/styles/DOF-core.sty @@ -166,7 +166,7 @@ % end: label and ref %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -\title{No Title Given} +%\title{No Title Given} \input{ontologies} \IfFileExists{preamble.tex}{\input{preamble.tex}}{}% From 342984df3b658e1962ac996feec4b2475a029645 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 4 Aug 2023 07:01:42 +0100 Subject: [PATCH 20/27] Converted def into newcommand. --- Isabelle_DOF/latex/styles/DOF-core.sty | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Isabelle_DOF/latex/styles/DOF-core.sty b/Isabelle_DOF/latex/styles/DOF-core.sty index fc467de..bd5a2be 100644 --- a/Isabelle_DOF/latex/styles/DOF-core.sty +++ b/Isabelle_DOF/latex/styles/DOF-core.sty @@ -156,13 +156,13 @@ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % begin: label and ref \newkeycommand\isaDof@label[label=,type=][1]{\label{#1}} -\def\isaDofDOTlabel{\isaDof@label} +\newcommand{\isaDofDOTlabel}{\isaDof@label} \newkeycommand\isaDof@ref[label=,type=][1]{\autoref{#1}} -\def\isaDofDOTref{\isaDof@ref} +\newcommand{\isaDofDOTref}{\isaDof@ref} \newkeycommand\isaDof@macro[label=,type=][1]{MMM \label{#1}} %% place_holder -\def\isaDofDOTmacroDef{\iisaDof@macro} +\newcommand{\isaDofDOTmacroDef}{\iisaDof@macro} \newkeycommand\isaDof@macroExp[label=,type=][1]{MMM \autoref{#1}} %% place_holder -\def\isaDofDOTmacroExp{\isaDof@macroExp} +\newcommand{\isaDofDOTmacroExp}{\isaDof@macroExp} % end: label and ref %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% From c06328794736ee370bb8183bcffb4d88a01ea71a Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Tue, 29 Aug 2023 06:11:32 +0100 Subject: [PATCH 21/27] Isabelle API update. 
--- Isabelle_DOF/thys/Isa_DOF.thy | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index fde5005..f7d615b 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -2562,7 +2562,8 @@ fun gen_theorem schematic bundle_includes prep_att prep_stmt |> prep_statement (prep_att lthy) prep_stmt elems raw_concl; val atts = more_atts @ map (prep_att lthy) raw_atts; - val pos = Position.thread_data (); + val print_cfg = {interactive = int, pos = Position.thread_data (), proof_state= false} + fun after_qed' results goal_ctxt' = let val results' = @@ -2574,12 +2575,12 @@ fun gen_theorem schematic bundle_includes prep_att prep_stmt (map2 (fn (b, _) => fn ths => (b, [(ths, [])])) stmt results') lthy; val lthy'' = if Binding.is_empty_atts (name, atts) then - (Proof_Display.print_results int pos lthy' ((kind, ""), res); lthy') + (Proof_Display.print_results print_cfg lthy' ((kind, ""), res); lthy') else let val ([(res_name, _)], lthy'') = Local_Theory.notes_kind kind [((name, atts), [(maps #2 res, [])])] lthy'; - val _ = Proof_Display.print_results int pos lthy' ((kind, res_name), res); + val _ = Proof_Display.print_results print_cfg lthy' ((kind, res_name), res); in lthy'' end; in after_qed results' lthy'' end; From ec7297f1d3b0f05ab1fdb478ad52fcf1597ed824 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Mon, 11 Sep 2023 09:07:10 +0200 Subject: [PATCH 22/27] Update instances list term antiquotation Make instances list term antiquotation compatible with polymorphic classes --- .../Very_Deep_Interpretation.thy | 4 +- Isabelle_DOF-Unit-Tests/Attributes.thy | 4 +- .../Concept_High_Level_Invariants.thy | 6 +- .../Concept_MonitorTest1.thy | 4 +- .../Concept_TermEvaluation.thy | 32 ++--- .../Test_Polymorphic_Classes.thy | 5 + Isabelle_DOF/thys/Isa_DOF.thy | 111 +++++++++++------- Isabelle_DOF/thys/manual/M_04_RefMan.thy | 47 +++++++- 8 files changed, 140 insertions(+), 73 deletions(-) diff --git a/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy b/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy index 51ff20a..0361e9b 100644 --- a/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy +++ b/Isabelle_DOF-Proofs/Very_Deep_Interpretation.thy @@ -17,7 +17,7 @@ no_notation "Isabelle_DOF_file" ("@{file _}") no_notation "Isabelle_DOF_thy" ("@{thy _}") no_notation "Isabelle_DOF_docitem" ("@{docitem _}") no_notation "Isabelle_DOF_docitem_attr" ("@{docitemattr (_) :: (_)}") -no_notation "Isabelle_DOF_trace_attribute" ("@{trace-attribute _}") +no_notation "Isabelle_DOF_trace_attribute" ("@{trace'_-attribute _}") consts Isabelle_DOF_typ :: "string \ typ" ("@{typ _}") consts Isabelle_DOF_term :: "string \ term" ("@{term _}") @@ -27,7 +27,7 @@ datatype "file" = Isabelle_DOF_file string ("@{file _}") datatype "thy" = Isabelle_DOF_thy string ("@{thy _}") consts Isabelle_DOF_docitem :: "string \ 'a" ("@{docitem _}") datatype "docitem_attr" = Isabelle_DOF_docitem_attr string string ("@{docitemattr (_) :: (_)}") -consts Isabelle_DOF_trace_attribute :: "string \ (string * string) list" ("@{trace-attribute _}") +consts Isabelle_DOF_trace_attribute :: "string \ (string * string) list" ("@{trace'_-attribute _}") subsection\ Semantics \ diff --git a/Isabelle_DOF-Unit-Tests/Attributes.thy b/Isabelle_DOF-Unit-Tests/Attributes.thy index 14ba9f3..092ec22 100644 --- a/Isabelle_DOF-Unit-Tests/Attributes.thy +++ b/Isabelle_DOF-Unit-Tests/Attributes.thy @@ -214,12 +214,12 @@ no_notation Plus (infixr "||" 55) no_notation 
Times (infixr "~~" 60) no_notation Atom ("\_\" 65) -value* \ DA.accepts (na2da (rexp2na example_expression)) (map fst @{trace-attribute \aaa\}) \ +value* \ DA.accepts (na2da (rexp2na example_expression)) (map fst @{trace_attribute \aaa\}) \ definition word_test :: "'a list \ 'a rexp \ bool" (infix "is-in" 60) where " w is-in rexp \ DA.accepts (na2da (rexp2na rexp)) (w)" -value* \ (map fst @{trace-attribute \aaa\}) is-in example_expression \ +value* \ (map fst @{trace_attribute \aaa\}) is-in example_expression \ (*<*) diff --git a/Isabelle_DOF-Unit-Tests/Concept_High_Level_Invariants.thy b/Isabelle_DOF-Unit-Tests/Concept_High_Level_Invariants.thy index 878c465..7393db5 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_High_Level_Invariants.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_High_Level_Invariants.thy @@ -213,9 +213,9 @@ value*\@{scholarly_paper.author \church'\}\ value*\@{author \church\}\ value*\@{Concept_High_Level_Invariants.author \church\}\ -value*\@{scholarly_paper.author_instances}\ -value*\@{author_instances}\ -value*\@{Concept_High_Level_Invariants.author_instances}\ +value*\@{instances_of \scholarly_paper.author\}\ +value*\@{instances_of \author\}\ +value*\@{instances_of \Concept_High_Level_Invariants.author\}\ text*[introduction3::introduction, authored_by = "{@{author \church\}}", level = "Some 2"]\\ text*[introduction4::introduction, authored_by = "{@{author \curry\}}", level = "Some 4"]\\ diff --git a/Isabelle_DOF-Unit-Tests/Concept_MonitorTest1.thy b/Isabelle_DOF-Unit-Tests/Concept_MonitorTest1.thy index ceb9fb6..fcea1ea 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_MonitorTest1.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_MonitorTest1.thy @@ -104,8 +104,8 @@ ML\ val (oid, DOF_core.Instance {value, ...}) = Name_Space.check (Context.Proof \<^context>) (DOF_core.get_instances \<^context>) ("aaa", Position.none) \ -term*\map fst @{trace-attribute \test_monitor_M\}\ -value*\map fst @{trace-attribute \test_monitor_M\}\ +term*\map fst @{trace_attribute \test_monitor_M\}\ +value*\map fst @{trace_attribute \test_monitor_M\}\ ML\@{assert} ([("Conceptual.A", "test"), ("Conceptual.F", "sdfg")] = @{trace_attribute aaa}) \ diff --git a/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy b/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy index 17e8b9d..d821672 100644 --- a/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy +++ b/Isabelle_DOF-Unit-Tests/Concept_TermEvaluation.thy @@ -136,6 +136,9 @@ ML\@{thm "refl"}\ section\Request on instances\ +text\The instances directly attached to the default super class \text\: \ +value*\@{instances_of \text\}\ + text\We define a new class Z:\ doc_class Z = z::"int" @@ -146,28 +149,27 @@ text*[test2Z::Z, z=4]\lorem ipsum...\ text*[test3Z::Z, z=3]\lorem ipsum...\ text\We want to get all the instances of the @{doc_class Z}:\ -value*\@{Z_instances}\ +value*\@{instances_of \Z\}\ text\Now we want to get the instances of the @{doc_class Z} whose attribute z > 2:\ -value*\filter (\\. Z.z \ > 2) @{Z_instances}\ +value*\filter (\\. Z.z \ > 2) @{instances_of \Z\}\ text\We can check that we have the list of instances we wanted:\ -value*\filter (\\. Z.z \ > 2) @{Z_instances} = [@{Z \test3Z\}, @{Z \test2Z\}] - \ filter (\\. Z.z \ > 2) @{Z_instances} = [@{Z \test2Z\}, @{Z \test3Z\}]\ +assert*\filter (\\. Z.z \ > 2) @{instances_of \Z\} = [@{Z \test3Z\}, @{Z \test2Z\}] + \ filter (\\. 
Z.z \ > 2) @{instances_of \Z\} = [@{Z \test2Z\}, @{Z \test3Z\}]\ text\Now, we want to get all the instances of the @{doc_class A}\ -value*\@{A_instances}\ +value*\@{instances_of \A\}\ -(*<*) -text\Warning: If you make a request on attributes that are undefined in some instances, -you will get a result which includes these unresolved cases. + +text\Warning: Requests on attributes that are undefined in some instances +include all the instances. In the following example, we request the instances of the @{doc_class A}. -But we have defined an instance @{docitem \sdf\} in theory @{theory "Isabelle_DOF-Ontologies.Conceptual"} -whose our theory inherits from, and this docitem instance does not initialize its attribute \<^emph>\x\. -So in the request result we get an unresolved case because the evaluator can not get -the value of the \<^emph>\x\ attribute of the instance @{docitem \sdf\}:\ -value*\filter (\\. A.x \ > 5) @{A_instances}\ -(*>*) +But we have defined instances @{A \axx\} and @{A \axx\} previously +and these docitem instances do not initialize their \<^const>\A.x\ attribute. +So the request can not be evaluated:\ +value*\filter (\\. A.x \ > 5) @{instances_of \A\}\ + section\Limitations\ text\There are still some limitations. @@ -226,7 +228,7 @@ text\... and here we reference @{A \assertionA\}.\ assert*\evidence @{result \resultProof\} = evidence @{result \resultProof2\}\ text\The optional evaluator of \value*\ and \assert*\ must be specified before the meta arguments:\ -value* [nbe] [optional_test_A::A, x=6] \filter (\\. A.x \ > 5) @{A_instances}\ +value* [nbe] [optional_test_A::A, x=6] \filter (\\. A.x \ > 5) @{instances_of \A\}\ assert* [nbe] [resultProof3::result, evidence = "proof", property="[@{thm \HOL.sym\}]"] \evidence @{result \resultProof3\} = evidence @{result \resultProof2\}\ diff --git a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy index f6f255a..0d5aa30 100644 --- a/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy +++ b/Isabelle_DOF-Unit-Tests/Test_Polymorphic_Classes.thy @@ -3,6 +3,11 @@ theory Test_Polymorphic_Classes TestKit begin +text\The name \text\ is reserved by the implementation and refers to the default super class:\ +doc_class-assert-error "text" = +a::int +\text: This name is reserved by the implementation\ + doc_class title = short_title :: "string option" <= "None" diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index ba42a64..6f02e90 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -1050,7 +1050,8 @@ datatype "file" = Isabelle_DOF_file string ("@{file _}") datatype "thy" = Isabelle_DOF_thy string ("@{thy _}") consts Isabelle_DOF_docitem :: "string \ 'a" ("@{docitem _}") datatype "docitem_attr" = Isabelle_DOF_docitem_attr string string ("@{docitemattr (_) :: (_)}") -consts Isabelle_DOF_trace_attribute :: "string \ (string * string) list" ("@{trace-attribute _}") +consts Isabelle_DOF_trace_attribute :: "string \ (string * string) list" ("@{trace'_attribute _}") +consts Isabelle_DOF_instances_of :: "string \ 'a list" ("@{instances'_of _}") \ \Dynamic setup of inner syntax cartouche\ @@ -1192,12 +1193,27 @@ fun check_instance thy (term, _, pos) s = val name' = DOF_core.cid_of name thy |> DOF_core.get_onto_class_cid thy |> (fst o fst) fun check' (class_name, object_cid) = - if class_name = object_cid then - DOF_core.value_of name thy + if class_name = object_cid + then () else err (name ^ " is not an instance of " ^ class_name) pos in 
check' (class_name, name') end; in ML_isa_check_generic check thy (term, pos) end +fun check_instance_of thy (term, _, pos) _ = + let + fun check thy (name, _) = + if equal name DOF_core.default_cid + then () + else + let + val class_typ = name |> DOF_core.get_onto_class_cid thy |> snd + fun check' (class_name, typ) = + if equal (class_name |> Syntax.read_typ_global thy) typ + then () + else err (name ^ " is not a class name") pos + in check' (name, class_typ) end; + in ML_isa_check_generic check thy (term, pos) end + fun ML_isa_id _ (term,_) = SOME term @@ -1273,44 +1289,44 @@ fun declare_ISA_class_accessor_and_check_instance (params, doc_class_name, bind_ end -fun elaborate_instances_list thy isa_name _ _ _ = +fun elaborate_instances_of thy _ _ term_option _ = let - val base_name = Long_Name.base_name isa_name - val qualifier = Long_Name.qualifier isa_name - val isa_name' = (case try (unprefix DOF_core.doc_class_prefix) base_name of - NONE => unprefix DOF_core.long_doc_class_prefix base_name - | SOME name => name) - |> unsuffix instances_of_suffixN - |> Long_Name.qualify qualifier - val class_typ = isa_name' |> Proof_Context.read_typ (Proof_Context.init_global thy) - val long_class_name = DOF_core.get_onto_class_name_global isa_name' thy - val values = thy |> Proof_Context.init_global |> DOF_core.get_instances - |> Name_Space.dest_table - |> filter (fn (name, _) => equal (DOF_core.cid_of name thy) long_class_name) - |> map (fn (oid, _) => DOF_core.value_of oid thy) - in HOLogic.mk_list class_typ values end - - -fun declare_class_instances_annotation (params, doc_class_name, bind_pos) thy = - let - val bname = Long_Name.base_name doc_class_name - val bname' = prefix DOF_core.doc_class_prefix bname |> suffix instances_of_suffixN - val bind = bname' |> pair bind_pos |> swap |> Binding.make - val bind' = prefix DOF_core.long_doc_class_prefix bname - |> suffix instances_of_suffixN |> pair bind_pos |> swap |> Binding.make - val typ = Type (doc_class_name, map TFree params) - fun mixfix_enclose name = name |> enclose "@{" "}" - val mixfix = clean_mixfix (bname ^ instances_of_suffixN) |> mixfix_enclose - val mixfix' = clean_mixfix (doc_class_name ^ instances_of_suffixN) |> mixfix_enclose - in - thy |> rm_mixfix bname' mixfix - |> Sign.add_consts [(bind, \<^Type>\list typ\, Mixfix.mixfix mixfix)] - |> DOF_core.add_isa_transformer bind ((check_identity, elaborate_instances_list) - |> DOF_core.make_isa_transformer) - |> Sign.add_consts [(bind', \<^Type>\list typ\, Mixfix.mixfix mixfix')] - |> DOF_core.add_isa_transformer bind' ((check_identity, elaborate_instances_list) - |> DOF_core.make_isa_transformer) - end + val class_name = case term_option of + NONE => error ("Malformed term annotation") + | SOME term => HOLogic.dest_string term + fun mk_list class_typ f = + let + val values = thy |> Proof_Context.init_global |> DOF_core.get_instances + |> Name_Space.dest_table + |> map fst + |> tap (fn is => writeln ("In elaborate_instances_list instances: " ^ \<^make_string> is)) + |> f + |> tap (fn is => writeln ("In elaborate_instances_list instances after filter: " ^ \<^make_string> is)) + |> map (fn oid => DOF_core.value_of oid thy) + in HOLogic.mk_list class_typ values end + in if equal class_name DOF_core.default_cid + then + (* When the class name is default_cid = "text", + return the instances attached to this default class. + We want the class default_cid to stay abstract + and not have the capability to be defined with attribute, invariants, etc. 
+ Hence this can handle docitem without a class associated, + for example when you just want a document element to be referentiable + without using the burden of ontology classes. + ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) + (filter (fn name => DOF_core.cid_of name thy |> equal DOF_core.default_cid)) + |> mk_list \<^typ>\unit\ + else + let + val class_typ = class_name |> Syntax.read_typ_global thy + in + (filter_out (fn name => DOF_core.cid_of name thy |> equal DOF_core.default_cid) + #> filter (fn name => DOF_core.cid_of name thy + |> Syntax.read_typ_global thy + |> equal class_typ)) + |> mk_list class_typ + end + end fun symbex_attr_access0 ctxt proj_term term = let @@ -1393,7 +1409,9 @@ end) ([(\<^const_name>\Isabelle_DOF_docitem\, ISA_core.ML_isa_check_docitem, ISA_core.ML_isa_elaborate_generic) , (\<^const_name>\Isabelle_DOF_trace_attribute\, - ISA_core.ML_isa_check_trace_attribute, ISA_core.ML_isa_elaborate_trace_attribute)] + ISA_core.ML_isa_check_trace_attribute, ISA_core.ML_isa_elaborate_trace_attribute) + , (\<^const_name>\Isabelle_DOF_instances_of\, + ISA_core.check_instance_of, ISA_core.elaborate_instances_of)] |> fold (fn (n, check, elaborate) => fn thy => let val ns = Sign.consts_of thy |> Consts.space_of val name = n @@ -1877,7 +1895,7 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} bi else NONE val value_terms = if default_cid then let - val undefined_value = Free ("Undefined_Value", \<^Type>\unit\) + val undefined_value = Free (oid ^ "_Undefined_Value", \<^Type>\unit\) in (undefined_value, undefined_value) end (* Handle initialization of docitem without a class associated, for example when you just want a document element to be referentiable @@ -3097,7 +3115,13 @@ fun add_doc_class_cmd overloaded (raw_params, binding) raw_parent raw_fieldsNdefaults reject_Atoms regexps invariants thy = let val bind_pos = Binding.pos_of binding - val name = Binding.name_of binding + val name = + let val name = Binding.name_of binding + in case name |> Option.filter (not o equal DOF_core.default_cid) of + NONE => bind_pos |> ISA_core.err (name + ^ ": This name is reserved by the implementation") + | SOME name => name + end val ctxt = Proof_Context.init_global thy; val params = map (apsnd (Typedecl.read_constraint ctxt)) raw_params; val ctxt1 = fold (Variable.declare_typ o TFree) params ctxt; @@ -3163,7 +3187,6 @@ fun add_doc_class_cmd overloaded (raw_params, binding) because the class name is already bound to "doc_class Regular_Exp.rexp" constant by add_doc_class_cmd function *) |> (fn thy => ISA_core.declare_ISA_class_accessor_and_check_instance (params', cid thy, bind_pos) thy) - |> (fn thy => ISA_core.declare_class_instances_annotation (params', cid thy, bind_pos) thy) end; diff --git a/Isabelle_DOF/thys/manual/M_04_RefMan.thy b/Isabelle_DOF/thys/manual/M_04_RefMan.thy index 27d3313..583601c 100644 --- a/Isabelle_DOF/thys/manual/M_04_RefMan.thy +++ b/Isabelle_DOF/thys/manual/M_04_RefMan.thy @@ -1019,6 +1019,38 @@ schemata: section*["sec_advanced"::technical]\Advanced ODL Concepts\ +(*<*) +doc_class title = + short_title :: "string option" <= "None" +doc_class author = + email :: "string" <= "''''" +datatype classification = SIL0 | SIL1 | SIL2 | SIL3 | SIL4 +doc_class abstract = + keywordlist :: "string list" <= "[]" + safety_level :: "classification" <= "SIL3" +doc_class text_section = + authored_by :: "author set" <= "{}" + level :: "int option" <= "None" +type_synonym notion = string +doc_class introduction = text_section + + authored_by :: "author 
set" <= "UNIV" + uses :: "notion set" +doc_class claim = introduction + + based_on :: "notion list" +doc_class technical = text_section + + formal_results :: "thm list" +doc_class "definition" = technical + + is_formal :: "bool" + property :: "term list" <= "[]" +datatype kind = expert_opinion | argument | "proof" +doc_class result = technical + + evidence :: kind + property :: "thm list" <= "[]" +doc_class example = technical + + referring_to :: "(notion + definition) set" <= "{}" +doc_class "conclusion" = text_section + + establish :: "(claim \ result) set" +(*>*) subsection*["sec_example"::technical]\Example\ text\We assume in this section the following local ontology: @@ -1301,6 +1333,12 @@ text\ (See @{technical "sec_low_level_inv"} for an example). \ +(*<*) +value*\map (result.property) @{instances_of \result\}\ +value*\map (text_section.authored_by) @{instances_of \introduction\}\ +value*\filter (\\. result.evidence \ = proof) @{instances_of \result\}\ +value*\filter (\\. the (text_section.level \) > 1) @{instances_of \introduction\}\ +(*>*) subsection*["sec_queries_on_instances"::technical]\Queries On Instances\ @@ -1315,19 +1353,18 @@ text\ or to get the list of the authors of the instances of \introduction\, it suffices to treat this meta-data as usual: @{theory_text [display,indent=5, margin=70] \ -value*\map (result.property) @{result_instances}\ -value*\map (text_section.authored_by) @{introduction_instances}\ +value*\map (result.property) @{instances_of \result\}\ +value*\map (text_section.authored_by) @{instances_of \introduction\}\ \} In order to get the list of the instances of the class \myresult\ whose \evidence\ is a \proof\, one can use the command: @{theory_text [display,indent=5, margin=70] \ -value*\filter (\\. result.evidence \ = proof) @{result_instances}\ +value*\filter (\\. result.evidence \ = proof) @{instances_of \result\}\ \} The list of the instances of the class \introduction\ whose \level\ > 1, can be filtered by: @{theory_text [display,indent=5, margin=70] \ -value*\filter (\\. the (text_section.level \) > 1) - @{introduction_instances}\ +value*\filter (\\. the (text_section.level \) > 1) @{instances_of \introduction\}\ \} \ From e12abadc948732f40e90be956d02a9db21dc8412 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Thu, 14 Sep 2023 06:29:01 +0100 Subject: [PATCH 23/27] Test with Isabelle 2023. --- .woodpecker/build.yml | 4 ++-- Isabelle_DOF/scala/dof.scala | 6 +++--- README_DEVELOPMENT.md | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 0b26553..a9ce5b4 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -1,6 +1,6 @@ pipeline: build: - image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest + image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest pull: true commands: - hg log --limit 2 /root/isabelle @@ -23,7 +23,7 @@ pipeline: - cd ../.. 
- ln -s * latest archive: - image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle_nightly:latest + image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest commands: - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX - mkdir -p $ARTIFACT_DIR diff --git a/Isabelle_DOF/scala/dof.scala b/Isabelle_DOF/scala/dof.scala index c89ff0d..ffc1581 100644 --- a/Isabelle_DOF/scala/dof.scala +++ b/Isabelle_DOF/scala/dof.scala @@ -39,10 +39,10 @@ import isabelle._ object DOF { /** parameters **/ - val isabelle_version = "" - val isabelle_url = "https://isabelle.sketis.net/devel/release_snapshot/" + val isabelle_version = "2023" + val isabelle_url = "https://isabelle.in.tum.de/website-Isabelle2023" - val afp_version = "afp-devel" + val afp_version = "afp-2023-09-13" // Isabelle/DOF version: "Unreleased" for development, semantic version for releases val version = "Unreleased" diff --git a/README_DEVELOPMENT.md b/README_DEVELOPMENT.md index 224824d..539407c 100644 --- a/README_DEVELOPMENT.md +++ b/README_DEVELOPMENT.md @@ -5,9 +5,9 @@ Isabelle/DOF has three major prerequisites: * **Isabelle:** Isabelle/DOF requires [Isabelle - 2022](https://isabelle.in.tum.de/website-Isabelle2022/). Please download the - Isabelle 2022 distribution for your operating system from the [Isabelle - website](https://isabelle.in.tum.de/website-Isabelle2022/). + 2023](https://isabelle.in.tum.de/website-Isabelle2023/). Please download the + Isabelle 2023 distribution for your operating system from the [Isabelle + website](https://isabelle.in.tum.de/website-Isabelle2023/). * **AFP:** Isabelle/DOF requires several entries from the [Archive of Formal Proofs (AFP)](https://www.isa-afp.org/). * **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least From b69857214606b5cd9e336669150c4c9fb9d64433 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Thu, 14 Sep 2023 06:33:28 +0100 Subject: [PATCH 24/27] Documented Isabelle version (2023). --- CHANGELOG.md | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1eb853c..09cbc85 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,7 +11,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed -- Updated Isabelle version to Isabelle 2022 +- Updated Isabelle version to Isabelle 2023 ## [1.3.0] - 2022-07-08 diff --git a/README.md b/README.md index 23b429e..22640a6 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Isabelle/DOF allows for both conventional typesetting and formal development. Isabelle/DOF has two major prerequisites: -* **Isabelle:** Isabelle/DOF requires [Isabelle](https://isabelle.in.tum.de/) +* **Isabelle 2023:** Isabelle/DOF requires [Isabelle](https://isabelle.in.tum.de/) and several entries from the [Archive of Formal Proofs (AFP)](https://www.isa-afp.org/). 
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least From c57ce6292bfaf7d687dc1409eed7ae8f3ddedda2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Tue, 19 Sep 2023 17:03:00 +0200 Subject: [PATCH 25/27] Update output type name for latex refs --- Isabelle_DOF/thys/Isa_COL.thy | 8 ++++---- Isabelle_DOF/thys/Isa_DOF.thy | 9 +++++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/Isabelle_DOF/thys/Isa_COL.thy b/Isabelle_DOF/thys/Isa_COL.thy index a5e007f..522f991 100644 --- a/Isabelle_DOF/thys/Isa_COL.thy +++ b/Isabelle_DOF/thys/Isa_COL.thy @@ -804,14 +804,14 @@ fun upd_block f = fun upd_block_title f = upd_block (fn title => f title) -val unenclose_end = unenclose -val unenclose_string = unenclose o unenclose o unenclose_end +val unenclose_string = unenclose o unenclose fun read_string s = - let val symbols = unenclose_end s |> Symbol_Pos.explode0 + let val s' = DOF_core.markup2string s + val symbols = s' |> Symbol_Pos.explode0 in if hd symbols |> fst |> equal Symbol.open_ then Token.read_cartouche symbols |> Token.input_of - else unenclose_string s |> Syntax.read_input + else unenclose_string s' |> Syntax.read_input end val block_titleN = "title" diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index 409a67a..70a350c 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -2847,12 +2847,13 @@ fun pretty_docitem_antiquotation_generic cid_decl ctxt ({unchecked, define}, src val inline = Config.get ctxt Document_Antiquotation.thy_output_display val _ = check_and_mark ctxt cid_decl {strict_checking = not unchecked} {inline = inline} pos str + val cid_decl' = DOF_core.output_name cid_decl in (case (define,inline) of - (true,false) => XML.enclose("{\\isaDofDOTlabel[type={"^cid_decl^"}] {")"}}" - |(false,false)=> XML.enclose("{\\isaDofDOTref[type={"^cid_decl^"}] {")"}}" - |(true,true) => XML.enclose("{\\isaDofDOTmacroDef[type={"^cid_decl^"}]{")"}}" - |(false,true) => XML.enclose("{\\isaDofDOTmacroExp[type={"^cid_decl^"}]{")"}}" + (true,false) => XML.enclose("{\\isaDofDOTlabel[type={"^cid_decl'^"}] {")"}}" + |(false,false)=> XML.enclose("{\\isaDofDOTref[type={"^cid_decl'^"}] {")"}}" + |(true,true) => XML.enclose("{\\isaDofDOTmacroDef[type={"^cid_decl'^"}]{")"}}" + |(false,true) => XML.enclose("{\\isaDofDOTmacroExp[type={"^cid_decl'^"}]{")"}}" ) (Latex.text (DOF_core.output_name str, pos)) end From baa36b10c13041a0911f5ec350103e78b4cdf6a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Wed, 20 Sep 2023 15:00:43 +0200 Subject: [PATCH 26/27] Cleanup --- Isabelle_DOF/thys/Isa_DOF.thy | 2 -- 1 file changed, 2 deletions(-) diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index 70a350c..fcbba0c 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -1299,9 +1299,7 @@ fun elaborate_instances_of thy _ _ term_option _ = val values = thy |> Proof_Context.init_global |> DOF_core.get_instances |> Name_Space.dest_table |> map fst - |> tap (fn is => writeln ("In elaborate_instances_list instances: " ^ \<^make_string> is)) |> f - |> tap (fn is => writeln ("In elaborate_instances_list instances after filter: " ^ \<^make_string> is)) |> map (fn oid => DOF_core.value_of oid thy) in HOLogic.mk_list class_typ values end in if equal class_name DOF_core.default_cid From 7b54bf5ca5cbb06d0b870cd86d56d8718d14acfb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicolas=20M=C3=A9ric?= Date: Wed, 20 Sep 2023 15:56:50 +0200 Subject: [PATCH 27/27] 
Cleanup --- Isabelle_DOF-Unit-Tests/Attributes.thy | 6 +++-- Isabelle_DOF/thys/Isa_DOF.thy | 32 +++++++++----------------- 2 files changed, 15 insertions(+), 23 deletions(-) diff --git a/Isabelle_DOF-Unit-Tests/Attributes.thy b/Isabelle_DOF-Unit-Tests/Attributes.thy index 092ec22..99667e6 100644 --- a/Isabelle_DOF-Unit-Tests/Attributes.thy +++ b/Isabelle_DOF-Unit-Tests/Attributes.thy @@ -96,8 +96,10 @@ term "C" text\Voila what happens on the ML level:\ ML\val Type("Conceptual.B.B_ext",[Type("Conceptual.C.C_ext",t)]) = @{typ "C"}; - val @{typ "D"} = Value_Command.Docitem_Parser.cid_2_cidType "Conceptual.D" @{theory}; - val @{typ "E"} = Value_Command.Docitem_Parser.cid_2_cidType "Conceptual.E" @{theory}; + val \<^typ>\D\ = DOF_core.get_onto_class_cid \<^theory> "Conceptual.D" + |> snd ; + val \<^typ>\E\ = DOF_core.get_onto_class_cid \<^theory> "Conceptual.E" + |> snd; \ text*[dfgdfg2::C, z = "None"]\ Lorem ipsum ... @{thm refl} \ diff --git a/Isabelle_DOF/thys/Isa_DOF.thy b/Isabelle_DOF/thys/Isa_DOF.thy index fcbba0c..cbbe561 100644 --- a/Isabelle_DOF/thys/Isa_DOF.thy +++ b/Isabelle_DOF/thys/Isa_DOF.thy @@ -262,7 +262,9 @@ struct to be able to reference the class internally. *) - val default_cid = "text" (* the top (default) document class: everything is a text.*) + val default_cid = "text" (* the top (default) document class: everything is a text.*) + + val default_cid_typ = \<^typ>\unit\ (* top document class type *) (* Due to the implementation, is_subclass0 is _NOT_ compatible with DOF classes type synonyms, so they must be resolved to full class names before use. *) @@ -294,6 +296,8 @@ struct val undefined_instance = "undefined_instance" + val undefined_value = Free ("Undefined_Value", default_cid_typ) + val empty_instance = Instance {defined = false, input_term = \<^term>\()\, value = \<^term>\()\, @@ -702,12 +706,6 @@ fun is_subclass (ctxt) s t = is_subclass0 s t ctxt fun is_subclass_global thy s t = let val ctxt = Proof_Context.init_global thy in is_subclass0 s t ctxt end - -fun typ_to_cid (Type(s,[\<^Type>\unit\])) = Long_Name.qualifier s - |typ_to_cid (Type(_,[T])) = typ_to_cid T - |typ_to_cid _ = error("type is not an ontological type.") - - fun check_regexps term = let val _ = case fold_aterms Term.add_free_names term [] of n::_ => error("No free variables allowed in monitor regexp:" ^ n) @@ -1313,7 +1311,7 @@ fun elaborate_instances_of thy _ _ term_option _ = without using the burden of ontology classes. ex: text*[sdf]\ Lorem ipsum @{thm refl}\ *) (filter (fn name => DOF_core.cid_of name thy |> equal DOF_core.default_cid)) - |> mk_list \<^typ>\unit\ + |> mk_list DOF_core.default_cid_typ else let val class_typ = class_name |> Syntax.read_typ_global thy @@ -1557,16 +1555,6 @@ val value_without_elaboration = value_select "" structure Docitem_Parser = struct -fun cid_2_cidType cid_long thy = - if cid_long = DOF_core.default_cid then \<^Type>\unit\ - else let fun ty_name cid = cid^"."^ Long_Name.base_name cid ^ Record.extN - fun fathers cid_long = case DOF_core.get_onto_class_global cid_long thy of - DOF_core.Onto_Class {inherits_from=NONE, ...} => [cid_long] - | DOF_core.Onto_Class {inherits_from=SOME(_,father), ...} => - cid_long :: (fathers father) - in fold (fn x => fn y => Type(ty_name x,[y])) (fathers cid_long) \<^Type>\unit\ - end - fun create_default_object thy binding class_name typ = let val purified_class_name = String.translate (fn #"." 
=> "_" | x => String.implode [x]) class_name @@ -1620,7 +1608,7 @@ fun check_classref {is_monitor=is_monitor} (SOME (cid, pos)) thy = in (name_cid_typ, pos) end | check_classref _ NONE _ = pair DOF_core.default_cid DOF_core.default_cid - |> rpair \<^typ>\unit\ + |> rpair DOF_core.default_cid_typ |> rpair Position.none fun calc_update_term {mk_elaboration=mk_elaboration} thy (name, typ) @@ -1893,7 +1881,9 @@ fun create_and_check_docitem is_monitor {is_inline=is_inline} {define=define} bi else NONE val value_terms = if default_cid then let - val undefined_value = Free (oid ^ "_Undefined_Value", \<^Type>\unit\) + val undefined_value = dest_Free DOF_core.undefined_value + |> apfst (fn x => oid ^ "_" ^ x) + |> Free in (undefined_value, undefined_value) end (* Handle initialization of docitem without a class associated, for example when you just want a document element to be referentiable @@ -2227,7 +2217,7 @@ fun meta_args_2_latex thy sem_attrs transform_attr NONE => let val DOF_core.Instance cid = DOF_core.get_instance_global lab thy in pair (cid |> #cid) (cid |> #cid) - |> rpair \<^typ>\unit\ end + |> rpair DOF_core.default_cid_typ end | SOME(cid,_) => DOF_core.get_onto_class_cid thy cid