Add very deep interpretation

Use the metalogic to generate meta term anti-quotations

The idea is for the Very_Deep_Interpretation theory
to source the shallow material
and then update the checking and elaboration functions
of the term anti-quotations.
To achieve this, after the metalogic is sourced, the notations
(mixfixes) of the term anti-quotations are removed from the
shallow constructors and re-declared on the new deep constants.

Example:

With the shallow interpretation:

datatype "typ" = Isabelle_DOF_typ string  ("@{typ _}")

This generates a datatype whose constructor Isabelle_DOF_typ has
the notation @{typ ...}.

You get:

find_consts name:"Isabelle_DOF_typ"

find_consts
  name: "Isabelle_DOF_typ"

found 1 constant(s):
  Shallow_Interpretation.typ.Isabelle_DOF_typ :: "char list ⇒ typ"

With the deep interpretation:

no_notation "Isabelle_DOF_typ" ("@{typ _}")

consts Isabelle_DOF_typ :: "string ⇒ typ" ("@{typ _}")

The notation is removed and then added to the new Isabelle_DOF_typ constant.

You get:

find_consts name:"Isabelle_DOF_typ"

find_consts
  name: "Isabelle_DOF_typ"

found 2 constant(s):
  Deep_Interpretation.Isabelle_DOF_typ :: "char list ⇒ Core.typ"
  Shallow_Interpretation.typ.Isabelle_DOF_typ :: "char list ⇒ Shallow_Interpretation.typ"

But only the Deep_Interpretation constant has the notation (mixfix).

The new interpretation of the term anti-quotations is then
available to the user.
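
As an illustration, a minimal usage sketch with the deep interpretation loaded
(the lines are taken from the Reification_Test theory added by this commit);
the familiar anti-quotation syntax now elaborates into the reified metalogic
and can be evaluated:

term*\<open>@{typ \<open>int\<close>}\<close>
value*\<open>@{typ \<open>int\<close>}\<close>
value*\<open>proof @{thm \<open>HOL.refl\<close>}\<close>
value*\<open>depth (proof @{thm \<open>HOL.refl\<close>})\<close>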
Nicolas Méric 2023-02-24 09:03:43 +01:00
parent fb049946c5
commit 6a6259bf29
4 changed files with 1093 additions and 0 deletions


@@ -2,6 +2,9 @@ session "Isabelle_DOF-Proofs" (proofs) = "HOL-Proofs" +
  options [document = false, record_proofs = 2, parallel_limit = 500, document_build = dof]
  sessions
    "Isabelle_DOF"
    Metalogic_ProofChecker
  theories
    Isabelle_DOF.ontologies
    Isabelle_DOF.Isa_DOF
    Very_Deep_DOF
    Reification_Test


@@ -0,0 +1,739 @@
theory Reification_Test
imports "Isabelle_DOF-Proofs.Very_Deep_DOF"
begin
ML\<open>
val ty1 = ISA_core.reify_typ @{typ "int"}
val ty2 = ISA_core.reify_typ @{typ "int \<Rightarrow> bool"}
val ty3 = ISA_core.reify_typ @{typ "prop"}
val ty4 = ISA_core.reify_typ @{typ "'a list"}
\<close>
term*\<open>@{typ \<open>int\<close>}\<close>
value*\<open>@{typ \<open>int\<close>}\<close>
value*\<open>@{typ \<open>int \<Rightarrow> bool\<close>}\<close>
term*\<open>@{typ \<open>prop\<close>}\<close>
value*\<open>@{typ \<open>prop\<close>}\<close>
term*\<open>@{typ \<open>'a list\<close>}\<close>
value*\<open>@{typ \<open>'a list\<close>}\<close>
ML\<open>
val t1 = ISA_core.reify_term @{term "1::int"}
val t2 = ISA_core.reify_term @{term "\<lambda>x. x = 1"}
val t3 = ISA_core.reify_term @{term "[2, 3::int]"}
\<close>
term*\<open>@{term \<open>1::int\<close>}\<close>
value*\<open>@{term \<open>1::int\<close>}\<close>
term*\<open>@{term \<open>\<lambda>x. x = 1\<close>}\<close>
value*\<open>@{term \<open>\<lambda>x. x = 1\<close>}\<close>
term*\<open>@{term \<open>[2, 3::int]\<close>}\<close>
value*\<open>@{term \<open>[2, 3::int]\<close>}\<close>
prf refl
full_prf refl
term*\<open>@{thm \<open>HOL.refl\<close>}\<close>
value*\<open>proof @{thm \<open>HOL.refl\<close>}\<close>
value*\<open>proof @{thm \<open>HOL.refl\<close>}\<close>
value*\<open>depth (proof @{thm \<open>HOL.refl\<close>})\<close>
value*\<open>size (proof @{thm \<open>HOL.refl\<close>})\<close>
value*\<open>fv_Proof (proof @{thm \<open>HOL.refl\<close>})\<close>
term*\<open>@{thms-of \<open>HOL.refl\<close>}\<close>
value*\<open>@{thms-of \<open>HOL.refl\<close>}\<close>
ML\<open>
val t_schematic = TVar(("'a",0), [])
val t = @{term "Tv (Var (STR '''a'', 0)) {}"}
val rt_schematic = ISA_core.reify_typ t_schematic
val true = rt_schematic = t
\<close>
lemma test : "A \<and> B \<longrightarrow> B \<and> A"
by auto
lemma test2 : "A \<and> B \<Longrightarrow> B \<and> A"
by auto
lemma test3: "A \<and> B \<longrightarrow> B \<and> A"
proof
assume "A \<and> B"
then obtain B and A ..
then show "B \<and> A" ..
qed
lemma test4:
assumes "(A \<and> B)"
shows "B \<and> A"
apply (insert assms)
by auto
lemma test_subst : "\<lbrakk>x = f x; odd(f x)\<rbrakk> \<Longrightarrow> odd x"
by (erule ssubst)
inductive_set even' :: "nat set" where
"0 \<in> even'"
| "n \<in> even' \<Longrightarrow> (Suc (Suc n)) \<in> even'"
find_theorems name:"even'.induct"
(*lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
proof(induct n)
case 0 then show ?case by simp
next
case (Suc n) then show ?case
apply (simp add: dvd_def)
apply (rule_tac x ="Suc k" in exI)
apply clarify*)
theorem "((A \<longrightarrow> B) \<longrightarrow> A) \<longrightarrow> A"
proof
assume "(A \<longrightarrow> B) \<longrightarrow> A"
show A
proof (rule classical)
assume "\<not> A"
have "A \<longrightarrow> B"
proof
assume A
with \<open>\<not> A\<close> show B by contradiction
qed
with \<open>(A \<longrightarrow> B) \<longrightarrow> A\<close> show A ..
qed
qed
(*lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
using [[simp_trace]]
apply (induct n)
apply (subst even_zero)
apply(rule TrueI)
apply(simp)*)
lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
apply (erule even'.induct)
apply (simp_all add: dvd_def)
using [[simp_trace]]
apply clarify
find_theorems name:"_ = 2 * _"
apply (rule_tac x ="Suc k" in exI)
using [[simp_trace]]
apply simp
done
(*
lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
apply (induct_tac rule:even'.induct)*)
inductive ev :: " nat \<Rightarrow> bool " where
ev0: " ev 0 " |
evSS: " ev n \<Longrightarrow> ev (n + 2) "
fun evn :: " nat \<Rightarrow> bool " where
" evn 0 = True " |
" evn (Suc 0) = False " |
" evn (Suc (Suc n)) = evn n "
(*lemma assumes a: " ev (Suc(Suc m)) " shows" ev m "
proof(induction "Suc (Suc m)" arbitrary: " m " rule: ev.induct)*)
(*lemma " ev (Suc (Suc m)) \<Longrightarrow> ev m "
proof(induction " Suc (Suc m) " arbitrary: " m " rule: ev.induct)
case ev0
then show ?case sorry
next
case (evSS n)
then show ?case sorry
qed*)
(* And neither of these can apply the induction *)
(*
lemma assumes a1: " ev n " and a2: " n = (Suc (Suc m)) " shows " ev m "
proof (induction " n " arbitrary: " m " rule: ev.induct)
lemma assumes a1: " n = (Suc (Suc m)) " and a2: "ev n " shows " ev m "
proof (induction " n " arbitrary: " m " rule: ev.induct)
*)
(* But this one can ?! *)
(*
lemma assumes a1: " ev n " and a2: " n = (Suc (Suc m)) " shows " ev m "
proof -
from a1 and a2 show " ev m "
proof (induction " n " arbitrary: " m " rule: ev.induct)
case ev0
then show ?case by simp
next
case (evSS n) thus ?case by simp
qed
qed
*)
inductive_set even :: "int set" where
zero[intro!]: "0 \<in> even" |
plus[intro!]: "n \<in> even \<Longrightarrow> n+2 \<in> even " |
min[intro!]: "n \<in> even \<Longrightarrow> n-2 \<in> even "
lemma a : "2+2=4" by simp
lemma b : "(0::int)+2=2" by simp
lemma test_subst_2 : "4 \<in> even"
apply (subst a[symmetric])
apply (rule plus)
apply (subst b[symmetric])
apply (rule plus)
apply (rule zero)
done
(*lemma "\<lbrakk>P x y z; Suc x < y\<rbrakk> \<Longrightarrow> f z = x * y"
(*using [[simp_trace]]*)
(*apply (simp add: mult.commute)*)
apply (subst mult.commute)
apply (rule mult.commute [THEN ssubst])*)
datatype 'a seq = Empty | Seq 'a "'a seq"
find_consts name:"Reification_Test*seq*"
fun conc :: "'a seq \<Rightarrow> 'a seq \<Rightarrow> 'a seq"
where
c1 : "conc Empty ys = ys"
| c2 : "conc (Seq x xs) ys = Seq x (conc xs ys)"
lemma seq_not_eq : "Seq x xs \<noteq> xs"
using [[simp_trace]]
proof (induct xs arbitrary: x)
case Empty
show "Seq x Empty \<noteq> Empty" by simp
next
case (Seq y ys)
show "Seq x (Seq y ys) \<noteq> Seq y ys"
using \<open>Seq y ys \<noteq> ys\<close> by simp
qed
lemma identity_conc : "conc xs Empty = xs"
using [[simp_trace]]
using[[simp_trace_depth_limit=8]]
using [[unify_trace_simp]]
using[[unify_trace_types]]
using [[unify_trace_bound=0]]
(* using [[simp_trace_new depth=10]] *)
apply (induct xs)
apply (subst c1)
apply (rule refl)
apply (subst c2)
apply (rule_tac s="xs" and P="\<lambda>X. Seq x1 X = Seq x1 xs" in subst)
apply (rule sym)
apply assumption
apply (rule refl)
done
lemma imp_ex : "(\<exists>x. \<forall>y. P x y) \<longrightarrow> (\<forall>y. \<exists>x. P x y)"
using [[simp_trace]]
using[[simp_trace_depth_limit=8]]
apply (auto)
done
lemma length_0_conv' [iff]: "(length [] = 0)"
apply (subst List.list.size(3))
apply (rule refl)
done
lemma cons_list : "a#xs = [a]@xs"
using [[simp_trace]]
apply (subst List.append.append_Cons)
apply (subst List.append.append_Nil)
apply (rule refl)
done
lemma replacement: "\<lbrakk> a = b; c = d \<rbrakk> \<Longrightarrow> f a c = f b d"
apply (erule ssubst)+
apply (rule refl )
done
lemma assoc_append : "k @ (l @ m) = (k @ l ) @ m"
apply (induct_tac k )
apply (subst append_Nil )+
apply (rule refl )
apply (subst append_Cons)
apply (subst append_Cons)
apply (subst append_Cons)
apply (rule_tac f ="Cons" in replacement)
apply (rule refl)
apply assumption
done
lemma length_cons : "length (xs @ ys) = length xs + length ys"
using [[simp_trace]]
apply (subst List.length_append)
apply (rule refl)
done
lemma length_plus : "(length [a] + length xs = 0) = ([a] @ xs = [])"
using [[simp_trace]]
apply (subst List.list.size(4))
apply (subst List.list.size(3))
apply (subst Nat.add_Suc_right)
apply (subst Groups.monoid_add_class.add.right_neutral)
apply (subst Nat.plus_nat.add_Suc)
apply (subst Groups.monoid_add_class.add.left_neutral)
apply (subst Nat.old.nat.distinct(2))
by simp
lemma empty_list : "(length [] = 0) = ([] = []) = True"
using [[simp_trace]]
by simp
lemma TrueI: True
using [[simp_trace]]
unfolding True_def
by (rule refl)
lemma length_0_conv [iff]: "(length xs = 0) = (xs = [])"
using [[simp_trace]]
apply (induct xs)
apply (subst List.list.size(3))
apply(subst HOL.simp_thms(6))
apply(subst HOL.simp_thms(6))
apply(rule refl)
apply (subst cons_list)
apply (subst(2) cons_list)
apply (subst length_cons)
apply (subst length_plus)
apply (subst HOL.simp_thms(6))
apply (rule TrueI)
done
(*by (induct xs) auto*)
find_theorems (50) name:"HOL.simp_thms"
find_theorems (50) name:"List.list*size"
find_theorems (50) name:"List.list*length"
find_theorems "_ @ _"
find_theorems (500) "List.length [] = 0"
find_theorems (550) "length _ = length _ + length _"
lemma identity_list : "xs @ [] = xs"
using [[simp_trace]]
using[[simp_trace_depth_limit=8]]
using [[unify_trace_simp]]
using[[unify_trace_types]]
using [[unify_trace_bound=0]]
apply (induct xs)
apply (subst List.append_Nil2)
apply (subst HOL.simp_thms(6))
apply(rule TrueI)
apply (subst List.append_Nil2)
apply (subst HOL.simp_thms(6))
apply(rule TrueI)
done
lemma identity_list' : "xs @ [] = xs"
using [[simp_trace]]
using[[simp_trace_depth_limit=8]]
using [[unify_trace_simp]]
using[[unify_trace_types]]
using [[unify_trace_bound=0]]
(* using [[simp_trace_new depth=10]] *)
apply (induct "length xs")
apply (subst (asm) zero_reorient)
apply(subst(asm) length_0_conv)
apply (subst List.append_Nil2)
apply (subst HOL.simp_thms(6))
apply (rule TrueI)
apply (subst List.append_Nil2)
apply (subst HOL.simp_thms(6))
apply (rule TrueI)
done
lemma conj_test : "A \<and> B \<and> C \<longrightarrow> B \<and> A"
apply (rule impI)
apply (rule conjI)
apply (drule conjunct2)
apply (drule conjunct1)
apply assumption
apply (drule conjunct1)
apply assumption
done
declare[[show_sorts]]
declare[[ML_print_depth = 20]]
ML\<open>
val full = true
val thm = @{thm "test"}
val hyps = Thm.hyps_of thm
val prems = Thm.prems_of thm
val reconstruct_proof = Thm.reconstruct_proof_of thm
val standard_proof = Proof_Syntax.standard_proof_of
{full = full, expand_name = Thm.expand_name thm} thm
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
\<close>
lemma identity_conc' : "conc xs Empty = xs"
using [[simp_trace]]
using[[simp_trace_depth_limit=8]]
using [[unify_trace_simp]]
using[[unify_trace_types]]
using [[unify_trace_bound=0]]
(* using [[simp_trace_new depth=10]] *)
apply (induct xs)
apply (subst c1)
apply (rule refl)
apply (subst c2)
apply (rule_tac s="xs" and P="\<lambda>X. Seq x1 X = Seq x1 xs" in subst)
apply (rule sym)
apply assumption
apply (rule refl)
done
declare[[show_sorts = false]]
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm "identity_conc'"};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
term*\<open>@{thm \<open>Reification_Test.identity_conc\<close>}\<close>
value*\<open>proof @{thm \<open>Reification_Test.identity_conc\<close>}\<close>
lemma cons_list' : "a#xs = [a]@xs"
using [[simp_trace]]
apply (subst List.append.append_Cons)
apply (subst List.append.append_Nil)
apply (rule refl)
done
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm "cons_list'"};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
declare[[show_sorts = false]]
declare[[ML_print_depth = 20]]
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm "test"};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
prf test
full_prf test
term*\<open>@{thm \<open>Reification_Test.test\<close>}\<close>
value*\<open>proof @{thm \<open>Reification_Test.test\<close>}\<close>
term*\<open>@{thms-of \<open>Reification_Test.test\<close>}\<close>
value*\<open>@{thms-of \<open>Reification_Test.test\<close>}\<close>
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm test2};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
prf test2
full_prf test2
term*\<open>@{thm \<open>Reification_Test.test2\<close>}\<close>
value*\<open>proof @{thm \<open>Reification_Test.test2\<close>}\<close>
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm test3};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
prf test3
full_prf test3
term*\<open>@{thm \<open>Reification_Test.test3\<close>}\<close>
value*\<open>@{thm \<open>Reification_Test.test3\<close>}\<close>
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm test4};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
prf test4
full_prf test4
term*\<open>@{thm \<open>Reification_Test.test4\<close>}\<close>
value*\<open>@{thm \<open>Reification_Test.test4\<close>}\<close>
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm Pure.symmetric};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
prf symmetric
full_prf symmetric
term*\<open>@{thm \<open>Pure.symmetric\<close>}\<close>
value*\<open>proof @{thm \<open>Pure.symmetric\<close>}\<close>
ML\<open>
val full = true
val thm = @{thm "Groups.minus_class.super"}
val standard_proof = Proof_Syntax.standard_proof_of
{full = full, expand_name = Thm.expand_name thm} thm
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
\<close>
ML\<open>
val thm = Proof_Context.get_thm \<^context> "Groups.minus_class.super"
val prop = Thm.prop_of thm
val proof = Thm.proof_of thm
\<close>
prf Groups.minus_class.super
full_prf Groups.minus_class.super
term*\<open>@{thm \<open>Groups.minus_class.super\<close>}\<close>
value*\<open>@{thm \<open>Groups.minus_class.super\<close>}\<close>
(*ML\<open>
val full = true
val thm = @{thm "Homotopy.starlike_imp_contractible"}
val standard_proof = Proof_Syntax.standard_proof_of
{full = full, expand_name = Thm.expand_name thm} thm
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
\<close>
ML\<open>
val thm = Proof_Context.get_thm \<^context> "Homotopy.starlike_imp_contractible"
val prop = Thm.prop_of thm
val proof = Thm.proof_of thm
\<close>
prf Homotopy.starlike_imp_contractible
full_prf Homotopy.starlike_imp_contractible
term*\<open>@{thm \<open>Homotopy.starlike_imp_contractible\<close>}\<close>
value*\<open>@{thm \<open>Homotopy.starlike_imp_contractible\<close>}\<close>*)
(* stefan bergofer phd thesis example proof construction 2.3.2 *)
lemma stefan_example : "(\<exists>x. \<forall>y. P x y) \<longrightarrow> (\<forall>y. \<exists>x. P x y)"
apply (rule impI)
apply(rule allI)
apply (erule exE)
apply(rule exI)
apply(erule allE)
apply (assumption)
done
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm stefan_example};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
ML\<open>
val thy = \<^theory>;
val prf =
Proof_Syntax.read_proof thy true false
"mp \<cdot> _ \<cdot> _ \<bullet> (impI \<cdot> _ \<cdot> _ \<bullet> (conjI \<cdot> _ \<cdot> _ ))";
(*"conjI \<cdot> _ \<cdot> _ ";*)
(*"(\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H)";*)
(*val t = Proofterm.reconstruct_proof thy \<^prop>\<open>(A \<longrightarrow> B) \<Longrightarrow> A \<Longrightarrow> B\<close> prf*)
(* val thm =
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B\<close> prf
|> Proof_Checker.thm_of_proof thy
|> Drule.export_without_context
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);*)
\<close>
extract_type
"typeof (Trueprop P) \<equiv> typeof P"
realizers
impI (P, Q): "\<lambda>pq. pq"
"\<^bold>\<lambda>(c: _) (d: _) P Q pq (h: _). allI \<cdot> _ \<bullet> c \<bullet> (\<^bold>\<lambda>x. impI \<cdot> _ \<cdot> _ \<bullet> (h \<cdot> x))"
find_consts name:"MinProof"
ML_val \<open>
val thy = \<^theory>;
val prf =
Proof_Syntax.read_proof thy true false
"impI \<cdot> _ \<cdot> _ \<bullet> \
\ (\<^bold>\<lambda>H: _. \
\ conjE \<cdot> _ \<cdot> _ \<cdot> _ \<bullet> H \<bullet> \
\ (\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H))";
val thm =
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<and> B \<longrightarrow> B \<and> A\<close> prf
|> Proof_Checker.thm_of_proof thy
|> Drule.export_without_context;
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
\<close>
ML_file "~~/src/Provers/classical.ML"
lemma testtest : "A \<and> B \<longrightarrow> B \<and> A"
apply (rule impI)
apply (erule conjE)
apply(erule conjI)
apply assumption
done
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
ML\<open>
val thm = @{thm testtest};
(*proof body with digest*)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
(*proof term only*)
val prf = Proofterm.proof_of body;
(*clean output*)
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
(*all theorems used in the graph of nested proofs*)
val all_thms =
Proofterm.fold_body_thms
(fn {name, ...} => insert (op =) name) [body] [];
\<close>
ML\<open>
val thy = \<^theory>
val prf =
Proof_Syntax.read_proof thy true false
"impI \<cdot> _ \<cdot> _ \<bullet> \
\ (\<^bold>\<lambda>H: _. \
\ conjE \<cdot> _ \<cdot> _ \<cdot> _ \<bullet> H \<bullet> \
\ (\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H))";
\<close>
ML\<open>
val thy = \<^theory>
val prf =
Proof_Syntax.read_proof thy true false
"\<^bold>\<lambda>(H: A \<and> B). conjE \<cdot> A \<cdot> B \<cdot> A \<and> B \<bullet> H";
(* val thm =
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B \<Longrightarrow> B \<and> A\<close> prf
|> Proof_Checker.thm_of_proof thy
|> Drule.export_without_context;
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);*)
\<close>
ML\<open>
val thy = \<^theory>
val prf =
Proof_Syntax.read_proof thy true false
"\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H";
val thm =
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B \<Longrightarrow> B \<and> A\<close> prf
|> Proof_Checker.thm_of_proof thy
|> Drule.export_without_context;
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
\<close>
end


@@ -0,0 +1,20 @@
theory Very_Deep_DOF
imports "Isabelle_DOF-Proofs.Very_Deep_Interpretation"
begin
(* tests *)
term "@{typ ''int => int''}"
term "@{term ''Bound 0''}"
term "@{thm ''refl''}"
term "@{docitem ''<doc_ref>''}"
ML\<open> @{term "@{docitem ''<doc_ref>''}"}\<close>
term "@{typ \<open>int \<Rightarrow> int\<close>}"
term "@{term \<open>\<forall>x. P x \<longrightarrow> Q\<close>}"
term "@{thm \<open>refl\<close>}"
term "@{docitem \<open>doc_ref\<close>}"
ML\<open> @{term "@{docitem \<open>doc_ref\<close>}"}\<close>
(**)
end


@@ -0,0 +1,331 @@
theory Very_Deep_Interpretation
imports "Isabelle_DOF.Isa_COL"
Metalogic_ProofChecker.ProofTerm
begin
subsection\<open> Syntax \<close>
\<comment> \<open>and others in the future : file, http, thy, ...\<close>
(* Delete shallow interpretation notations (mixfixes) of the term anti-quotations,
so we can use them for the deep interpretation *)
no_notation "Isabelle_DOF_typ" ("@{typ _}")
no_notation "Isabelle_DOF_term" ("@{term _}")
no_notation "Isabelle_DOF_thm" ("@{thm _}")
no_notation "Isabelle_DOF_file" ("@{file _}")
no_notation "Isabelle_DOF_thy" ("@{thy _}")
no_notation "Isabelle_DOF_docitem" ("@{docitem _}")
no_notation "Isabelle_DOF_docitem_attr" ("@{docitemattr (_) :: (_)}")
no_notation "Isabelle_DOF_trace_attribute" ("@{trace-attribute _}")
consts Isabelle_DOF_typ :: "string \<Rightarrow> typ" ("@{typ _}")
consts Isabelle_DOF_term :: "string \<Rightarrow> term" ("@{term _}")
consts Isabelle_DOF_term_repr :: "string \<Rightarrow> term" ("@{termrepr _}")
datatype "thm" = Isabelle_DOF_thm string ("@{thm _}") | Thm_content ("proof":proofterm)
datatype "thms_of" = Isabelle_DOF_thms_of string ("@{thms-of _}")
datatype "file" = Isabelle_DOF_file string ("@{file _}")
datatype "thy" = Isabelle_DOF_thy string ("@{thy _}")
consts Isabelle_DOF_docitem :: "string \<Rightarrow> 'a" ("@{docitem _}")
datatype "docitem_attr" = Isabelle_DOF_docitem_attr string string ("@{docitemattr (_) :: (_)}")
consts Isabelle_DOF_trace_attribute :: "string \<Rightarrow> (string * string) list" ("@{trace-attribute _}")
subsection\<open> Semantics \<close>
ML\<open>
structure ISA_core =
struct
fun check_path check_file ctxt dir (name, pos) =
let
val _ = Context_Position.report ctxt pos (Markup.language_path true); (* TODO: pos should be
"lifted" to
type source *)
val path = Path.append dir (Path.explode name) handle ERROR msg => ISA_core.err msg pos;
val _ = Path.expand path handle ERROR msg => ISA_core.err msg pos;
val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path));
val _ =
(case check_file of
NONE => path
| SOME check => (check path handle ERROR msg => ISA_core.err msg pos));
in path end;
fun ML_isa_antiq check_file thy (name, _, pos) =
let val path = check_path check_file (Proof_Context.init_global thy) Path.current (name, pos);
in "Path.explode " ^ ML_Syntax.print_string (Path.implode path) end;
fun ML_isa_check_generic check thy (term, pos) =
let val name = (HOLogic.dest_string term
handle TERM(_,[t]) => error ("wrong term format: must be string constant: "
^ Syntax.string_of_term_global thy t ))
val _ = check thy (name,pos)
in SOME term end;
fun check_identity _ (term, _, _) _ = SOME term
fun ML_isa_check_typ thy (term, _, pos) _ =
let fun check thy (name, _) = let val ctxt = (Proof_Context.init_global thy)
in (Syntax.check_typ ctxt o Syntax.parse_typ ctxt) name end
in ML_isa_check_generic check thy (term, pos) end
fun ML_isa_check_term thy (term, _, pos) _ =
let fun check thy (name, _) = let val ctxt = (Proof_Context.init_global thy)
in (Syntax.check_term ctxt o Syntax.parse_term ctxt) name end
in ML_isa_check_generic check thy (term, pos) end
fun ML_isa_check_thm thy (term, _, pos) _ =
(* this works for long-names only *)
let fun check thy (name, _) = case Proof_Context.lookup_fact (Proof_Context.init_global thy) name of
NONE => ISA_core.err ("No Theorem:" ^name) pos
| SOME X => X
in ML_isa_check_generic check thy (term, pos) end
fun ML_isa_check_file thy (term, _, pos) _ =
let fun check thy (name, pos) = check_path (SOME File.check_file)
(Proof_Context.init_global thy)
(Path.current)
(name, pos);
in ML_isa_check_generic check thy (term, pos) end;
fun ML_isa_id thy (term,pos) = SOME term
fun ML_isa_check_docitem thy (term, req_ty, pos) _ =
let fun check thy (name, _) s =
let val DOF_core.Instance {cid,...} =
DOF_core.get_instance_global name thy
val ns = DOF_core.get_instances (Proof_Context.init_global thy)
|> Name_Space.space_of_table
val {pos=pos', ...} = Name_Space.the_entry ns name
val ctxt = (Proof_Context.init_global thy)
val req_class = case req_ty of
\<^Type>\<open>fun _ T\<close> => DOF_core.typ_to_cid T
| _ => error("can not infer type for: "^ name)
in if cid <> DOF_core.default_cid
andalso not(DOF_core.is_subclass ctxt cid req_class)
then error("reference ontologically inconsistent: "
^cid^" vs. "^req_class^ Position.here pos')
else ()
end
in ML_isa_check_generic check thy (term, pos) end
fun ML_isa_check_trace_attribute thy (term, _, pos) s =
let
val oid = (HOLogic.dest_string term
handle TERM(_,[t]) => error ("wrong term format: must be string constant: "
^ Syntax.string_of_term_global thy t ))
val _ = DOF_core.get_instance_global oid thy
in SOME term end
fun ML_isa_elaborate_generic (_:theory) isa_name ty term_option _ =
case term_option of
NONE => error("Wrong term option. You must use a defined term")
| SOME term => Const (isa_name, ty) $ term
fun reify_typ (Type (s, typ_list)) =
\<^Const>\<open>Ty\<close> $ HOLogic.mk_literal s $ HOLogic.mk_list \<^Type>\<open>typ\<close> (map reify_typ typ_list)
| reify_typ (TFree (name, sort)) =
\<^Const>\<open>Tv\<close> $(\<^Const>\<open>Free\<close> $ HOLogic.mk_literal name)
$ (HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))
| reify_typ (TVar (indexname, sort)) =
let val (name, index_value) = indexname
in \<^Const>\<open>Tv\<close>
$ (\<^Const>\<open>Var\<close>
$ HOLogic.mk_prod (HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value))
$ (HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort)) end
fun ML_isa_elaborate_typ (thy:theory) _ _ term_option _ =
case term_option of
NONE => error("Wrong term option. You must use a defined term")
| SOME term => let
val typ_name = HOLogic.dest_string term
val typ = Syntax.read_typ_global thy typ_name
in reify_typ typ end
fun reify_term (Const (name, typ)) =\<^Const>\<open>Ct\<close> $ HOLogic.mk_literal name $ reify_typ typ
| reify_term (Free (name, typ)) =
\<^Const>\<open>Fv\<close> $ (\<^Const>\<open>Free\<close> $ HOLogic.mk_literal name) $ reify_typ typ
| reify_term (Var (indexname, typ)) =
let val (name, index_value) = indexname
in \<^Const>\<open>Fv\<close>
$ (\<^Const>\<open>Var\<close>
$ HOLogic.mk_prod (HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value))
$ reify_typ typ end
| reify_term (Bound i) = \<^Const>\<open>Bv\<close> $ HOLogic.mk_nat i
| reify_term (Abs (_, typ, term)) = \<^Const>\<open>Abs\<close> $ reify_typ typ $ reify_term term
| reify_term (Term.$ (t1, t2)) = \<^Const>\<open>App\<close> $ reify_term t1 $ reify_term t2
fun ML_isa_elaborate_term (thy:theory) _ _ term_option _ =
case term_option of
NONE => error("Wrong term option. You must use a defined term")
| SOME term => let
val term_name = HOLogic.dest_string term
val term = Syntax.read_term_global thy term_name
in reify_term term end
fun reify_proofterm (PBound i) =\<^Const>\<open>PBound\<close> $ (HOLogic.mk_nat i)
| reify_proofterm (Abst (_, typ_option, proof)) =
\<^Const>\<open>Abst\<close> $ reify_typ (the typ_option) $ reify_proofterm proof
| reify_proofterm (AbsP (_, term_option, proof)) =
\<^Const>\<open>AbsP\<close> $ reify_term (the term_option) $ reify_proofterm proof
| reify_proofterm (op % (proof, term_option)) =
\<^Const>\<open>Appt\<close> $ reify_proofterm proof $ reify_term (the term_option)
| reify_proofterm (op %% (proof1, proof2)) =
\<^Const>\<open>AppP\<close> $ reify_proofterm proof1 $ reify_proofterm proof2
| reify_proofterm (Hyp term) = \<^Const>\<open>Hyp\<close> $ (reify_term term)
| reify_proofterm (PAxm (_, term, typ_list_option)) =
let
val tvars = rev (Term.add_tvars term [])
val meta_tvars = map (fn ((name, index_value), sort) =>
HOLogic.mk_prod
(\<^Const>\<open>Var\<close>
$ HOLogic.mk_prod
(HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value)
, HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))) tvars
val meta_typ_list =
HOLogic.mk_list @{typ "tyinst"} (map2 (fn x => fn y => HOLogic.mk_prod (x, y))
meta_tvars (map reify_typ (the typ_list_option)))
in \<^Const>\<open>PAxm\<close> $ reify_term term $ meta_typ_list end
| reify_proofterm (PClass (typ, class)) =
\<^Const>\<open>OfClass\<close> $ reify_typ typ $ HOLogic.mk_literal class
| reify_proofterm (PThm ({prop = prop, types = types, ...}, _)) =
let
val tvars = rev (Term.add_tvars prop [])
val meta_tvars = map (fn ((name, index_value), sort) =>
HOLogic.mk_prod
(\<^Const>\<open>Var\<close>
$ HOLogic.mk_prod
(HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value)
, HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))) tvars
val meta_typ_list =
HOLogic.mk_list \<^typ>\<open>tyinst\<close> (map2 (fn x => fn y => HOLogic.mk_prod (x, y))
meta_tvars (map reify_typ (the types)))
in \<^Const>\<open>PAxm\<close> $ reify_term prop $ meta_typ_list end
fun ML_isa_elaborate_thm (thy:theory) _ _ term_option pos =
case term_option of
NONE => ISA_core.err ("Malformed term annotation") pos
| SOME term =>
let
val thm_name = HOLogic.dest_string term
val _ = writeln ("In ML_isa_elaborate_thm thm_name: " ^ \<^make_string> thm_name)
val thm = Proof_Context.get_thm (Proof_Context.init_global thy) thm_name
val _ = writeln ("In ML_isa_elaborate_thm thm: " ^ \<^make_string> thm)
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
val prf = Proofterm.proof_of body;
(* Proof_Syntax.standard_proof_of reconstructs the proof and seems to rewrite
the option arguments (with a value NONE) of the proof datatype constructors,
at least for PAxm, with "SOME (typ/term)",
allowing us the use the projection function "the".
Maybe the function can deal with
all the option types of the proof datatype constructors *)
val proof = Proof_Syntax.standard_proof_of
{full = true, expand_name = Thm.expand_name thm} thm
val _ = writeln ("In ML_isa_elaborate_thm proof: " ^ \<^make_string> proof)
(* After a small discussion with Simon Roßkopf, It seems preferable to use
Thm.reconstruct_proof_of instead of Proof_Syntax.standard_proof_of
whose operation is not well known.
Thm.reconstruct_proof_of seems sufficient to have a reifiable PAxm
in the metalogic. *)
val proof' = Thm.reconstruct_proof_of thm
(*in \<^Const>\<open>Thm_content\<close> $ reify_proofterm prf end*)
(*in \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof end*)
in \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof end
fun ML_isa_elaborate_thms_of (thy:theory) _ _ term_option pos =
case term_option of
NONE => ISA_core.err ("Malformed term annotation") pos
| SOME term =>
let
val thm_name = HOLogic.dest_string term
val thm = Proof_Context.get_thm (Proof_Context.init_global thy) thm_name
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm)
val all_thms_name = Proofterm.fold_body_thms (fn {name, ...} => insert (op =) name) [body] []
(*val all_thms = map (Proof_Context.get_thm (Proof_Context.init_global thy)) all_thms_name*)
(*val all_proofs = map (Proof_Syntax.standard_proof_of
{full = true, expand_name = Thm.expand_name thm}) all_thms*)
(*in HOLogic.mk_list \<^Type>\<open>thm\<close> (map (fn proof => \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof) all_proofs) end*)
in HOLogic.mk_list \<^typ>\<open>string\<close> (map HOLogic.mk_string all_thms_name) end
fun ML_isa_elaborate_trace_attribute (thy:theory) _ _ term_option pos =
case term_option of
NONE => ISA_core.err ("Malformed term annotation") pos
| SOME term =>
let
val oid = HOLogic.dest_string term
val traces = ISA_core.compute_attr_access (Context.Theory thy) "trace" oid NONE pos
fun conv (\<^Const>\<open>Pair \<^typ>\<open>doc_class rexp\<close> \<^typ>\<open>string\<close>\<close>
$ (\<^Const>\<open>Atom \<^typ>\<open>doc_class\<close>\<close> $ (\<^Const>\<open>mk\<close> $ s)) $ S) =
let val s' = DOF_core.get_onto_class_name_global (HOLogic.dest_string s) thy
in \<^Const>\<open>Pair \<^typ>\<open>string\<close> \<^typ>\<open>string\<close>\<close> $ HOLogic.mk_string s' $ S end
val traces' = map conv (HOLogic.dest_list traces)
in HOLogic.mk_list \<^Type>\<open>prod \<^typ>\<open>string\<close> \<^typ>\<open>string\<close>\<close> traces' end
(* utilities *)
fun property_list_dest ctxt X =
map (fn \<^Const_>\<open>Isabelle_DOF_term for s\<close> => HOLogic.dest_string s
|\<^Const_>\<open>Isabelle_DOF_term_repr for s\<close> => holstring_to_bstring ctxt (HOLogic.dest_string s))
(HOLogic.dest_list X)
end; (* struct *)
\<close>
ML\<open>
val ty1 = ISA_core.reify_typ @{typ "int"}
val ty2 = ISA_core.reify_typ @{typ "int \<Rightarrow> bool"}
val ty3 = ISA_core.reify_typ @{typ "prop"}
val ty4 = ISA_core.reify_typ @{typ "'a list"}
\<close>
ML\<open>
val t1 = ISA_core.reify_term @{term "1::int"}
val t2 = ISA_core.reify_term @{term "\<lambda>x. x = 1"}
val t3 = ISA_core.reify_term @{term "[2, 3::int]"}
\<close>
subsection\<open> Isar - Setup\<close>
(* Isa_transformers declaration for Isabelle_DOF term anti-quotations (typ, term, thm, etc.).
They must be declared in the same theory file as the one of the declaration
of Isabelle_DOF term anti-quotations !!! *)
setup\<open>
[(\<^type_name>\<open>thm\<close>, ISA_core.ML_isa_check_thm, ISA_core.ML_isa_elaborate_thm)
, (\<^type_name>\<open>thms_of\<close>, ISA_core.ML_isa_check_thm, ISA_core.ML_isa_elaborate_thms_of)
, (\<^type_name>\<open>file\<close>, ISA_core.ML_isa_check_file, ISA_core.ML_isa_elaborate_generic)]
|> fold (fn (n, check, elaborate) => fn thy =>
let val ns = Sign.tsig_of thy |> Type.type_space
val name = n
val {pos, ...} = Name_Space.the_entry ns name
val bname = Long_Name.base_name name
val binding = Binding.make (bname, pos)
|> Binding.prefix_name DOF_core.ISA_prefix
|> Binding.prefix false bname
in DOF_core.add_isa_transformer binding ((check, elaborate) |> DOF_core.make_isa_transformer) thy
end)
#>
([(\<^const_name>\<open>Isabelle_DOF_typ\<close>, ISA_core.ML_isa_check_typ, ISA_core.ML_isa_elaborate_typ)
,(\<^const_name>\<open>Isabelle_DOF_term\<close>, ISA_core.ML_isa_check_term, ISA_core.ML_isa_elaborate_term)
,(\<^const_name>\<open>Isabelle_DOF_term_repr\<close>, ISA_core.check_identity, ISA_core.ML_isa_elaborate_generic)
,(\<^const_name>\<open>Isabelle_DOF_docitem\<close>,
ISA_core.ML_isa_check_docitem, ISA_core.ML_isa_elaborate_generic)
,(\<^const_name>\<open>Isabelle_DOF_trace_attribute\<close>,
ISA_core.ML_isa_check_trace_attribute, ISA_core.ML_isa_elaborate_trace_attribute)]
|> fold (fn (n, check, elaborate) => fn thy =>
let val ns = Sign.consts_of thy |> Consts.space_of
val name = n
val {pos, ...} = Name_Space.the_entry ns name
val bname = Long_Name.base_name name
val binding = Binding.make (bname, pos)
in DOF_core.add_isa_transformer binding ((check, elaborate) |> DOF_core.make_isa_transformer) thy
end))
\<close>
end