Compare commits

...

5 Commits

Author SHA1 Message Date
Achim D. Brucker f971a8e018 Cleanup. (CI build HOL-OCL/Isabelle_DOF/Isabelle2018 failed) 2019-06-16 21:08:36 +01:00
Achim D. Brucker bbfe6b1a2c Migration from 2017 to 2018. 2019-06-16 21:02:36 +01:00
Achim D. Brucker 4494193d4f Removed old patch for Isabelle 2017. 2019-06-16 21:01:54 +01:00
Achim D. Brucker 033114e29d Cleanup. 2019-06-11 11:02:51 +01:00
Achim D. Brucker 52ecf3f70c Migration to Isabelle 2018. 2019-06-11 11:02:23 +01:00
22 changed files with 16 additions and 2639 deletions

View File

@@ -1,52 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Automata based scanner"
theory AutoMaxChop
imports DA MaxChop
begin
primrec auto_split :: "('a,'s)da \<Rightarrow> 's \<Rightarrow> 'a list * 'a list \<Rightarrow> 'a list \<Rightarrow> 'a splitter" where
"auto_split A q res ps [] = (if fin A q then (ps,[]) else res)" |
"auto_split A q res ps (x#xs) =
auto_split A (next A x q) (if fin A q then (ps,x#xs) else res) (ps@[x]) xs"
definition
auto_chop :: "('a,'s)da \<Rightarrow> 'a chopper" where
"auto_chop A = chop (\<lambda>xs. auto_split A (start A) ([],xs) [] xs)"
lemma delta_snoc: "delta A (xs@[y]) q = next A y (delta A xs q)"
by simp
lemma auto_split_lemma:
"\<And>q ps res. auto_split A (delta A ps q) res ps xs =
maxsplit (\<lambda>ys. fin A (delta A ys q)) res ps xs"
apply (induct xs)
apply simp
apply (simp add: delta_snoc[symmetric] del: delta_append)
done
lemma auto_split_is_maxsplit:
"auto_split A (start A) res [] xs = maxsplit (accepts A) res [] xs"
apply (unfold accepts_def)
apply (subst delta_Nil[where ?s = "start A", symmetric])
apply (subst auto_split_lemma)
apply simp
done
lemma is_maxsplitter_auto_split:
"is_maxsplitter (accepts A) (\<lambda>xs. auto_split A (start A) ([],xs) [] xs)"
by (simp add: auto_split_is_maxsplit is_maxsplitter_maxsplit)
lemma is_maxchopper_auto_chop:
"is_maxchopper (accepts A) (auto_chop A)"
apply (unfold auto_chop_def)
apply (rule is_maxchopper_chop)
apply (rule is_maxsplitter_auto_split)
done
end

View File

@@ -1,29 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
Is there an optimal order of arguments for `next'?
Currently we can have laws like `delta A (a#w) = delta A w o delta A a'
Otherwise we could have `accepts A == fin A o delta A (start A)'
and use foldl instead of foldl2.
*)
section "Projection functions for automata"
theory AutoProj
imports Main
begin
definition start :: "'a * 'b * 'c \<Rightarrow> 'a" where "start A = fst A"
definition "next" :: "'a * 'b * 'c \<Rightarrow> 'b" where "next A = fst(snd(A))"
definition fin :: "'a * 'b * 'c \<Rightarrow> 'c" where "fin A = snd(snd(A))"
lemma [simp]: "start(q,d,f) = q"
by(simp add:start_def)
lemma [simp]: "next(q,d,f) = d"
by(simp add:next_def)
lemma [simp]: "fin(q,d,f) = f"
by(simp add:fin_def)
end
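
A minimal sketch, not part of the original file: if placed before the `end` above, the following lemma spells out the three projections on a concrete, entirely made-up automaton triple.
lemma "start (0::nat, \<lambda>a s. s + a, \<lambda>s. s = 0) = 0 \<and>
       next (0::nat, \<lambda>a s. s + a, \<lambda>s. s = 0) 2 3 = 5"
  by simp  (* both conjuncts follow from the [simp] projection lemmas above *)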

View File

@@ -1,17 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Combining automata and regular expressions"
theory AutoRegExp
imports Automata RegExp2NA RegExp2NAe
begin
theorem "DA.accepts (na2da(rexp2na r)) w = (w : lang r)"
by (simp add: NA_DA_equiv[THEN sym] accepts_rexp2na)
theorem "DA.accepts (nae2da(rexp2nae r)) w = (w : lang r)"
by (simp add: NAe_DA_equiv accepts_rexp2nae)
end
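
A small executable spot check in the style of the Execute theory further down in this diff; it is a hypothetical addition (it would sit before the `end` above), and the concrete regular expression Plus (Atom 0) (Times (Atom 1) (Atom 1)), denoting the language {[0], [1,1]}, is chosen only for illustration.
value "DA.accepts (na2da (rexp2na (Plus (Atom (0::nat)) (Times (Atom 1) (Atom 1))))) [1, 1]"
(* expected to evaluate to True, by the first theorem above *)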

View File

@@ -1,55 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Conversions between automata"
theory Automata
imports DA NAe
begin
definition
na2da :: "('a,'s)na \<Rightarrow> ('a,'s set)da" where
"na2da A = ({start A}, \<lambda>a Q. Union(next A a ` Q), \<lambda>Q. \<exists>q\<in>Q. fin A q)"
definition
nae2da :: "('a,'s)nae \<Rightarrow> ('a,'s set)da" where
"nae2da A = ({start A},
\<lambda>a Q. Union(next A (Some a) ` ((eps A)\<^sup>* `` Q)),
\<lambda>Q. \<exists>p \<in> (eps A)\<^sup>* `` Q. fin A p)"
(*** Equivalence of NA and DA ***)
lemma DA_delta_is_lift_NA_delta:
"\<And>Q. DA.delta (na2da A) w Q = Union(NA.delta A w ` Q)"
by (induct w)(auto simp:na2da_def)
lemma NA_DA_equiv:
"NA.accepts A w = DA.accepts (na2da A) w"
apply (simp add: DA.accepts_def NA.accepts_def DA_delta_is_lift_NA_delta)
apply (simp add: na2da_def)
done
(*** Direct equivalence of NAe and DA ***)
lemma espclosure_DA_delta_is_steps:
"\<And>Q. (eps A)\<^sup>* `` (DA.delta (nae2da A) w Q) = steps A w `` Q"
apply (induct w)
apply(simp)
apply (simp add: step_def nae2da_def)
apply (blast)
done
lemma NAe_DA_equiv:
"DA.accepts (nae2da A) w = NAe.accepts A w"
proof -
have "\<And>Q. fin (nae2da A) Q = (\<exists>q \<in> (eps A)\<^sup>* `` Q. fin A q)"
by(simp add:nae2da_def)
thus ?thesis
apply(simp add:espclosure_DA_delta_is_steps NAe.accepts_def DA.accepts_def)
apply(simp add:nae2da_def)
apply blast
done
qed
end
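
A hypothetical sanity check, not in the original theory, that could precede the `end` above; it merely makes the shape of the subset construction na2da explicit.
lemma "start (na2da A) = {start A} \<and> fin (na2da A) Q = (\<exists>q\<in>Q. fin A q)"
  by (simp add: na2da_def)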

View File

@@ -1,38 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Deterministic automata"
theory DA
imports AutoProj
begin
type_synonym ('a,'s)da = "'s * ('a \<Rightarrow> 's \<Rightarrow> 's) * ('s \<Rightarrow> bool)"
definition
foldl2 :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b" where
"foldl2 f xs a = foldl (\<lambda>a b. f b a) a xs"
definition
delta :: "('a,'s)da \<Rightarrow> 'a list \<Rightarrow> 's \<Rightarrow> 's" where
"delta A = foldl2 (next A)"
definition
accepts :: "('a,'s)da \<Rightarrow> 'a list \<Rightarrow> bool" where
"accepts A = (\<lambda>w. fin A (delta A w (start A)))"
lemma [simp]: "foldl2 f [] a = a \<and> foldl2 f (x#xs) a = foldl2 f xs (f x a)"
by(simp add:foldl2_def)
lemma delta_Nil[simp]: "delta A [] s = s"
by(simp add:delta_def)
lemma delta_Cons[simp]: "delta A (a#w) s = delta A w (next A a s)"
by(simp add:delta_def)
lemma delta_append[simp]:
"\<And>q ys. delta A (xs@ys) q = delta A ys (delta A xs q)"
by(induct xs) simp_all
end
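
A tiny worked example, assuming nothing beyond the definitions above; the name parity_da is hypothetical, and the block would sit before the `end` above. The automaton is intended to accept exactly the words over bool that contain an even number of True's.
definition parity_da :: "(bool, bool) da" where
  "parity_da = (True, \<lambda>a s. if a then \<not> s else s, \<lambda>s. s)"
lemma "accepts parity_da [True, False, True]"  (* two True's, hence accepted *)
  by (simp add: accepts_def parity_da_def)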

View File

@@ -1,20 +0,0 @@
(* Author: Lukas Bulwahn, TUM 2011 *)
section \<open>Executing Automata and membership of Regular Expressions\<close>
theory Execute
imports AutoRegExp
begin
subsection \<open>Example\<close>
definition example_expression
where
"example_expression = (let r0 = Atom (0::nat); r1 = Atom (1::nat)
in Times (Star (Plus (Times r1 r1) r0)) (Star (Plus (Times r0 r0) r1)))"
value "NA.accepts (rexp2na example_expression) [0,1,1,0,0,1]"
value "DA.accepts (na2da (rexp2na example_expression)) [0,1,1,0,0,1]"
end
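
Both value commands above should evaluate to True, since [0,1,1,0,0,1] factors as 0, 11 (first star) followed by 00, 1 (second star). As a hypothetical contrast, not in the original file, a word outside the language could be checked as well:
value "NA.accepts (rexp2na example_expression) [1, 0]"
(* expected to evaluate to False: [1,0] is not generated by example_expression *)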

View File

@@ -1,5 +0,0 @@
theory Functional_Automata
imports AutoRegExp AutoMaxChop RegSet_of_nat_DA Execute
begin
end

View File

@@ -1,120 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Generic scanner"
theory MaxChop
imports MaxPrefix
begin
type_synonym 'a chopper = "'a list \<Rightarrow> 'a list list * 'a list"
definition
is_maxchopper :: "('a list \<Rightarrow> bool) \<Rightarrow> 'a chopper \<Rightarrow> bool" where
"is_maxchopper P chopper =
(\<forall>xs zs yss.
(chopper(xs) = (yss,zs)) =
(xs = concat yss @ zs \<and> (\<forall>ys \<in> set yss. ys \<noteq> []) \<and>
(case yss of
[] \<Rightarrow> is_maxpref P [] xs
| us#uss \<Rightarrow> is_maxpref P us xs \<and> chopper(concat(uss)@zs) = (uss,zs))))"
definition
reducing :: "'a splitter \<Rightarrow> bool" where
"reducing splitf =
(\<forall>xs ys zs. splitf xs = (ys,zs) \<and> ys \<noteq> [] \<longrightarrow> length zs < length xs)"
function chop :: "'a splitter \<Rightarrow> 'a list \<Rightarrow> 'a list list \<times> 'a list" where
[simp del]: "chop splitf xs = (if reducing splitf
then let pp = splitf xs
in if fst pp = [] then ([], xs)
else let qq = chop splitf (snd pp)
in (fst pp # fst qq, snd qq)
else undefined)"
by pat_completeness auto
termination apply (relation "measure (length \<circ> snd)")
apply (auto simp: reducing_def)
apply (case_tac "splitf xs")
apply auto
done
lemma chop_rule: "reducing splitf \<Longrightarrow>
chop splitf xs = (let (pre, post) = splitf xs
in if pre = [] then ([], xs)
else let (xss, zs) = chop splitf post
in (pre # xss,zs))"
apply (simp add: chop.simps)
apply (simp add: Let_def split: prod.split)
done
lemma reducing_maxsplit: "reducing(\<lambda>qs. maxsplit P ([],qs) [] qs)"
by (simp add: reducing_def maxsplit_eq)
lemma is_maxsplitter_reducing:
"is_maxsplitter P splitf \<Longrightarrow> reducing splitf"
by(simp add:is_maxsplitter_def reducing_def)
lemma chop_concat[rule_format]: "is_maxsplitter P splitf \<Longrightarrow>
(\<forall>yss zs. chop splitf xs = (yss,zs) \<longrightarrow> xs = concat yss @ zs)"
apply (induct xs rule:length_induct)
apply (simp (no_asm_simp) split del: if_split
add: chop_rule[OF is_maxsplitter_reducing])
apply (simp add: Let_def is_maxsplitter_def split: prod.split)
done
lemma chop_nonempty: "is_maxsplitter P splitf \<Longrightarrow>
\<forall>yss zs. chop splitf xs = (yss,zs) \<longrightarrow> (\<forall>ys \<in> set yss. ys \<noteq> [])"
apply (induct xs rule:length_induct)
apply (simp (no_asm_simp) add: chop_rule is_maxsplitter_reducing)
apply (simp add: Let_def is_maxsplitter_def split: prod.split)
apply (intro allI impI)
apply (rule ballI)
apply (erule exE)
apply (erule allE)
apply auto
done
lemma is_maxchopper_chop:
assumes prem: "is_maxsplitter P splitf" shows "is_maxchopper P (chop splitf)"
apply(unfold is_maxchopper_def)
apply clarify
apply (rule iffI)
apply (rule conjI)
apply (erule chop_concat[OF prem])
apply (rule conjI)
apply (erule prem[THEN chop_nonempty[THEN spec, THEN spec, THEN mp]])
apply (erule rev_mp)
apply (subst prem[THEN is_maxsplitter_reducing[THEN chop_rule]])
apply (simp add: Let_def prem[simplified is_maxsplitter_def]
split: prod.split)
apply clarify
apply (rule conjI)
apply (clarify)
apply (clarify)
apply simp
apply (frule chop_concat[OF prem])
apply (clarify)
apply (subst prem[THEN is_maxsplitter_reducing, THEN chop_rule])
apply (simp add: Let_def prem[simplified is_maxsplitter_def]
split: prod.split)
apply (clarify)
apply (rename_tac xs1 ys1 xss1 ys)
apply (simp split: list.split_asm)
apply (simp add: is_maxpref_def)
apply (blast intro: prefix_append[THEN iffD2])
apply (rule conjI)
apply (clarify)
apply (simp (no_asm_use) add: is_maxpref_def)
apply (blast intro: prefix_append[THEN iffD2])
apply (clarify)
apply (rename_tac us uss)
apply (subgoal_tac "xs1=us")
apply simp
apply simp
apply (simp (no_asm_use) add: is_maxpref_def)
apply (blast intro: prefix_append[THEN iffD2] prefix_order.antisym)
done
end

View File

@@ -1,92 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Maximal prefix"
theory MaxPrefix
imports "HOL-Library.Sublist"
begin
definition
is_maxpref :: "('a list \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
"is_maxpref P xs ys =
(prefix xs ys \<and> (xs=[] \<or> P xs) \<and> (\<forall>zs. prefix zs ys \<and> P zs \<longrightarrow> prefix zs xs))"
type_synonym 'a splitter = "'a list \<Rightarrow> 'a list * 'a list"
definition
is_maxsplitter :: "('a list \<Rightarrow> bool) \<Rightarrow> 'a splitter \<Rightarrow> bool" where
"is_maxsplitter P f =
(\<forall>xs ps qs. f xs = (ps,qs) = (xs=ps@qs \<and> is_maxpref P ps xs))"
fun maxsplit :: "('a list \<Rightarrow> bool) \<Rightarrow> 'a list * 'a list \<Rightarrow> 'a list \<Rightarrow> 'a splitter" where
"maxsplit P res ps [] = (if P ps then (ps,[]) else res)" |
"maxsplit P res ps (q#qs) = maxsplit P (if P ps then (ps,q#qs) else res)
(ps@[q]) qs"
declare if_split[split del]
lemma maxsplit_lemma: "(maxsplit P res ps qs = (xs,ys)) =
(if \<exists>us. prefix us qs \<and> P(ps@us) then xs@ys=ps@qs \<and> is_maxpref P xs (ps@qs)
else (xs,ys)=res)"
proof (induction P res ps qs rule: maxsplit.induct)
case 1
thus ?case by (auto simp: is_maxpref_def split: if_splits)
next
case (2 P res ps q qs)
show ?case
proof (cases "\<exists>us. prefix us qs \<and> P ((ps @ [q]) @ us)")
case True
note ex1 = this
then guess us by (elim exE conjE) note us = this
hence ex2: "\<exists>us. prefix us (q # qs) \<and> P (ps @ us)"
by (intro exI[of _ "q#us"]) auto
with ex1 and 2 show ?thesis by simp
next
case False
note ex1 = this
show ?thesis
proof (cases "\<exists>us. prefix us (q#qs) \<and> P (ps @ us)")
case False
from 2 show ?thesis
by (simp only: ex1 False) (insert ex1 False, auto simp: prefix_Cons)
next
case True
note ex2 = this
show ?thesis
proof (cases "P ps")
case True
with 2 have "(maxsplit P (ps, q # qs) (ps @ [q]) qs = (xs, ys)) \<longleftrightarrow> (xs = ps \<and> ys = q # qs)"
by (simp only: ex1 ex2) simp_all
also have "\<dots> \<longleftrightarrow> (xs @ ys = ps @ q # qs \<and> is_maxpref P xs (ps @ q # qs))"
using ex1 True
by (auto simp: is_maxpref_def prefix_append prefix_Cons append_eq_append_conv2)
finally show ?thesis using True by (simp only: ex1 ex2) simp_all
next
case False
with 2 have "(maxsplit P res (ps @ [q]) qs = (xs, ys)) \<longleftrightarrow> ((xs, ys) = res)"
by (simp only: ex1 ex2) simp
also have "\<dots> \<longleftrightarrow> (xs @ ys = ps @ q # qs \<and> is_maxpref P xs (ps @ q # qs))"
using ex1 ex2 False
by (auto simp: append_eq_append_conv2 is_maxpref_def prefix_Cons)
finally show ?thesis
using False by (simp only: ex1 ex2) simp
qed
qed
qed
qed
declare if_split[split]
lemma is_maxpref_Nil[simp]:
"\<not>(\<exists>us. prefix us xs \<and> P us) \<Longrightarrow> is_maxpref P ps xs = (ps = [])"
by (auto simp: is_maxpref_def)
lemma is_maxsplitter_maxsplit:
"is_maxsplitter P (\<lambda>xs. maxsplit P ([],xs) [] xs)"
by (auto simp: maxsplit_lemma is_maxsplitter_def)
lemmas maxsplit_eq = is_maxsplitter_maxsplit[simplified is_maxsplitter_def]
end
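
A concrete evaluation, added here as a hypothetical illustration for before the `end` above: with the predicate "length at most 2", the longest admissible prefix of [0, 1, 2] is [0, 1].
value "maxsplit (\<lambda>xs. length xs \<le> 2) ([], [0::nat, 1, 2]) [] [0, 1, 2]"
(* expected result: ([0, 1], [2]) *)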

View File

@@ -1,50 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Nondeterministic automata"
theory NA
imports AutoProj
begin
type_synonym ('a,'s) na = "'s * ('a \<Rightarrow> 's \<Rightarrow> 's set) * ('s \<Rightarrow> bool)"
primrec delta :: "('a,'s)na \<Rightarrow> 'a list \<Rightarrow> 's \<Rightarrow> 's set" where
"delta A [] p = {p}" |
"delta A (a#w) p = Union(delta A w ` next A a p)"
definition
accepts :: "('a,'s)na \<Rightarrow> 'a list \<Rightarrow> bool" where
"accepts A w = (\<exists>q \<in> delta A w (start A). fin A q)"
definition
step :: "('a,'s)na \<Rightarrow> 'a \<Rightarrow> ('s * 's)set" where
"step A a = {(p,q) . q : next A a p}"
primrec steps :: "('a,'s)na \<Rightarrow> 'a list \<Rightarrow> ('s * 's)set" where
"steps A [] = Id" |
"steps A (a#w) = step A a O steps A w"
lemma steps_append[simp]:
"steps A (v@w) = steps A v O steps A w"
by(induct v, simp_all add:O_assoc)
lemma in_steps_append[iff]:
"(p,r) : steps A (v@w) = ((p,r) : (steps A v O steps A w))"
apply(rule steps_append[THEN equalityE])
apply blast
done
lemma delta_conv_steps: "\<And>p. delta A w p = {q. (p,q) : steps A w}"
by(induct w)(auto simp:step_def)
lemma accepts_conv_steps:
"accepts A w = (\<exists>q. (start A,q) \<in> steps A w \<and> fin A q)"
by(simp add: delta_conv_steps accepts_def)
abbreviation
Cons_syn :: "'a \<Rightarrow> 'a list set \<Rightarrow> 'a list set" (infixr "##" 65) where
"x ## S \<equiv> Cons x ` S"
end
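
A small worked instance; the automaton one_true is hypothetical (not part of the original development) and would be placed before the `end` above. Its single successor state records whether a True has been read, so it is intended to accept exactly the words containing True.
definition one_true :: "(bool, bool) na" where
  "one_true = (False, \<lambda>a s. {a \<or> s}, \<lambda>s. s)"
lemma "accepts one_true [False, True, False]"
  by (auto simp add: accepts_def one_true_def)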

View File

@@ -1,72 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "Nondeterministic automata with epsilon transitions"
theory NAe
imports NA
begin
type_synonym ('a,'s)nae = "('a option,'s)na"
abbreviation
eps :: "('a,'s)nae \<Rightarrow> ('s * 's)set" where
"eps A \<equiv> step A None"
primrec steps :: "('a,'s)nae \<Rightarrow> 'a list \<Rightarrow> ('s * 's)set" where
"steps A [] = (eps A)\<^sup>*" |
"steps A (a#w) = (eps A)\<^sup>* O step A (Some a) O steps A w"
definition
accepts :: "('a,'s)nae \<Rightarrow> 'a list \<Rightarrow> bool" where
"accepts A w = (\<exists>q. (start A,q) \<in> steps A w \<and> fin A q)"
(* not really used:
consts delta :: "('a,'s)nae \<Rightarrow> 'a list \<Rightarrow> 's \<Rightarrow> 's set"
primrec
"delta A [] s = (eps A)\<^sup>* `` {s}"
"delta A (a#w) s = lift(delta A w) (lift(next A (Some a)) ((eps A)\<^sup>* `` {s}))"
*)
lemma steps_epsclosure[simp]: "(eps A)\<^sup>* O steps A w = steps A w"
by (cases w) (simp_all add: O_assoc[symmetric])
lemma in_steps_epsclosure:
"[| (p,q) : (eps A)\<^sup>*; (q,r) : steps A w |] ==> (p,r) : steps A w"
apply(rule steps_epsclosure[THEN equalityE])
apply blast
done
lemma epsclosure_steps: "steps A w O (eps A)\<^sup>* = steps A w"
apply(induct w)
apply simp
apply(simp add:O_assoc)
done
lemma in_epsclosure_steps:
"[| (p,q) : steps A w; (q,r) : (eps A)\<^sup>* |] ==> (p,r) : steps A w"
apply(rule epsclosure_steps[THEN equalityE])
apply blast
done
lemma steps_append[simp]: "steps A (v@w) = steps A v O steps A w"
by(induct v)(simp_all add:O_assoc[symmetric])
lemma in_steps_append[iff]:
"(p,r) : steps A (v@w) = ((p,r) : (steps A v O steps A w))"
apply(rule steps_append[THEN equalityE])
apply blast
done
(* Equivalence of steps and delta
* Use "(\<exists>x \<in> f ` A. P x) = (\<exists>a\<in>A. P(f x))" ?? *
Goal "\<forall>p. (p,q) : steps A w = (q : delta A w p)";
by (induct_tac "w" 1);
by (Simp_tac 1);
by (asm_simp_tac (simpset() addsimps [comp_def,step_def]) 1);
by (Blast_tac 1);
qed_spec_mp "steps_equiv_delta";
*)
end

View File

@@ -1,11 +0,0 @@
chapter AFP
session "Functional-Automata" (AFP) = "HOL-Library" +
options [timeout = 600]
sessions
"Regular-Sets"
theories
Functional_Automata
document_files
"root.bib"
"root.tex"

View File

@@ -1,442 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "From regular expressions directly to nondeterministic automata"
theory RegExp2NA
imports "Regular-Sets.Regular_Exp" NA
begin
type_synonym 'a bitsNA = "('a,bool list)na"
definition
"atom" :: "'a \<Rightarrow> 'a bitsNA" where
"atom a = ([True],
\<lambda>b s. if s=[True] \<and> b=a then {[False]} else {},
\<lambda>s. s=[False])"
definition
or :: "'a bitsNA \<Rightarrow> 'a bitsNA \<Rightarrow> 'a bitsNA" where
"or = (\<lambda>(ql,dl,fl)(qr,dr,fr).
([],
\<lambda>a s. case s of
[] \<Rightarrow> (True ## dl a ql) \<union> (False ## dr a qr)
| left#s \<Rightarrow> if left then True ## dl a s
else False ## dr a s,
\<lambda>s. case s of [] \<Rightarrow> (fl ql | fr qr)
| left#s \<Rightarrow> if left then fl s else fr s))"
definition
conc :: "'a bitsNA \<Rightarrow> 'a bitsNA \<Rightarrow> 'a bitsNA" where
"conc = (\<lambda>(ql,dl,fl)(qr,dr,fr).
(True#ql,
\<lambda>a s. case s of
[] \<Rightarrow> {}
| left#s \<Rightarrow> if left then (True ## dl a s) \<union>
(if fl s then False ## dr a qr else {})
else False ## dr a s,
\<lambda>s. case s of [] \<Rightarrow> False | left#s \<Rightarrow> left \<and> fl s \<and> fr qr \<or> \<not>left \<and> fr s))"
definition
epsilon :: "'a bitsNA" where
"epsilon = ([],\<lambda>a s. {}, \<lambda>s. s=[])"
definition
plus :: "'a bitsNA \<Rightarrow> 'a bitsNA" where
"plus = (\<lambda>(q,d,f). (q, \<lambda>a s. d a s \<union> (if f s then d a q else {}), f))"
definition
star :: "'a bitsNA \<Rightarrow> 'a bitsNA" where
"star A = or epsilon (plus A)"
primrec rexp2na :: "'a rexp \<Rightarrow> 'a bitsNA" where
"rexp2na Zero = ([], \<lambda>a s. {}, \<lambda>s. False)" |
"rexp2na One = epsilon" |
"rexp2na(Atom a) = atom a" |
"rexp2na(Plus r s) = or (rexp2na r) (rexp2na s)" |
"rexp2na(Times r s) = conc (rexp2na r) (rexp2na s)" |
"rexp2na(Star r) = star (rexp2na r)"
declare split_paired_all[simp]
(******************************************************)
(* atom *)
(******************************************************)
lemma fin_atom: "(fin (atom a) q) = (q = [False])"
by(simp add:atom_def)
lemma start_atom: "start (atom a) = [True]"
by(simp add:atom_def)
lemma in_step_atom_Some[simp]:
"(p,q) : step (atom a) b = (p=[True] \<and> q=[False] \<and> b=a)"
by (simp add: atom_def step_def)
lemma False_False_in_steps_atom:
"([False],[False]) : steps (atom a) w = (w = [])"
apply (induct "w")
apply simp
apply (simp add: relcomp_unfold)
done
lemma start_fin_in_steps_atom:
"(start (atom a), [False]) : steps (atom a) w = (w = [a])"
apply (induct "w")
apply (simp add: start_atom)
apply (simp add: False_False_in_steps_atom relcomp_unfold start_atom)
done
lemma accepts_atom:
"accepts (atom a) w = (w = [a])"
by (simp add: accepts_conv_steps start_fin_in_steps_atom fin_atom)
(******************************************************)
(* or *)
(******************************************************)
(***** lift True/False over fin *****)
lemma fin_or_True[iff]:
"\<And>L R. fin (or L R) (True#p) = fin L p"
by(simp add:or_def)
lemma fin_or_False[iff]:
"\<And>L R. fin (or L R) (False#p) = fin R p"
by(simp add:or_def)
(***** lift True/False over step *****)
lemma True_in_step_or[iff]:
"\<And>L R. (True#p,q) : step (or L R) a = (\<exists>r. q = True#r \<and> (p,r) \<in> step L a)"
apply (simp add:or_def step_def)
apply blast
done
lemma False_in_step_or[iff]:
"\<And>L R. (False#p,q) : step (or L R) a = (\<exists>r. q = False#r \<and> (p,r) \<in> step R a)"
apply (simp add:or_def step_def)
apply blast
done
(***** lift True/False over steps *****)
lemma lift_True_over_steps_or[iff]:
"\<And>p. (True#p,q)\<in>steps (or L R) w = (\<exists>r. q = True # r \<and> (p,r) \<in> steps L w)"
apply (induct "w")
apply force
apply force
done
lemma lift_False_over_steps_or[iff]:
"\<And>p. (False#p,q)\<in>steps (or L R) w = (\<exists>r. q = False#r \<and> (p,r)\<in>steps R w)"
apply (induct "w")
apply force
apply force
done
(** From the start **)
lemma start_step_or[iff]:
"\<And>L R. (start(or L R),q) : step(or L R) a =
(\<exists>p. (q = True#p \<and> (start L,p) : step L a) |
(q = False#p \<and> (start R,p) : step R a))"
apply (simp add:or_def step_def)
apply blast
done
lemma steps_or:
"(start(or L R), q) : steps (or L R) w =
( (w = [] \<and> q = start(or L R)) |
(w \<noteq> [] \<and> (\<exists>p. q = True # p \<and> (start L,p) : steps L w |
q = False # p \<and> (start R,p) : steps R w)))"
apply (case_tac "w")
apply (simp)
apply blast
apply (simp)
apply blast
done
lemma fin_start_or[iff]:
"\<And>L R. fin (or L R) (start(or L R)) = (fin L (start L) | fin R (start R))"
by (simp add:or_def)
lemma accepts_or[iff]:
"accepts (or L R) w = (accepts L w | accepts R w)"
apply (simp add: accepts_conv_steps steps_or)
(* get rid of case_tac: *)
apply (case_tac "w = []")
apply auto
done
(******************************************************)
(* conc *)
(******************************************************)
(** True/False in fin **)
lemma fin_conc_True[iff]:
"\<And>L R. fin (conc L R) (True#p) = (fin L p \<and> fin R (start R))"
by(simp add:conc_def)
lemma fin_conc_False[iff]:
"\<And>L R. fin (conc L R) (False#p) = fin R p"
by(simp add:conc_def)
(** True/False in step **)
lemma True_step_conc[iff]:
"\<And>L R. (True#p,q) : step (conc L R) a =
((\<exists>r. q=True#r \<and> (p,r): step L a) |
(fin L p \<and> (\<exists>r. q=False#r \<and> (start R,r) : step R a)))"
apply (simp add:conc_def step_def)
apply blast
done
lemma False_step_conc[iff]:
"\<And>L R. (False#p,q) : step (conc L R) a =
(\<exists>r. q = False#r \<and> (p,r) : step R a)"
apply (simp add:conc_def step_def)
apply blast
done
(** False in steps **)
lemma False_steps_conc[iff]:
"\<And>p. (False#p,q): steps (conc L R) w = (\<exists>r. q=False#r \<and> (p,r): steps R w)"
apply (induct "w")
apply fastforce
apply force
done
(** True in steps **)
lemma True_True_steps_concI:
"\<And>L R p. (p,q) : steps L w \<Longrightarrow> (True#p,True#q) : steps (conc L R) w"
apply (induct "w")
apply simp
apply simp
apply fast
done
lemma True_False_step_conc[iff]:
"\<And>L R. (True#p,False#q) : step (conc L R) a =
(fin L p \<and> (start R,q) : step R a)"
by simp
lemma True_steps_concD[rule_format]:
"\<forall>p. (True#p,q) : steps (conc L R) w \<longrightarrow>
((\<exists>r. (p,r) : steps L w \<and> q = True#r) \<or>
(\<exists>u a v. w = u@a#v \<and>
(\<exists>r. (p,r) : steps L u \<and> fin L r \<and>
(\<exists>s. (start R,s) : step R a \<and>
(\<exists>t. (s,t) : steps R v \<and> q = False#t)))))"
apply (induct "w")
apply simp
apply simp
apply (clarify del:disjCI)
apply (erule disjE)
apply (clarify del:disjCI)
apply (erule allE, erule impE, assumption)
apply (erule disjE)
apply blast
apply (rule disjI2)
apply (clarify)
apply simp
apply (rule_tac x = "a#u" in exI)
apply simp
apply blast
apply (rule disjI2)
apply (clarify)
apply simp
apply (rule_tac x = "[]" in exI)
apply simp
apply blast
done
lemma True_steps_conc:
"(True#p,q) : steps (conc L R) w =
((\<exists>r. (p,r) : steps L w \<and> q = True#r) \<or>
(\<exists>u a v. w = u@a#v \<and>
(\<exists>r. (p,r) : steps L u \<and> fin L r \<and>
(\<exists>s. (start R,s) : step R a \<and>
(\<exists>t. (s,t) : steps R v \<and> q = False#t)))))"
by(force dest!: True_steps_concD intro!: True_True_steps_concI)
(** starting from the start **)
lemma start_conc:
"\<And>L R. start(conc L R) = True#start L"
by (simp add:conc_def)
lemma final_conc:
"\<And>L R. fin(conc L R) p = ((fin R (start R) \<and> (\<exists>s. p = True#s \<and> fin L s)) \<or>
(\<exists>s. p = False#s \<and> fin R s))"
apply (simp add:conc_def split: list.split)
apply blast
done
lemma accepts_conc:
"accepts (conc L R) w = (\<exists>u v. w = u@v \<and> accepts L u \<and> accepts R v)"
apply (simp add: accepts_conv_steps True_steps_conc final_conc start_conc)
apply (rule iffI)
apply (clarify)
apply (erule disjE)
apply (clarify)
apply (erule disjE)
apply (rule_tac x = "w" in exI)
apply simp
apply blast
apply blast
apply (erule disjE)
apply blast
apply (clarify)
apply (rule_tac x = "u" in exI)
apply simp
apply blast
apply (clarify)
apply (case_tac "v")
apply simp
apply blast
apply simp
apply blast
done
(******************************************************)
(* epsilon *)
(******************************************************)
lemma step_epsilon[simp]: "step epsilon a = {}"
by(simp add:epsilon_def step_def)
lemma steps_epsilon: "((p,q) : steps epsilon w) = (w=[] \<and> p=q)"
by (induct "w") auto
lemma accepts_epsilon[iff]: "accepts epsilon w = (w = [])"
apply (simp add: steps_epsilon accepts_conv_steps)
apply (simp add: epsilon_def)
done
(******************************************************)
(* plus *)
(******************************************************)
lemma start_plus[simp]: "\<And>A. start (plus A) = start A"
by(simp add:plus_def)
lemma fin_plus[iff]: "\<And>A. fin (plus A) = fin A"
by(simp add:plus_def)
lemma step_plusI:
"\<And>A. (p,q) : step A a \<Longrightarrow> (p,q) : step (plus A) a"
by(simp add:plus_def step_def)
lemma steps_plusI: "\<And>p. (p,q) : steps A w \<Longrightarrow> (p,q) \<in> steps (plus A) w"
apply (induct "w")
apply simp
apply simp
apply (blast intro: step_plusI)
done
lemma step_plus_conv[iff]:
"\<And>A. (p,r): step (plus A) a =
( (p,r): step A a | fin A p \<and> (start A,r) : step A a )"
by(simp add:plus_def step_def)
lemma fin_steps_plusI:
"[| (start A,q) : steps A u; u \<noteq> []; fin A p |]
==> (p,q) : steps (plus A) u"
apply (case_tac "u")
apply blast
apply simp
apply (blast intro: steps_plusI)
done
(* reverse list induction! Complicates matters for conc? *)
lemma start_steps_plusD[rule_format]:
"\<forall>r. (start A,r) \<in> steps (plus A) w \<longrightarrow>
(\<exists>us v. w = concat us @ v \<and>
(\<forall>u\<in>set us. accepts A u) \<and>
(start A,r) \<in> steps A v)"
apply (induct w rule: rev_induct)
apply simp
apply (rule_tac x = "[]" in exI)
apply simp
apply simp
apply (clarify)
apply (erule allE, erule impE, assumption)
apply (clarify)
apply (erule disjE)
apply (rule_tac x = "us" in exI)
apply (simp)
apply blast
apply (rule_tac x = "us@[v]" in exI)
apply (simp add: accepts_conv_steps)
apply blast
done
lemma steps_star_cycle[rule_format]:
"us \<noteq> [] \<longrightarrow> (\<forall>u \<in> set us. accepts A u) \<longrightarrow> accepts (plus A) (concat us)"
apply (simp add: accepts_conv_steps)
apply (induct us rule: rev_induct)
apply simp
apply (rename_tac u us)
apply simp
apply (clarify)
apply (case_tac "us = []")
apply (simp)
apply (blast intro: steps_plusI fin_steps_plusI)
apply (clarify)
apply (case_tac "u = []")
apply (simp)
apply (blast intro: steps_plusI fin_steps_plusI)
apply (blast intro: steps_plusI fin_steps_plusI)
done
lemma accepts_plus[iff]:
"accepts (plus A) w =
(\<exists>us. us \<noteq> [] \<and> w = concat us \<and> (\<forall>u \<in> set us. accepts A u))"
apply (rule iffI)
apply (simp add: accepts_conv_steps)
apply (clarify)
apply (drule start_steps_plusD)
apply (clarify)
apply (rule_tac x = "us@[v]" in exI)
apply (simp add: accepts_conv_steps)
apply blast
apply (blast intro: steps_star_cycle)
done
(******************************************************)
(* star *)
(******************************************************)
lemma accepts_star:
"accepts (star A) w = (\<exists>us. (\<forall>u \<in> set us. accepts A u) \<and> w = concat us)"
apply(unfold star_def)
apply (rule iffI)
apply (clarify)
apply (erule disjE)
apply (rule_tac x = "[]" in exI)
apply simp
apply blast
apply force
done
(***** Correctness of r2n *****)
lemma accepts_rexp2na:
"\<And>w. accepts (rexp2na r) w = (w : lang r)"
apply (induct "r")
apply (simp add: accepts_conv_steps)
apply simp
apply (simp add: accepts_atom)
apply (simp)
apply (simp add: accepts_conc Regular_Set.conc_def)
apply (simp add: accepts_star in_star_iff_concat subset_iff Ball_def)
done
end
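
An executable spot check of accepts_rexp2na, given here as a hypothetical addition before the `end` above; Times (Atom 0) (Star (Atom 1)) denotes 0 followed by any number of 1's, so [0, 1, 1] should be accepted.
value "accepts (rexp2na (Times (Atom (0::nat)) (Star (Atom 1)))) [0, 1, 1]"
(* expected to evaluate to True *)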

View File

@@ -1,641 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
*)
section "From regular expressions to nondeterministic automata with epsilon"
theory RegExp2NAe
imports "Regular-Sets.Regular_Exp" NAe
begin
type_synonym 'a bitsNAe = "('a,bool list)nae"
definition
epsilon :: "'a bitsNAe" where
"epsilon = ([],\<lambda>a s. {}, \<lambda>s. s=[])"
definition
"atom" :: "'a \<Rightarrow> 'a bitsNAe" where
"atom a = ([True],
\<lambda>b s. if s=[True] \<and> b=Some a then {[False]} else {},
\<lambda>s. s=[False])"
definition
or :: "'a bitsNAe \<Rightarrow> 'a bitsNAe \<Rightarrow> 'a bitsNAe" where
"or = (\<lambda>(ql,dl,fl)(qr,dr,fr).
([],
\<lambda>a s. case s of
[] \<Rightarrow> if a=None then {True#ql,False#qr} else {}
| left#s \<Rightarrow> if left then True ## dl a s
else False ## dr a s,
\<lambda>s. case s of [] \<Rightarrow> False | left#s \<Rightarrow> if left then fl s else fr s))"
definition
conc :: "'a bitsNAe \<Rightarrow> 'a bitsNAe \<Rightarrow> 'a bitsNAe" where
"conc = (\<lambda>(ql,dl,fl)(qr,dr,fr).
(True#ql,
\<lambda>a s. case s of
[] \<Rightarrow> {}
| left#s \<Rightarrow> if left then (True ## dl a s) \<union>
(if fl s \<and> a=None then {False#qr} else {})
else False ## dr a s,
\<lambda>s. case s of [] \<Rightarrow> False | left#s \<Rightarrow> \<not>left \<and> fr s))"
definition
star :: "'a bitsNAe \<Rightarrow> 'a bitsNAe" where
"star = (\<lambda>(q,d,f).
([],
\<lambda>a s. case s of
[] \<Rightarrow> if a=None then {True#q} else {}
| left#s \<Rightarrow> if left then (True ## d a s) \<union>
(if f s \<and> a=None then {True#q} else {})
else {},
\<lambda>s. case s of [] \<Rightarrow> True | left#s \<Rightarrow> left \<and> f s))"
primrec rexp2nae :: "'a rexp \<Rightarrow> 'a bitsNAe" where
"rexp2nae Zero = ([], \<lambda>a s. {}, \<lambda>s. False)" |
"rexp2nae One = epsilon" |
"rexp2nae(Atom a) = atom a" |
"rexp2nae(Plus r s) = or (rexp2nae r) (rexp2nae s)" |
"rexp2nae(Times r s) = conc (rexp2nae r) (rexp2nae s)" |
"rexp2nae(Star r) = star (rexp2nae r)"
declare split_paired_all[simp]
(******************************************************)
(* epsilon *)
(******************************************************)
lemma step_epsilon[simp]: "step epsilon a = {}"
by(simp add:epsilon_def step_def)
lemma steps_epsilon: "((p,q) : steps epsilon w) = (w=[] \<and> p=q)"
by (induct "w") auto
lemma accepts_epsilon[simp]: "accepts epsilon w = (w = [])"
apply (simp add: steps_epsilon accepts_def)
apply (simp add: epsilon_def)
done
(******************************************************)
(* atom *)
(******************************************************)
lemma fin_atom: "(fin (atom a) q) = (q = [False])"
by(simp add:atom_def)
lemma start_atom: "start (atom a) = [True]"
by(simp add:atom_def)
(* Use {x. False} = {}? *)
lemma eps_atom[simp]:
"eps(atom a) = {}"
by (simp add:atom_def step_def)
lemma in_step_atom_Some[simp]:
"(p,q) : step (atom a) (Some b) = (p=[True] \<and> q=[False] \<and> b=a)"
by (simp add:atom_def step_def)
lemma False_False_in_steps_atom:
"([False],[False]) : steps (atom a) w = (w = [])"
apply (induct "w")
apply (simp)
apply (simp add: relcomp_unfold)
done
lemma start_fin_in_steps_atom:
"(start (atom a), [False]) : steps (atom a) w = (w = [a])"
apply (induct "w")
apply (simp add: start_atom rtrancl_empty)
apply (simp add: False_False_in_steps_atom relcomp_unfold start_atom)
done
lemma accepts_atom: "accepts (atom a) w = (w = [a])"
by (simp add: accepts_def start_fin_in_steps_atom fin_atom)
(******************************************************)
(* or *)
(******************************************************)
(***** lift True/False over fin *****)
lemma fin_or_True[iff]:
"\<And>L R. fin (or L R) (True#p) = fin L p"
by(simp add:or_def)
lemma fin_or_False[iff]:
"\<And>L R. fin (or L R) (False#p) = fin R p"
by(simp add:or_def)
(***** lift True/False over step *****)
lemma True_in_step_or[iff]:
"\<And>L R. (True#p,q) : step (or L R) a = (\<exists>r. q = True#r \<and> (p,r) : step L a)"
apply (simp add:or_def step_def)
apply blast
done
lemma False_in_step_or[iff]:
"\<And>L R. (False#p,q) : step (or L R) a = (\<exists>r. q = False#r \<and> (p,r) : step R a)"
apply (simp add:or_def step_def)
apply blast
done
(***** lift True/False over epsclosure *****)
lemma lemma1a:
"(tp,tq) : (eps(or L R))\<^sup>* \<Longrightarrow>
(\<And>p. tp = True#p \<Longrightarrow> \<exists>q. (p,q) : (eps L)\<^sup>* \<and> tq = True#q)"
apply (induct rule:rtrancl_induct)
apply (blast)
apply (clarify)
apply (simp)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lemma1b:
"(tp,tq) : (eps(or L R))\<^sup>* \<Longrightarrow>
(\<And>p. tp = False#p \<Longrightarrow> \<exists>q. (p,q) : (eps R)\<^sup>* \<and> tq = False#q)"
apply (induct rule:rtrancl_induct)
apply (blast)
apply (clarify)
apply (simp)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lemma2a:
"(p,q) : (eps L)\<^sup>* \<Longrightarrow> (True#p, True#q) : (eps(or L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lemma2b:
"(p,q) : (eps R)\<^sup>* \<Longrightarrow> (False#p, False#q) : (eps(or L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma True_epsclosure_or[iff]:
"(True#p,q) : (eps(or L R))\<^sup>* = (\<exists>r. q = True#r \<and> (p,r) : (eps L)\<^sup>*)"
by (blast dest: lemma1a lemma2a)
lemma False_epsclosure_or[iff]:
"(False#p,q) : (eps(or L R))\<^sup>* = (\<exists>r. q = False#r \<and> (p,r) : (eps R)\<^sup>*)"
by (blast dest: lemma1b lemma2b)
(***** lift True/False over steps *****)
lemma lift_True_over_steps_or[iff]:
"\<And>p. (True#p,q):steps (or L R) w = (\<exists>r. q = True # r \<and> (p,r):steps L w)"
apply (induct "w")
apply auto
apply force
done
lemma lift_False_over_steps_or[iff]:
"\<And>p. (False#p,q):steps (or L R) w = (\<exists>r. q = False#r \<and> (p,r):steps R w)"
apply (induct "w")
apply auto
apply (force)
done
(***** Epsilon closure of start state *****)
lemma unfold_rtrancl2:
"R\<^sup>* = Id \<union> (R O R\<^sup>*)"
apply (rule set_eqI)
apply (simp)
apply (rule iffI)
apply (erule rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
apply (blast intro: converse_rtrancl_into_rtrancl)
done
lemma in_unfold_rtrancl2:
"(p,q) : R\<^sup>* = (q = p | (\<exists>r. (p,r) : R \<and> (r,q) : R\<^sup>*))"
apply (rule unfold_rtrancl2[THEN equalityE])
apply (blast)
done
lemmas [iff] = in_unfold_rtrancl2[where ?p = "start(or L R)"] for L R
lemma start_eps_or[iff]:
"\<And>L R. (start(or L R),q) : eps(or L R) =
(q = True#start L | q = False#start R)"
by (simp add:or_def step_def)
lemma not_start_step_or_Some[iff]:
"\<And>L R. (start(or L R),q) \<notin> step (or L R) (Some a)"
by (simp add:or_def step_def)
lemma steps_or:
"(start(or L R), q) : steps (or L R) w =
( (w = [] \<and> q = start(or L R)) |
(\<exists>p. q = True # p \<and> (start L,p) : steps L w |
q = False # p \<and> (start R,p) : steps R w) )"
apply (case_tac "w")
apply (simp)
apply (blast)
apply (simp)
apply (blast)
done
lemma start_or_not_final[iff]:
"\<And>L R. \<not> fin (or L R) (start(or L R))"
by (simp add:or_def)
lemma accepts_or:
"accepts (or L R) w = (accepts L w | accepts R w)"
apply (simp add:accepts_def steps_or)
apply auto
done
(******************************************************)
(* conc *)
(******************************************************)
(** True/False in fin **)
lemma fin_conc_True[iff]:
"\<And>L R. fin (conc L R) (True#p) = False"
by (simp add:conc_def)
lemma fin_conc_False[iff]:
"\<And>L R. fin (conc L R) (False#p) = fin R p"
by (simp add:conc_def)
(** True/False in step **)
lemma True_step_conc[iff]:
"\<And>L R. (True#p,q) : step (conc L R) a =
((\<exists>r. q=True#r \<and> (p,r): step L a) |
(fin L p \<and> a=None \<and> q=False#start R))"
by (simp add:conc_def step_def) (blast)
lemma False_step_conc[iff]:
"\<And>L R. (False#p,q) : step (conc L R) a =
(\<exists>r. q = False#r \<and> (p,r) : step R a)"
by (simp add:conc_def step_def) (blast)
(** False in epsclosure **)
lemma lemma1b':
"(tp,tq) : (eps(conc L R))\<^sup>* \<Longrightarrow>
(\<And>p. tp = False#p \<Longrightarrow> \<exists>q. (p,q) : (eps R)\<^sup>* \<and> tq = False#q)"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lemma2b':
"(p,q) : (eps R)\<^sup>* \<Longrightarrow> (False#p, False#q) : (eps(conc L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma False_epsclosure_conc[iff]:
"((False # p, q) : (eps (conc L R))\<^sup>*) =
(\<exists>r. q = False # r \<and> (p, r) : (eps R)\<^sup>*)"
apply (rule iffI)
apply (blast dest: lemma1b')
apply (blast dest: lemma2b')
done
(** False in steps **)
lemma False_steps_conc[iff]:
"\<And>p. (False#p,q): steps (conc L R) w = (\<exists>r. q=False#r \<and> (p,r): steps R w)"
apply (induct "w")
apply (simp)
apply (simp)
apply (fast) (*MUCH faster than blast*)
done
(** True in epsclosure **)
lemma True_True_eps_concI:
"(p,q): (eps L)\<^sup>* \<Longrightarrow> (True#p,True#q) : (eps(conc L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma True_True_steps_concI:
"\<And>p. (p,q) : steps L w \<Longrightarrow> (True#p,True#q) : steps (conc L R) w"
apply (induct "w")
apply (simp add: True_True_eps_concI)
apply (simp)
apply (blast intro: True_True_eps_concI)
done
lemma lemma1a':
"(tp,tq) : (eps(conc L R))\<^sup>* \<Longrightarrow>
(\<And>p. tp = True#p \<Longrightarrow>
(\<exists>q. tq = True#q \<and> (p,q) : (eps L)\<^sup>*) |
(\<exists>q r. tq = False#q \<and> (p,r):(eps L)\<^sup>* \<and> fin L r \<and> (start R,q) : (eps R)\<^sup>*))"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lemma2a':
"(p, q) : (eps L)\<^sup>* \<Longrightarrow> (True#p, True#q) : (eps(conc L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma lem:
"\<And>L R. (p,q) : step R None \<Longrightarrow> (False#p, False#q) : step (conc L R) None"
by(simp add: conc_def step_def)
lemma lemma2b'':
"(p,q) : (eps R)\<^sup>* \<Longrightarrow> (False#p, False#q) : (eps(conc L R))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (drule lem)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma True_False_eps_concI:
"\<And>L R. fin L p \<Longrightarrow> (True#p, False#start R) : eps(conc L R)"
by(simp add: conc_def step_def)
lemma True_epsclosure_conc[iff]:
"((True#p,q) \<in> (eps(conc L R))\<^sup>*) =
((\<exists>r. (p,r) \<in> (eps L)\<^sup>* \<and> q = True#r) \<or>
(\<exists>r. (p,r) \<in> (eps L)\<^sup>* \<and> fin L r \<and>
(\<exists>s. (start R, s) \<in> (eps R)\<^sup>* \<and> q = False#s)))"
apply (rule iffI)
apply (blast dest: lemma1a')
apply (erule disjE)
apply (blast intro: lemma2a')
apply (clarify)
apply (rule rtrancl_trans)
apply (erule lemma2a')
apply (rule converse_rtrancl_into_rtrancl)
apply (erule True_False_eps_concI)
apply (erule lemma2b'')
done
(** True in steps **)
lemma True_steps_concD[rule_format]:
"\<forall>p. (True#p,q) : steps (conc L R) w \<longrightarrow>
((\<exists>r. (p,r) : steps L w \<and> q = True#r) \<or>
(\<exists>u v. w = u@v \<and> (\<exists>r. (p,r) \<in> steps L u \<and> fin L r \<and>
(\<exists>s. (start R,s) \<in> steps R v \<and> q = False#s))))"
apply (induct "w")
apply (simp)
apply (simp)
apply (clarify del: disjCI)
apply (erule disjE)
apply (clarify del: disjCI)
apply (erule disjE)
apply (clarify del: disjCI)
apply (erule allE, erule impE, assumption)
apply (erule disjE)
apply (blast)
apply (rule disjI2)
apply (clarify)
apply (simp)
apply (rule_tac x = "a#u" in exI)
apply (simp)
apply (blast)
apply (blast)
apply (rule disjI2)
apply (clarify)
apply (simp)
apply (rule_tac x = "[]" in exI)
apply (simp)
apply (blast)
done
lemma True_steps_conc:
"(True#p,q) \<in> steps (conc L R) w =
((\<exists>r. (p,r) \<in> steps L w \<and> q = True#r) |
(\<exists>u v. w = u@v \<and> (\<exists>r. (p,r) : steps L u \<and> fin L r \<and>
(\<exists>s. (start R,s) : steps R v \<and> q = False#s))))"
by (blast dest: True_steps_concD
intro: True_True_steps_concI in_steps_epsclosure)
(** starting from the start **)
lemma start_conc:
"\<And>L R. start(conc L R) = True#start L"
by (simp add: conc_def)
lemma final_conc:
"\<And>L R. fin(conc L R) p = (\<exists>s. p = False#s \<and> fin R s)"
by (simp add:conc_def split: list.split)
lemma accepts_conc:
"accepts (conc L R) w = (\<exists>u v. w = u@v \<and> accepts L u \<and> accepts R v)"
apply (simp add: accepts_def True_steps_conc final_conc start_conc)
apply (blast)
done
(******************************************************)
(* star *)
(******************************************************)
lemma True_in_eps_star[iff]:
"\<And>A. (True#p,q) \<in> eps(star A) =
( (\<exists>r. q = True#r \<and> (p,r) \<in> eps A) \<or> (fin A p \<and> q = True#start A) )"
by (simp add:star_def step_def) (blast)
lemma True_True_step_starI:
"\<And>A. (p,q) : step A a \<Longrightarrow> (True#p, True#q) : step (star A) a"
by (simp add:star_def step_def)
lemma True_True_eps_starI:
"(p,r) : (eps A)\<^sup>* \<Longrightarrow> (True#p, True#r) : (eps(star A))\<^sup>*"
apply (induct rule: rtrancl_induct)
apply (blast)
apply (blast intro: True_True_step_starI rtrancl_into_rtrancl)
done
lemma True_start_eps_starI:
"\<And>A. fin A p \<Longrightarrow> (True#p,True#start A) : eps(star A)"
by (simp add:star_def step_def)
lemma lem':
"(tp,s) : (eps(star A))\<^sup>* \<Longrightarrow> (\<forall>p. tp = True#p \<longrightarrow>
(\<exists>r. ((p,r) \<in> (eps A)\<^sup>* \<or>
(\<exists>q. (p,q) \<in> (eps A)\<^sup>* \<and> fin A q \<and> (start A,r) : (eps A)\<^sup>*)) \<and>
s = True#r))"
apply (induct rule: rtrancl_induct)
apply (simp)
apply (clarify)
apply (simp)
apply (blast intro: rtrancl_into_rtrancl)
done
lemma True_eps_star[iff]:
"((True#p,s) \<in> (eps(star A))\<^sup>*) =
(\<exists>r. ((p,r) \<in> (eps A)\<^sup>* \<or>
(\<exists>q. (p,q) : (eps A)\<^sup>* \<and> fin A q \<and> (start A,r) : (eps A)\<^sup>*)) \<and>
s = True#r)"
apply (rule iffI)
apply (drule lem')
apply (blast)
(* Why can't blast do the rest? *)
apply (clarify)
apply (erule disjE)
apply (erule True_True_eps_starI)
apply (clarify)
apply (rule rtrancl_trans)
apply (erule True_True_eps_starI)
apply (rule rtrancl_trans)
apply (rule r_into_rtrancl)
apply (erule True_start_eps_starI)
apply (erule True_True_eps_starI)
done
(** True in step Some **)
lemma True_step_star[iff]:
"\<And>A. (True#p,r) \<in> step (star A) (Some a) =
(\<exists>q. (p,q) \<in> step A (Some a) \<and> r=True#q)"
by (simp add:star_def step_def) (blast)
(** True in steps **)
(* reverse list induction! Complicates matters for conc? *)
lemma True_start_steps_starD[rule_format]:
"\<forall>rr. (True#start A,rr) \<in> steps (star A) w \<longrightarrow>
(\<exists>us v. w = concat us @ v \<and>
(\<forall>u\<in>set us. accepts A u) \<and>
(\<exists>r. (start A,r) \<in> steps A v \<and> rr = True#r))"
apply (induct w rule: rev_induct)
apply (simp)
apply (clarify)
apply (rule_tac x = "[]" in exI)
apply (erule disjE)
apply (simp)
apply (clarify)
apply (simp)
apply (simp add: O_assoc[symmetric] epsclosure_steps)
apply (clarify)
apply (erule allE, erule impE, assumption)
apply (clarify)
apply (erule disjE)
apply (rule_tac x = "us" in exI)
apply (rule_tac x = "v@[x]" in exI)
apply (simp add: O_assoc[symmetric] epsclosure_steps)
apply (blast)
apply (clarify)
apply (rule_tac x = "us@[v@[x]]" in exI)
apply (rule_tac x = "[]" in exI)
apply (simp add: accepts_def)
apply (blast)
done
lemma True_True_steps_starI:
"\<And>p. (p,q) : steps A w \<Longrightarrow> (True#p,True#q) : steps (star A) w"
apply (induct "w")
apply (simp)
apply (simp)
apply (blast intro: True_True_eps_starI True_True_step_starI)
done
lemma steps_star_cycle:
"(\<forall>u \<in> set us. accepts A u) \<Longrightarrow>
(True#start A,True#start A) \<in> steps (star A) (concat us)"
apply (induct "us")
apply (simp add:accepts_def)
apply (simp add:accepts_def)
by(blast intro: True_True_steps_starI True_start_eps_starI in_epsclosure_steps)
(* Better stated directly with start(star A)? Loop in star A back to start(star A)?*)
lemma True_start_steps_star:
"(True#start A,rr) : steps (star A) w =
(\<exists>us v. w = concat us @ v \<and>
(\<forall>u\<in>set us. accepts A u) \<and>
(\<exists>r. (start A,r) \<in> steps A v \<and> rr = True#r))"
apply (rule iffI)
apply (erule True_start_steps_starD)
apply (clarify)
apply (blast intro: steps_star_cycle True_True_steps_starI)
done
(** the start state **)
lemma start_step_star[iff]:
"\<And>A. (start(star A),r) : step (star A) a = (a=None \<and> r = True#start A)"
by (simp add:star_def step_def)
lemmas epsclosure_start_step_star =
in_unfold_rtrancl2[where ?p = "start (star A)"] for A
lemma start_steps_star:
"(start(star A),r) : steps (star A) w =
((w=[] \<and> r= start(star A)) | (True#start A,r) : steps (star A) w)"
apply (rule iffI)
apply (case_tac "w")
apply (simp add: epsclosure_start_step_star)
apply (simp)
apply (clarify)
apply (simp add: epsclosure_start_step_star)
apply (blast)
apply (erule disjE)
apply (simp)
apply (blast intro: in_steps_epsclosure)
done
lemma fin_star_True[iff]: "\<And>A. fin (star A) (True#p) = fin A p"
by (simp add:star_def)
lemma fin_star_start[iff]: "\<And>A. fin (star A) (start(star A))"
by (simp add:star_def)
(* too complex! Simpler if loop back to start(star A)? *)
lemma accepts_star:
"accepts (star A) w =
(\<exists>us. (\<forall>u \<in> set(us). accepts A u) \<and> (w = concat us))"
apply(unfold accepts_def)
apply (simp add: start_steps_star True_start_steps_star)
apply (rule iffI)
apply (clarify)
apply (erule disjE)
apply (clarify)
apply (simp)
apply (rule_tac x = "[]" in exI)
apply (simp)
apply (clarify)
apply (rule_tac x = "us@[v]" in exI)
apply (simp add: accepts_def)
apply (blast)
apply (clarify)
apply (rule_tac xs = "us" in rev_exhaust)
apply (simp)
apply (blast)
apply (clarify)
apply (simp add: accepts_def)
apply (blast)
done
(***** Correctness of r2n *****)
lemma accepts_rexp2nae:
"\<And>w. accepts (rexp2nae r) w = (w : lang r)"
apply (induct "r")
apply (simp add: accepts_def)
apply simp
apply (simp add: accepts_atom)
apply (simp add: accepts_or)
apply (simp add: accepts_conc Regular_Set.conc_def)
apply (simp add: accepts_star in_star_iff_concat subset_iff Ball_def)
done
end

View File

@@ -1,233 +0,0 @@
(* Author: Tobias Nipkow
Copyright 1998 TUM
To generate a regular expression, the alphabet must be finite.
regexp needs to be supplied with an 'a list for a unique order
add_Atom d i j r a = (if d a i = j then Union r (Atom a) else r)
atoms d i j as = foldl (add_Atom d i j) Empty as
regexp as d i j 0 = (if i=j then Union (Star Empty) (atoms d i j as)
else atoms d i j as
*)
section "From deterministic automata to regular sets"
theory RegSet_of_nat_DA
imports "Regular-Sets.Regular_Set" DA
begin
type_synonym 'a nat_next = "'a \<Rightarrow> nat \<Rightarrow> nat"
abbreviation
deltas :: "'a nat_next \<Rightarrow> 'a list \<Rightarrow> nat \<Rightarrow> nat" where
"deltas \<equiv> foldl2"
primrec trace :: "'a nat_next \<Rightarrow> nat \<Rightarrow> 'a list \<Rightarrow> nat list" where
"trace d i [] = []" |
"trace d i (x#xs) = d x i # trace d (d x i) xs"
(* conversion a la Warshall *)
primrec regset :: "'a nat_next \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> 'a list set" where
"regset d i j 0 = (if i=j then insert [] {[a] | a. d a i = j}
else {[a] | a. d a i = j})" |
"regset d i j (Suc k) =
regset d i j k \<union>
(regset d i k k) @@ (star(regset d k k k)) @@ (regset d k j k)"
definition
regset_of_DA :: "('a,nat)da \<Rightarrow> nat \<Rightarrow> 'a list set" where
"regset_of_DA A k = (\<Union>j\<in>{j. j<k \<and> fin A j}. regset (next A) (start A) j k)"
definition
bounded :: "'a nat_next \<Rightarrow> nat \<Rightarrow> bool" where
"bounded d k = (\<forall>n. n < k \<longrightarrow> (\<forall>x. d x n < k))"
declare
in_set_butlast_appendI[simp,intro] less_SucI[simp] image_eqI[simp]
(* Lists *)
lemma butlast_empty[iff]:
"(butlast xs = []) = (case xs of [] \<Rightarrow> True | y#ys \<Rightarrow> ys=[])"
by (cases xs) simp_all
lemma in_set_butlast_concatI:
"x:set(butlast xs) \<Longrightarrow> xs:set xss \<Longrightarrow> x:set(butlast(concat xss))"
apply (induct "xss")
apply simp
apply (simp add: butlast_append del: ball_simps)
apply (rule conjI)
apply (clarify)
apply (erule disjE)
apply (blast)
apply (subgoal_tac "xs=[]")
apply simp
apply (blast)
apply (blast dest: in_set_butlastD)
done
(* Regular sets *)
(* The main lemma:
how to decompose a trace into a prefix, a list of loops and a suffix.
*)
lemma decompose[rule_format]:
"\<forall>i. k \<in> set(trace d i xs) \<longrightarrow> (\<exists>pref mids suf.
xs = pref @ concat mids @ suf \<and>
deltas d pref i = k \<and> (\<forall>n\<in>set(butlast(trace d i pref)). n \<noteq> k) \<and>
(\<forall>mid\<in>set mids. (deltas d mid k = k) \<and>
(\<forall>n\<in>set(butlast(trace d k mid)). n \<noteq> k)) \<and>
(\<forall>n\<in>set(butlast(trace d k suf)). n \<noteq> k))"
apply (induct "xs")
apply (simp)
apply (rename_tac a as)
apply (intro strip)
apply (case_tac "d a i = k")
apply (rule_tac x = "[a]" in exI)
apply simp
apply (case_tac "k : set(trace d (d a i) as)")
apply (erule allE)
apply (erule impE)
apply (assumption)
apply (erule exE)+
apply (rule_tac x = "pref#mids" in exI)
apply (rule_tac x = "suf" in exI)
apply simp
apply (rule_tac x = "[]" in exI)
apply (rule_tac x = "as" in exI)
apply simp
apply (blast dest: in_set_butlastD)
apply simp
apply (erule allE)
apply (erule impE)
apply (assumption)
apply (erule exE)+
apply (rule_tac x = "a#pref" in exI)
apply (rule_tac x = "mids" in exI)
apply (rule_tac x = "suf" in exI)
apply simp
done
lemma length_trace[simp]: "\<And>i. length(trace d i xs) = length xs"
by (induct "xs") simp_all
lemma deltas_append[simp]:
"\<And>i. deltas d (xs@ys) i = deltas d ys (deltas d xs i)"
by (induct "xs") simp_all
lemma trace_append[simp]:
"\<And>i. trace d i (xs@ys) = trace d i xs @ trace d (deltas d xs i) ys"
by (induct "xs") simp_all
lemma trace_concat[simp]:
"(\<forall>xs \<in> set xss. deltas d xs i = i) \<Longrightarrow>
trace d i (concat xss) = concat (map (trace d i) xss)"
by (induct "xss") simp_all
lemma trace_is_Nil[simp]: "\<And>i. (trace d i xs = []) = (xs = [])"
by (case_tac "xs") simp_all
lemma trace_is_Cons_conv[simp]:
"(trace d i xs = n#ns) =
(case xs of [] \<Rightarrow> False | y#ys \<Rightarrow> n = d y i \<and> ns = trace d n ys)"
apply (case_tac "xs")
apply simp_all
apply (blast)
done
lemma set_trace_conv:
"\<And>i. set(trace d i xs) =
(if xs=[] then {} else insert(deltas d xs i)(set(butlast(trace d i xs))))"
apply (induct "xs")
apply (simp)
apply (simp add: insert_commute)
done
lemma deltas_concat[simp]:
"(\<forall>mid\<in>set mids. deltas d mid k = k) \<Longrightarrow> deltas d (concat mids) k = k"
by (induct mids) simp_all
lemma lem: "[| n < Suc k; n \<noteq> k |] ==> n < k"
by arith
lemma regset_spec:
"\<And>i j xs. xs \<in> regset d i j k =
((\<forall>n\<in>set(butlast(trace d i xs)). n < k) \<and> deltas d xs i = j)"
apply (induct k)
apply(simp split: list.split)
apply(fastforce)
apply (simp add: conc_def)
apply (rule iffI)
apply (erule disjE)
apply simp
apply (erule exE conjE)+
apply simp
apply (subgoal_tac
"(\<forall>m\<in>set(butlast(trace d k xsb)). m < Suc k) \<and> deltas d xsb k = k")
apply (simp add: set_trace_conv butlast_append ball_Un)
apply (erule star_induct)
apply (simp)
apply (simp add: set_trace_conv butlast_append ball_Un)
apply (case_tac "k : set(butlast(trace d i xs))")
prefer 2 apply (rule disjI1)
apply (blast intro:lem)
apply (rule disjI2)
apply (drule in_set_butlastD[THEN decompose])
apply (clarify)
apply (rule_tac x = "pref" in exI)
apply simp
apply (rule conjI)
apply (rule ballI)
apply (rule lem)
prefer 2 apply simp
apply (drule bspec) prefer 2 apply assumption
apply simp
apply (rule_tac x = "concat mids" in exI)
apply (simp)
apply (rule conjI)
apply (rule concat_in_star)
apply (clarsimp simp: subset_iff)
apply (rule lem)
prefer 2 apply simp
apply (drule bspec) prefer 2 apply assumption
apply (simp add: image_eqI in_set_butlast_concatI)
apply (rule ballI)
apply (rule lem)
apply auto
done
lemma trace_below:
"bounded d k \<Longrightarrow> \<forall>i. i < k \<longrightarrow> (\<forall>n\<in>set(trace d i xs). n < k)"
apply (unfold bounded_def)
apply (induct "xs")
apply simp
apply (simp (no_asm))
apply (blast)
done
lemma regset_below:
"[| bounded d k; i < k; j < k |] ==>
regset d i j k = {xs. deltas d xs i = j}"
apply (rule set_eqI)
apply (simp add: regset_spec)
apply (blast dest: trace_below in_set_butlastD)
done
lemma deltas_below:
"\<And>i. bounded d k \<Longrightarrow> i < k \<Longrightarrow> deltas d w i < k"
apply (unfold bounded_def)
apply (induct "w")
apply simp_all
done
lemma regset_DA_equiv:
"[| bounded (next A) k; start A < k; j < k |] ==>
w : regset_of_DA A k = accepts A w"
apply(unfold regset_of_DA_def)
apply (simp cong: conj_cong
add: regset_below deltas_below accepts_def delta_def)
done
end

View File

@@ -1,6 +0,0 @@
@inproceedings{Nipkow-TPHOLs98,author={Tobias Nipkow},
title={Verified Lexical Analysis},
booktitle={Theorem Proving in Higher Order Logics},
editor={J. Grundy and M. Newey},
publisher=Springer,series=LNCS,volume={1479},pages={1--15},year=1998,
note={\url{http://www4.informatik.tu-muenchen.de/~nipkow/pubs/tphols98.html}}}

View File

@@ -1,54 +0,0 @@
\documentclass[11pt,a4paper]{article}
\usepackage{isabelle,isabellesym}
% this should be the last package used
\usepackage{pdfsetup}
\begin{document}
\title{Functional Automata}
\author{Tobias Nipkow}
\maketitle
\begin{abstract}
This theory defines deterministic and nondeterministic automata in a
functional representation: the transition function/relation and the finality
predicate are just functions. Hence the state space may be infinite. It is
shown how to convert regular expressions into such automata. A scanner
(generator) is implemented with the help of functional automata: the scanner
chops the input up into longest recognized substrings. Finally we also show
how to convert a certain subclass of functional automata (essentially the
finite deterministic ones) into regular sets.
\end{abstract}
\section{Overview}
The theories are structured as follows:
\begin{itemize}
\item Automata:
\texttt{AutoProj}, \texttt{NA}, \texttt{NAe}, \texttt{DA}, \texttt{Automata}
\item Conversion of regular expressions into automata:\\
\texttt{RegExp2NA}, \texttt{RegExp2NAe}, \texttt{AutoRegExp}.
\item Scanning: \texttt{MaxPrefix}, \texttt{MaxChop}, \texttt{AutoMaxChop}.
\end{itemize}
For a full description see \cite{Nipkow-TPHOLs98}.
In contrast to that paper, the latest version of the theories provides a
fully executable scanner generator. The non-executable bits (transitive
closure) have been eliminated by going from regular expressions directly to
nondeterministic automata, thus bypassing epsilon-moves.
Not described in the paper is the conversion of certain functional automata
(essentially the finite deterministic ones) into regular sets contained in
\texttt{RegSet\_of\_nat\_DA}.
% include generated text of all theories
\input{session}
\bibliographystyle{abbrv}
\bibliography{root}
\end{document}

View File

@@ -6,9 +6,9 @@ well as formal development.
## Prerequisites
-Isabelle/DOF requires [Isabelle 2017](http://isabelle.in.tum.de/website-Isabelle2017/).
-Please download the Isabelle 2017 distribution for your operating
-system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2017/).
+Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
+Please download the Isabelle 2018 distribution for your operating
+system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
## Installation
@@ -22,7 +22,7 @@ If a specific Isabelle version should be used (i.e., not the default
one), the full path to the ``isabelle`` command needs to be passed as
using the ``-i`` command line argument of the ``install`` script:
```console
-foo@bar:~$ ./install -i /usr/local/Isabelle2017/bin/isabelle
+foo@bar:~$ ./install -i /usr/local/Isabelle2018/bin/isabelle
```
For further command line options of the installer, please use the
@@ -36,22 +36,22 @@ foo@bar:~$ ./install -h
The installer will
* apply a patch to Isabelle that is necessary to use Isabelle/DOF.
If this patch installation fails, you need to manually replace
the file ``Isabelle2017/src/Pure/Thy/thy_output.ML`` in the Isabelle
the file ``Isabelle2018/src/Pure/Thy/thy_output.ML`` in the Isabelle
distribution with the file ``patches/thy_output.ML`` from the
Isabelle/DOF distribution:
```console
cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
```
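To double-check that the replacement took effect, the installed file can be compared against the patch shipped with Isabelle/DOF (an illustrative sanity check, not part of the installer):
```console
diff patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/thy_output.ML \
  && echo "thy_output.ML patch is in place"
```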
* install required entries from the [AFP](https://www.isa-afp.org). If this
installation fails, you need to manually install the AFP for Isabelle 2017 as follows:
Download the [AFP for Isabelle 2017](https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz")
installation fails, you need to manually install the AFP for Isabelle 2018 as follows:
Download the [AFP for Isabelle 2018](https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz)
and follow the [instructions for installing the AFP as Isabelle
component](https://www.isa-afp.org/using.html). If you have extracted
the AFP archive into the directory `/home/myself/afp`, you should
run the following command to make the AFP sessions available to
Isabelle via its session `ROOTS` file:
```console
echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2017/ROOTS
echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2018/ROOTS
```
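To confirm that the entry was added, simply inspect the file (path as in the example above):
```console
cat ~/.isabelle/Isabelle2018/ROOTS
```
It should list `/home/myself/afp/thys` among its entries.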
* install the Isabelle/DOF-plugin into the Isabelle user directory
(the exact location depends on the Isabelle version).

Binary file not shown.

install
View File

@@ -30,14 +30,14 @@
shopt -s nocasematch
# Global configuration
ISABELLE_VERSION="Isabelle2017: October 2017"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2017/"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz"
ISABELLE_VERSION="Isabelle2018: August 2018"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz"
ISABELLE=`which isabelle`
GEN_DIR=document-generator
PROG=`echo $0 | sed 's|.*/||'`;
ISABELLE_VERSION=`$ISABELLE version`
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
SKIP="false"
VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
for i in $VARS; do
@@ -69,14 +69,14 @@ exit_error() {
check_isabelle_version() {
echo "* Checking Isabelle version:"
if [ "$ISABELLE_VERSION" != "$ISABELLE_VERSION" ]; then
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
echo " WARNING:"
echo " The version of Isabelle (i.e., $ISABELLE_VERSION) you are using"
echo " The version of Isabelle (i.e., $ACTUAL_ISABELLE_VERSION) you are using"
echo " IS NOT SUPPORTED"
echo " by the current version of Isabelle/DOF. Please install a supported"
echo " version of Isabelle and rerun the install script, providing the"
echo " the \"isabelle\" command as argument."
echo " Isabelle 2017 can be obtained from:"
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
echo " $ISABELLE_URL"
echo
read -p " Still continue (y/N)? " -n 1 -r
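The hunk above untangles a variable clash: the script used to overwrite the expected version string in `ISABELLE_VERSION` with the output of `isabelle version`, so the guard `[ "$ISABELLE_VERSION" != "$ISABELLE_VERSION" ]` compared a value with itself and could never fire. A minimal standalone sketch of the corrected pattern (variable names as in the script):
```console
ISABELLE_VERSION="Isabelle2018: August 2018"   # version supported by Isabelle/DOF
ACTUAL_ISABELLE_VERSION=`isabelle version`     # version actually found on the PATH
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
    echo "WARNING: unsupported Isabelle version: $ACTUAL_ISABELLE_VERSION"
fi
```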
@@ -109,7 +109,7 @@ check_afp_entries() {
echo " Trying to install AFP (this might take a few *minutes*) ...."
extract=""
for e in $missing; do
extract="$extract afp-2018-08-14/thys/$e"
extract="$extract afp-2019-06-04/thys/$e"
done
mkdir -p .afp
if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
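For a single missing entry, the download-and-extract step performed here amounts to something like the following (the entry name `Regular-Sets` is purely illustrative; the URL and archive prefix are the ones configured above):
```console
mkdir -p .afp
curl -s -L https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz \
  | tar zxf - -C .afp afp-2019-06-04/thys/Regular-Sets
```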

View File

@@ -1,686 +0,0 @@
(* Title: Pure/Thy/thy_output.ML
Author: Markus Wenzel, TU Muenchen
Theory document output with antiquotations.
*)
signature THY_OUTPUT =
sig
val display: bool Config.T
val quotes: bool Config.T
val margin: int Config.T
val indent: int Config.T
val source: bool Config.T
val break: bool Config.T
val modes: string Config.T
val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context
val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory
val check_command: Proof.context -> xstring * Position.T -> string
val check_option: Proof.context -> xstring * Position.T -> string
val print_antiquotations: bool -> Proof.context -> unit
val antiquotation: binding -> 'a context_parser ->
({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) ->
theory -> theory
val boolean: string -> bool
val integer: string -> int
val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string
val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string
val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
val set_meta_args_parser : (theory -> string parser) -> unit
val pretty_text: Proof.context -> string -> Pretty.T
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
val str_of_source: Token.src -> string
val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context ->
Token.src -> 'a list -> Pretty.T list
val string_of_margin: Proof.context -> Pretty.T -> string
val output: Proof.context -> Pretty.T list -> string
val verbatim_text: Proof.context -> string -> string
val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition
end;
structure Thy_Output: THY_OUTPUT =
struct
(** options **)
val display = Attrib.setup_option_bool ("thy_output_display", \<^here>);
val break = Attrib.setup_option_bool ("thy_output_break", \<^here>);
val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>);
val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>);
val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>);
val source = Attrib.setup_option_bool ("thy_output_source", \<^here>);
val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>);
structure Wrappers = Proof_Data
(
type T = ((unit -> string) -> unit -> string) list;
fun init _ = [];
);
fun add_wrapper wrapper = Wrappers.map (cons wrapper);
val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f);
(** maintain global antiquotations **)
structure Antiquotations = Theory_Data
(
type T =
(Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table *
(string -> Proof.context -> Proof.context) Name_Space.table;
val empty : T =
(Name_Space.empty_table Markup.document_antiquotationN,
Name_Space.empty_table Markup.document_antiquotation_optionN);
val extend = I;
fun merge ((commands1, options1), (commands2, options2)) : T =
(Name_Space.merge_tables (commands1, commands2),
Name_Space.merge_tables (options1, options2));
);
val get_antiquotations = Antiquotations.get o Proof_Context.theory_of;
fun add_command name cmd thy = thy
|> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd));
fun add_option name opt thy = thy
|> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd));
fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt));
fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt));
fun command src state ctxt =
let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src
in f src' state ctxt end;
fun option ((xname, pos), s) ctxt =
let
val (_, opt) =
Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos);
in opt s ctxt end;
fun print_antiquotations verbose ctxt =
let
val (commands, options) = get_antiquotations ctxt;
val command_names = map #1 (Name_Space.markup_table verbose ctxt commands);
val option_names = map #1 (Name_Space.markup_table verbose ctxt options);
in
[Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names),
Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)]
end |> Pretty.writeln_chunks;
fun antiquotation name scan body =
add_command name
(fn src => fn state => fn ctxt =>
let val (x, ctxt') = Token.syntax scan src ctxt
in body {source = src, state = state, context = ctxt'} x end);
(** syntax of antiquotations **)
(* option values *)
fun boolean "" = true
| boolean "true" = true
| boolean "false" = false
| boolean s = error ("Bad boolean value: " ^ quote s);
fun integer s =
let
fun int ss =
(case Library.read_int ss of (i, []) => i
| _ => error ("Bad integer value: " ^ quote s));
in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end;
(* outer syntax *)
local
val property =
Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";
val properties =
Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) [];
in
val antiq =
Parse.!!!
(Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof)
>> (fn ((name, props), args) => (props, name :: args));
end;
(* eval antiquote *)
local
fun eval_antiq state (opts, src) =
let
val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
val print_ctxt = Context_Position.set_visible false preview_ctxt;
fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
val _ = cmd preview_ctxt;
val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN];
in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end;
in
fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss
| eval_antiquote state (Antiquote.Control {name, body, ...}) =
eval_antiq state
([], Token.make_src name (if null body then [] else [Token.read_cartouche body]))
| eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) =
let
val keywords =
(case try Toplevel.presentation_context_of state of
SOME ctxt => Thy_Header.get_keywords' ctxt
| NONE =>
error ("Unknown context -- cannot expand document antiquotations" ^
Position.here pos));
in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end;
end;
(* output text *)
fun output_text state {markdown} source =
let
val is_reported =
(case try Toplevel.context_of state of
SOME ctxt => Context_Position.is_visible ctxt
| NONE => true);
val pos = Input.pos_of source;
val syms = Input.source_explode source;
val _ =
if is_reported then
Position.report pos (Markup.language_document (Input.is_delimited source))
else ();
val output_antiquotes = map (eval_antiquote state) #> implode;
fun output_line line =
(if Markdown.line_is_item line then "\\item " else "") ^
output_antiquotes (Markdown.line_content line);
fun output_blocks blocks = space_implode "\n\n" (map output_block blocks)
and output_block (Markdown.Par lines) = cat_lines (map output_line lines)
| output_block (Markdown.List {kind, body, ...}) =
Latex.environment (Markdown.print_kind kind) (output_blocks body);
in
if Toplevel.is_skipped_proof state then ""
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
then
let
val ants = Antiquote.parse pos syms;
val reports = Antiquote.antiq_reports ants;
val blocks = Markdown.read_antiquotes ants;
val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else ();
in output_blocks blocks end
else
let
val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms);
val reports = Antiquote.antiq_reports ants;
val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else ();
in output_antiquotes ants end
end;
(** present theory source **)
(*NB: arranging white space around command spans is a black art*)
(* presentation tokens *)
datatype token =
No_Token
| Basic_Token of Token.T
| Markup_Token of string * string * Input.source
| Markup_Env_Token of string * string * Input.source
| Raw_Token of Input.source;
fun basic_token pred (Basic_Token tok) = pred tok
| basic_token _ _ = false;
val improper_token = basic_token Token.is_improper;
val comment_token = basic_token Token.is_comment;
val blank_token = basic_token Token.is_blank;
val newline_token = basic_token Token.is_newline;
(* output token *)
fun output_token state tok =
(case tok of
No_Token => ""
| Basic_Token tok => Latex.output_token tok
| Markup_Token (cmd, meta_args, source) =>
"%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n"
| Markup_Env_Token (cmd, meta_args, source) =>
Latex.environment ("isamarkup" ^ cmd)
(meta_args ^ output_text state {markdown = true} source)
| Raw_Token source =>
"%\n" ^ output_text state {markdown = true} source ^ "\n");
(* command spans *)
type command = string * Position.T * string list; (*name, position, tags*)
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
datatype span = Span of command * (source * source * source * source) * bool;
fun make_span cmd src =
let
fun take_newline (tok :: toks) =
if newline_token (fst tok) then ([tok], toks, true)
else ([], tok :: toks, false)
| take_newline [] = ([], [], false);
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
src
|> take_prefix (improper_token o fst)
||>> take_suffix (improper_token o fst)
||>> take_prefix (comment_token o fst)
||> take_newline;
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
(* present spans *)
local
fun err_bad_nesting pos =
error ("Bad nesting of commands in presentation" ^ pos);
fun edge which f (x: string option, y) =
if x = y then I
else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt));
val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
in
fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) =
let
val present = fold (fn (tok, (flag, 0)) =>
Buffer.add (output_token state' tok)
#> Buffer.add flag
| _ => I);
val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;
val (tag, tags) = tag_stack;
val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
val nesting = Toplevel.level state' - Toplevel.level state;
val active_tag' =
if is_some tag' then tag'
else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE
else
(case Keyword.command_tags keywords cmd_name of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
val edge = (active_tag, active_tag');
val newline' =
if is_none active_tag' then span_newline else newline;
val tag_stack' =
if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack
else if nesting >= 0 then (tag', replicate nesting tag @ tags)
else
(case drop (~ nesting - 1) tags of
tg :: tgs => (tg, tgs)
| [] => err_bad_nesting (Position.here cmd_pos));
val buffer' =
buffer
|> end_tag edge
|> close_delim (fst present_cont) edge
|> snd present_cont
|> open_delim (present (#1 srcs)) edge
|> begin_tag edge
|> present (#2 srcs);
val present_cont' =
if newline then (present (#3 srcs), present (#4 srcs))
else (I, present (#3 srcs) #> present (#4 srcs));
in (tag_stack', active_tag', newline', buffer', present_cont') end;
fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) =
if not (null tags) then err_bad_nesting " at end of theory"
else
buffer
|> end_tag (active_tag, NONE)
|> close_delim (fst present_cont) (active_tag, NONE)
|> snd present_cont;
end;
(* present_thy *)
local
val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";
val space_proper =
Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper;
val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper;
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
val opt_newline = Scan.option (Scan.one Token.is_newline);
val ignore =
Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
>> pair (d + 1)) ||
Scan.depend (fn d => Scan.one Token.is_end_ignore --|
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
>> pair (d - 1));
val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end);
val locale =
Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |--
Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")")));
val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser)
in
fun present_thy thy command_results toks =
let
val keywords = Thy_Header.get_keywords thy;
(* tokens *)
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (No_Token, ("", d))));
fun markup pred mk flag = Scan.peek (fn d =>
improper |--
Parse.position (Scan.one (fn tok => Token.is_command tok andalso
pred keywords (Token.content_of tok))) --
Scan.repeat tag --
(improper |--
(Parse.!!!!
( (!meta_args_parser_hook thy)
-- ( (improper -- locale -- improper)
|-- (Parse.document_source))
--| improper_end)))
>> (fn (((tok, pos'), tags), (meta_args,source)) =>
let val name = Token.content_of tok
in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
Scan.one Token.is_command -- Scan.repeat tag
>> (fn ((cmd_mod, cmd), tags) =>
map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
[(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
(Basic_Token cmd, (markup_false, d)))]));
val cmt = Scan.peek (fn d =>
(Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
Parse.!!!! (improper |-- Parse.document_source) >>
(fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
val tokens =
(ignored ||
markup Keyword.is_document_heading Markup_Token markup_true ||
markup Keyword.is_document_body Markup_Env_Token markup_true ||
markup Keyword.is_document_raw (Raw_Token o #3) "") >> single ||
command ||
(cmt || other) >> single;
(* spans *)
val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false;
val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof;
val cmd = Scan.one (is_some o fst);
val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
val comments = Scan.many (comment_token o fst o snd);
val blank = Scan.one (blank_token o fst o snd);
val newline = Scan.one (newline_token o fst o snd);
val before_cmd =
Scan.option (newline -- comments) --
Scan.option (newline -- comments) --
Scan.option (blank -- comments) -- cmd;
val span =
Scan.repeat non_cmd -- cmd --
Scan.repeat (Scan.unless before_cmd non_cmd) --
Scan.option (newline >> (single o snd))
>> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
val spans = toks
|> take_suffix Token.is_space |> #1
|> Source.of_list
|> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
|> Source.source stopper (Scan.error (Scan.bulk span))
|> Source.exhaust;
(* present commands *)
fun present_command tr span st st' =
Toplevel.setmp_thread_position tr (present_span keywords span st st');
fun present _ [] = I
| present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
((NONE, []), NONE, true, Buffer.empty, (I, I))
|> present Toplevel.toplevel (command_results ~~ spans)
|> present_trailer
else error "Messed-up outer syntax for presentation"
end;
fun set_meta_args_parser f = (meta_args_parser_hook:= f)
end;
(** setup default output **)
(* options *)
val _ = Theory.setup
(add_option @{binding show_types} (Config.put show_types o boolean) #>
add_option @{binding show_sorts} (Config.put show_sorts o boolean) #>
add_option @{binding show_structs} (Config.put show_structs o boolean) #>
add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #>
add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #>
add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #>
add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #>
add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #>
add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #>
add_option @{binding display} (Config.put display o boolean) #>
add_option @{binding break} (Config.put break o boolean) #>
add_option @{binding quotes} (Config.put quotes o boolean) #>
add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #>
add_option @{binding margin} (Config.put margin o integer) #>
add_option @{binding indent} (Config.put indent o integer) #>
add_option @{binding source} (Config.put source o boolean) #>
add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer));
(* basic pretty printing *)
fun perhaps_trim ctxt =
not (Config.get ctxt display) ? Symbol.trim_blanks;
fun pretty_text ctxt =
Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines;
fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
fun pretty_term_style ctxt (style, t) =
pretty_term ctxt (style t);
fun pretty_thm_style ctxt (style, th) =
pretty_term ctxt (style (Thm.full_prop_of th));
fun pretty_term_typ ctxt (style, t) =
let val t' = style t
in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end;
fun pretty_term_typeof ctxt (style, t) =
Syntax.pretty_typ ctxt (Term.fastype_of (style t));
fun pretty_const ctxt c =
let
val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c)
handle TYPE (msg, _, _) => error msg;
val ([t'], _) = Variable.import_terms true [t] ctxt;
in pretty_term ctxt t' end;
fun pretty_abbrev ctxt s =
let
val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s;
fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t);
val (head, args) = Term.strip_comb t;
val (c, T) = Term.dest_Const head handle TERM _ => err ();
val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c
handle TYPE _ => err ();
val t' = Term.betapplys (Envir.expand_atom T (U, u), args);
val eq = Logic.mk_equals (t, t');
val ctxt' = Variable.auto_fixes eq ctxt;
in Proof_Context.pretty_term_abbrev ctxt' eq end;
fun pretty_locale ctxt (name, pos) =
let
val thy = Proof_Context.theory_of ctxt
in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end;
fun pretty_class ctxt =
Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt;
fun pretty_type ctxt s =
let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s
in Pretty.str (Proof_Context.extern_type ctxt name) end;
fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full;
fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name);
(* default output *)
val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src;
fun maybe_pretty_source pretty ctxt src xs =
map (pretty ctxt) xs (*always pretty in order to exhibit errors!*)
|> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I);
fun string_of_margin ctxt = Pretty.string_of_margin (Config.get ctxt margin);
fun output ctxt prts =
prts
|> Config.get ctxt quotes ? map Pretty.quote
|> (if Config.get ctxt display then
map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
#> space_implode "\\isasep\\isanewline%\n"
#> Latex.environment "isabelle"
else
map
((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
#> Output.output)
#> space_implode "\\isasep\\isanewline%\n"
#> enclose "\\isa{" "}");
(* verbatim text *)
fun verbatim_text ctxt =
if Config.get ctxt display then
split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #>
Latex.output_ascii #> Latex.environment "isabellett"
else
split_lines #>
map (Latex.output_ascii #> enclose "\\isatt{" "}") #>
space_implode "\\isasep\\isanewline%\n";
(* antiquotations for basic entities *)
local
fun basic_entities name scan pretty =
antiquotation name scan (fn {source, context = ctxt, ...} =>
output ctxt o maybe_pretty_source pretty ctxt source);
fun basic_entities_style name scan pretty =
antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) =>
output ctxt
(maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs));
fun basic_entity name scan = basic_entities name (scan >> single);
in
val _ = Theory.setup
(basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #>
basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #>
basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #>
basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #>
basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #>
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #>
basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #>
basic_entity @{binding locale} (Scan.lift (Parse.position Args.name)) pretty_locale #>
basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #>
basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #>
basic_entities @{binding prf} Attrib.thms (pretty_prf false) #>
basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #>
basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory);
end;
(** document command **)
fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
end;