Compare commits

..

1 Commit

Author SHA1 Message Date
Michael Herzberg f5af1116db Added missing character data stuff. 2019-12-11 20:39:26 +00:00
92 changed files with 4022 additions and 14021 deletions

2
.gitignore vendored
View File

@@ -1,2 +0,0 @@
output

View File

@@ -1,37 +0,0 @@
An overview of the formalization is given in:
Achim D. Brucker and Michael Herzberg. A Formal Semantics of the Core DOM
in Isabelle/HOL. In The 2018 Web Conference Companion (WWW). Pages 741-749,
ACM Press, 2018. doi:10.1145/3184558.3185980
A BibTeX entry for LaTeX users is
@InProceedings{ brucker.ea:core-dom:2018,
abstract = {At its core, the Document Object Model (DOM) defines a tree-like
data structure for representing documents in general and HTML
documents in particular. It forms the heart of any rendering engine
of modern web browsers. Formalizing the key concepts of the DOM is
a pre-requisite for the formal reasoning over client-side JavaScript
programs as well as for the analysis of security concepts in modern
web browsers. In this paper, we present a formalization of the core DOM,
with focus on the node-tree and the operations defined on node-trees,
in Isabelle/HOL. We use the formalization to verify the functional
correctness of the most important functions defined in the DOM standard.
Moreover, our formalization is (1) extensible, i.e., can be extended without
the need of re-proving already proven properties and (2) executable, i.e.,
we can generate executable code from our specification.},
address = {New York, NY, USA},
author = {Achim D. Brucker and Michael Herzberg},
booktitle= {The 2018 Web Conference Companion (WWW)},
conf_date= {April 23-27, 2018},
doi = {10.1145/3184558.3185980},
editor = {Pierre{-}Antoine Champin and Fabien L. Gandon and Mounia Lalmas and Panagiotis G. Ipeirotis},
isbn = {978-1-4503-5640-4/18/04},
keywords = {Document Object Model, DOM, Formal Semantics, Isabelle/HOL},
location = {Lyon, France},
pages = {741--749},
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-core-dom-2018.pdf},
publisher= {ACM Press},
title = {A Formal Semantics of the Core {DOM} in {Isabelle/HOL}},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-core-dom-2018},
year = {2018},
}

View File

@@ -1,21 +0,0 @@
chapter AFP
session "Core_DOM" (AFP) = "HOL-Library" +
options [timeout = 1200, document = pdf, document_variants="document:outline=/proof,/ML",document_output=output]
directories
"common"
"common/classes"
"common/monads"
"common/pointers"
"common/preliminaries"
"common/tests"
"standard"
"standard/classes"
"standard/pointers"
theories
Core_DOM
Core_DOM_Tests
document_files (in "document")
"root.tex"
"root.bib"

View File

@@ -1,94 +0,0 @@
(***********************************************************************************
* Copyright (c) 2016-2018 The University of Sheffield, UK
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
***********************************************************************************)
theory Testing_Utils
imports Main
begin
ML \<open>
val _ = Theory.setup
(Method.setup @{binding timed_code_simp}
(Scan.succeed (SIMPLE_METHOD' o (CHANGED_PROP oo (fn a => fn b => fn tac =>
let
val start = Time.now ();
val result = Code_Simp.dynamic_tac a b tac;
val t = Time.now() - start;
in
(if length (Seq.list_of result) > 0 then Output.information ("Took " ^ (Time.toString t)) else ());
result
end))))
"timed simplification with code equations");
val _ = Theory.setup
(Method.setup @{binding timed_eval}
(Scan.succeed (SIMPLE_METHOD' o (fn a => fn b => fn tac =>
let
val eval = CONVERSION (Conv.params_conv ~1 (K (Conv.concl_conv ~1 (Code_Runtime.dynamic_holds_conv a))) a) THEN'
resolve_tac a [TrueI];
val start = Time.now ();
val result = eval b tac
val t = Time.now() - start;
in
(if length (Seq.list_of result) > 0 then Output.information ("Took " ^ (Time.toString t)) else ());
result
end)))
"timed evaluation");
val _ = Theory.setup
(Method.setup @{binding timed_eval_and_code_simp}
(Scan.succeed (SIMPLE_METHOD' o (fn a => fn b => fn tac =>
let
val eval = CONVERSION (Conv.params_conv ~1 (K (Conv.concl_conv ~1 (Code_Runtime.dynamic_holds_conv a))) a) THEN'
resolve_tac a [TrueI];
val start = Time.now ();
val result = eval b tac
val t = Time.now() - start;
val start2 = Time.now ();
val result2_opt =
Timeout.apply (seconds 600.0) (fn _ => SOME (Code_Simp.dynamic_tac a b tac)) ()
handle Timeout.TIMEOUT _ => NONE;
val t2 = Time.now() - start2;
in
if length (Seq.list_of result) > 0 then (Output.information ("eval took " ^ (Time.toString t));
File.append (Path.explode "/tmp/isabellebench") (Time.toString t ^ ",")) else ();
(case result2_opt of
SOME result2 =>
(if length (Seq.list_of result2) > 0 then (Output.information ("code_simp took " ^ (Time.toString t2));
File.append (Path.explode "/tmp/isabellebench") (Time.toString t2 ^ "\n")) else ())
| NONE => (Output.information "code_simp timed out after 600s"; File.append (Path.explode "/tmp/isabellebench") (">600.000\n")));
result
end)))
"timed evaluation and simplification with code equations with file output");
\<close>
(* To run the DOM test cases with timing information output, simply replace the use *)
(* of "eval" with either "timed_code_simp", "timed_eval", or, to run both and write the results *)
(* to /tmp/isabellebench, "timed_eval_and_code_simp". *)
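(* Example added for illustration (not part of the original theory): the timed methods *)
(* defined above are drop-in replacements for "eval" and "code_simp", e.g. on a trivial *)
(* executable goal: *)
lemma "(2::nat) + 2 = 4"
  by timed_eval
(* This proves the goal like "by eval" would, but additionally prints the elapsed time. *)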
end

File diff suppressed because it is too large

View File

@@ -1,328 +0,0 @@
(***********************************************************************************
* Copyright (c) 2016-2018 The University of Sheffield, UK
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
***********************************************************************************)
section\<open>Element\<close>
text\<open>In this theory, we introduce the types for the Element class.\<close>
theory ElementClass
imports
"NodeClass"
"ShadowRootPointer"
begin
text\<open>The type @{type "DOMString"} is a type synonym for @{type "string"}, defined
in \autoref{sec:Core_DOM_Basic_Datatypes}.\<close>
type_synonym attr_key = DOMString
type_synonym attr_value = DOMString
type_synonym attrs = "(attr_key, attr_value) fmap"
type_synonym tag_name = DOMString
record ('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr) RElement = RNode +
nothing :: unit
tag_name :: tag_name
child_nodes :: "('node_ptr, 'element_ptr, 'character_data_ptr) node_ptr list"
attrs :: attrs
shadow_root_opt :: "'shadow_root_ptr shadow_root_ptr option"
type_synonym
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element) Element
= "('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option)
RElement_scheme"
register_default_tvars
"('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element) Element"
type_synonym
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Node, 'Element) Node
= "(('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option) RElement_ext
+ 'Node) Node"
register_default_tvars
"('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Node, 'Element) Node"
type_synonym
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) Object
= "('Object, ('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option)
RElement_ext + 'Node) Object"
register_default_tvars
"('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) Object"
type_synonym
('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr,
'Object, 'Node, 'Element) heap
= "('document_ptr document_ptr + 'shadow_root_ptr shadow_root_ptr + 'object_ptr,
'element_ptr element_ptr + 'character_data_ptr character_data_ptr + 'node_ptr, 'Object,
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option) RElement_ext +
'Node) heap"
register_default_tvars
"('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr,
'Object, 'Node, 'Element) heap"
type_synonym heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l = "(unit, unit, unit, unit, unit, unit, unit, unit, unit) heap"
definition element_ptr_kinds :: "(_) heap \<Rightarrow> (_) element_ptr fset"
where
"element_ptr_kinds heap =
the |`| (cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r |`| (ffilter is_element_ptr_kind (node_ptr_kinds heap)))"
lemma element_ptr_kinds_simp [simp]:
"element_ptr_kinds (Heap (fmupd (cast element_ptr) element (the_heap h))) =
{|element_ptr|} |\<union>| element_ptr_kinds h"
apply(auto simp add: element_ptr_kinds_def)[1]
by force
definition element_ptrs :: "(_) heap \<Rightarrow> (_) element_ptr fset"
where
"element_ptrs heap = ffilter is_element_ptr (element_ptr_kinds heap)"
definition cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) Node \<Rightarrow> (_) Element option"
where
"cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node =
(case RNode.more node of Inl element \<Rightarrow> Some (RNode.extend (RNode.truncate node) element) | _ \<Rightarrow> None)"
adhoc_overloading cast cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
abbreviation cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) Object \<Rightarrow> (_) Element option"
where
"cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t obj \<equiv> (case cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e obj of Some node \<Rightarrow> cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node | None \<Rightarrow> None)"
adhoc_overloading cast cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
definition cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e :: "(_) Element \<Rightarrow> (_) Node"
where
"cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e element = RNode.extend (RNode.truncate element) (Inl (RNode.more element))"
adhoc_overloading cast cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e
abbreviation cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t :: "(_) Element \<Rightarrow> (_) Object"
where
"cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ptr \<equiv> cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t (cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e ptr)"
adhoc_overloading cast cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t
consts is_element_kind :: 'a
definition is_element_kind\<^sub>N\<^sub>o\<^sub>d\<^sub>e :: "(_) Node \<Rightarrow> bool"
where
"is_element_kind\<^sub>N\<^sub>o\<^sub>d\<^sub>e ptr \<longleftrightarrow> cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t ptr \<noteq> None"
adhoc_overloading is_element_kind is_element_kind\<^sub>N\<^sub>o\<^sub>d\<^sub>e
lemmas is_element_kind_def = is_element_kind\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def
abbreviation is_element_kind\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t :: "(_) Object \<Rightarrow> bool"
where
"is_element_kind\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ptr \<equiv> cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t ptr \<noteq> None"
adhoc_overloading is_element_kind is_element_kind\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t
lemma element_ptr_kinds_commutes [simp]:
"cast element_ptr |\<in>| node_ptr_kinds h \<longleftrightarrow> element_ptr |\<in>| element_ptr_kinds h"
apply(auto simp add: node_ptr_kinds_def element_ptr_kinds_def)[1]
by (metis (no_types, lifting) element_ptr_casts_commute2 ffmember_filter fimage_eqI
fset.map_comp is_element_ptr_kind_none node_ptr_casts_commute3
node_ptr_kinds_commutes node_ptr_kinds_def option.sel option.simps(3))
definition get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) element_ptr \<Rightarrow> (_) heap \<Rightarrow> (_) Element option"
where
"get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr h = Option.bind (get\<^sub>N\<^sub>o\<^sub>d\<^sub>e (cast element_ptr) h) cast"
adhoc_overloading get get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
locale l_type_wf_def\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
begin
definition a_type_wf :: "(_) heap \<Rightarrow> bool"
where
"a_type_wf h = (NodeClass.type_wf h \<and> (\<forall>element_ptr \<in> fset (element_ptr_kinds h).
get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr h \<noteq> None))"
end
global_interpretation l_type_wf_def\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t defines type_wf = a_type_wf .
lemmas type_wf_defs = a_type_wf_def
locale l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t = l_type_wf type_wf for type_wf :: "((_) heap \<Rightarrow> bool)" +
assumes type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t: "type_wf h \<Longrightarrow> ElementClass.type_wf h"
sublocale l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t \<subseteq> l_type_wf\<^sub>N\<^sub>o\<^sub>d\<^sub>e
apply(unfold_locales)
using NodeClass.a_type_wf_def
by (meson ElementClass.a_type_wf_def l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_axioms l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
locale l_get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_lemmas = l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
begin
sublocale l_get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_lemmas by unfold_locales
lemma get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_type_wf:
assumes "type_wf h"
shows "element_ptr |\<in>| element_ptr_kinds h \<longleftrightarrow> get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr h \<noteq> None"
using l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_axioms assms
apply(simp add: type_wf_defs get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
by (metis NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf bind_eq_None_conv element_ptr_kinds_commutes notin_fset
option.distinct(1))
end
global_interpretation l_get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_lemmas type_wf
by unfold_locales
definition put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) element_ptr \<Rightarrow> (_) Element \<Rightarrow> (_) heap \<Rightarrow> (_) heap"
where
"put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr element = put\<^sub>N\<^sub>o\<^sub>d\<^sub>e (cast element_ptr) (cast element)"
adhoc_overloading put put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
lemma put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_ptr_in_heap:
assumes "put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr element h = h'"
shows "element_ptr |\<in>| element_ptr_kinds h'"
using assms
unfolding put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def element_ptr_kinds_def
by (metis element_ptr_kinds_commutes element_ptr_kinds_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_ptr_in_heap)
lemma put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_put_ptrs:
assumes "put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr element h = h'"
shows "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|cast element_ptr|}"
using assms
by (simp add: put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_put_ptrs)
lemma cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_inject [simp]:
"cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e x = cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e y \<longleftrightarrow> x = y"
apply(simp add: cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def RObject.extend_def RNode.extend_def)
by (metis (full_types) RNode.surjective old.unit.exhaust)
lemma cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_none [simp]:
"cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node = None \<longleftrightarrow> \<not> (\<exists>element. cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e element = node)"
apply(auto simp add: cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def RObject.extend_def RNode.extend_def
split: sum.splits)[1]
by (metis (full_types) RNode.select_convs(2) RNode.surjective old.unit.exhaust)
lemma cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_some [simp]:
"cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node = Some element \<longleftrightarrow> cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e element = node"
by(auto simp add: cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def RObject.extend_def RNode.extend_def
split: sum.splits)
lemma cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_inv [simp]: "cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t (cast\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e element) = Some element"
by simp
lemma get_elment_ptr_simp1 [simp]:
"get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr (put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr element h) = Some element"
by(auto simp add: get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
lemma get_elment_ptr_simp2 [simp]:
"element_ptr \<noteq> element_ptr'
\<Longrightarrow> get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr (put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr' element h) = get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr h"
by(auto simp add: get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
abbreviation "create_element_obj tag_name_arg child_nodes_arg attrs_arg shadow_root_opt_arg
\<equiv> \<lparr> RObject.nothing = (), RNode.nothing = (), RElement.nothing = (),
tag_name = tag_name_arg, Element.child_nodes = child_nodes_arg, attrs = attrs_arg,
shadow_root_opt = shadow_root_opt_arg, \<dots> = None \<rparr>"
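(* Example added for illustration (not part of the original theory): create_element_obj
   builds a plain element record, e.g. an element with tag name ''div'', no children,
   no attributes, and no shadow root; the record selectors return the given fields: *)
lemma "tag_name (create_element_obj ''div'' [] fmempty None) = ''div''"
  by simp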
definition new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) heap \<Rightarrow> ((_) element_ptr \<times> (_) heap)"
where
"new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h =
(let new_element_ptr = element_ptr.Ref (Suc (fMax (finsert 0 (element_ptr.the_ref
|`| (element_ptrs h)))))
in
(new_element_ptr, put new_element_ptr (create_element_obj '''' [] fmempty None) h))"
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_ptr_in_heap:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
shows "new_element_ptr |\<in>| element_ptr_kinds h'"
using assms
unfolding new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def
using put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_ptr_in_heap by blast
lemma new_element_ptr_new:
"element_ptr.Ref (Suc (fMax (finsert 0 (element_ptr.the_ref |`| element_ptrs h)))) |\<notin>| element_ptrs h"
by (metis Suc_n_not_le_n element_ptr.sel(1) fMax_ge fimage_finsert finsertI1 finsertI2 set_finsert)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_ptr_not_in_heap:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
shows "new_element_ptr |\<notin>| element_ptr_kinds h"
using assms
unfolding new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def
by (metis Pair_inject element_ptrs_def ffmember_filter new_element_ptr_new is_element_ptr_ref)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_new_ptr:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
shows "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|cast new_element_ptr|}"
using assms
by (metis Pair_inject new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_put_ptrs)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_is_element_ptr:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
shows "is_element_ptr new_element_ptr"
using assms
by(auto simp add: new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t [simp]:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
assumes "ptr \<noteq> cast new_element_ptr"
shows "get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ptr h = get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ptr h'"
using assms
by(auto simp add: new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_get\<^sub>N\<^sub>o\<^sub>d\<^sub>e [simp]:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
assumes "ptr \<noteq> cast new_element_ptr"
shows "get\<^sub>N\<^sub>o\<^sub>d\<^sub>e ptr h = get\<^sub>N\<^sub>o\<^sub>d\<^sub>e ptr h'"
using assms
by(auto simp add: new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t [simp]:
assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
assumes "ptr \<noteq> new_element_ptr"
shows "get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t ptr h = get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t ptr h'"
using assms
by(auto simp add: new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def)
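(* Example added for illustration (not part of the original theory): looking up the freshly
   allocated pointer in the new heap returns the default element that new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t stores. *)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_self:
  assumes "new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t h = (new_element_ptr, h')"
  shows "get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t new_element_ptr h' = Some (create_element_obj '''' [] fmempty None)"
  using assms
  by(auto simp add: new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def)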
locale l_known_ptr\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
begin
definition a_known_ptr :: "(_) object_ptr \<Rightarrow> bool"
where
"a_known_ptr ptr = (known_ptr ptr \<or> is_element_ptr ptr)"
lemma known_ptr_not_element_ptr: "\<not>is_element_ptr ptr \<Longrightarrow> a_known_ptr ptr \<Longrightarrow> known_ptr ptr"
by(simp add: a_known_ptr_def)
end
global_interpretation l_known_ptr\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t defines known_ptr = a_known_ptr .
lemmas known_ptr_defs = a_known_ptr_def
locale l_known_ptrs\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t = l_known_ptr known_ptr for known_ptr :: "(_) object_ptr \<Rightarrow> bool"
begin
definition a_known_ptrs :: "(_) heap \<Rightarrow> bool"
where
"a_known_ptrs h = (\<forall>ptr \<in> fset (object_ptr_kinds h). known_ptr ptr)"
lemma known_ptrs_known_ptr:
"ptr |\<in>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> known_ptr ptr"
apply(simp add: a_known_ptrs_def)
using notin_fset by fastforce
lemma known_ptrs_preserved:
"object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
by(auto simp add: a_known_ptrs_def)
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t known_ptr defines known_ptrs = a_known_ptrs .
lemmas known_ptrs_defs = a_known_ptrs_def
lemma known_ptrs_is_l_known_ptrs: "l_known_ptrs known_ptr known_ptrs"
using known_ptrs_known_ptr known_ptrs_preserved known_ptrs_subset known_ptrs_new_ptr l_known_ptrs_def by blast
end

View File

@@ -1,186 +0,0 @@
(***********************************************************************************
* Copyright (c) 2016-2018 The University of Sheffield, UK
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
***********************************************************************************)
section\<open>ShadowRoot\<close>
text\<open>In this theory, we introduce the typed pointers for the class ShadowRoot. Note that, in
this document, we will not make use of ShadowRoots, nor will we discuss their particular properties.
We only include them here, as they are required for future work and they cannot be added later
following the object-oriented extensibility of our data model.\<close>
theory ShadowRootPointer
imports
"DocumentPointer"
begin
datatype 'shadow_root_ptr shadow_root_ptr = Ref (the_ref: ref) | Ext 'shadow_root_ptr
register_default_tvars "'shadow_root_ptr shadow_root_ptr"
type_synonym ('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr,
'document_ptr, 'shadow_root_ptr) object_ptr
= "('shadow_root_ptr shadow_root_ptr + 'object_ptr, 'node_ptr, 'element_ptr,
'character_data_ptr, 'document_ptr) object_ptr"
register_default_tvars "('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr,
'document_ptr, 'shadow_root_ptr) object_ptr"
definition cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) shadow_root_ptr \<Rightarrow> (_) shadow_root_ptr"
where
"cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r = id"
definition cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_)shadow_root_ptr \<Rightarrow> (_) object_ptr"
where
"cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = object_ptr.Ext (Inr (Inr (Inl ptr)))"
definition cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) object_ptr \<Rightarrow> (_) shadow_root_ptr option"
where
"cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = (case ptr of
object_ptr.Ext (Inr (Inr (Inl shadow_root_ptr))) \<Rightarrow> Some shadow_root_ptr
| _ \<Rightarrow> None)"
adhoc_overloading cast cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
definition is_shadow_root_ptr_kind :: "(_) object_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr_kind ptr = (case cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of Some _ \<Rightarrow> True
| None \<Rightarrow> False)"
consts is_shadow_root_ptr :: 'a
definition is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) shadow_root_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = (case ptr of shadow_root_ptr.Ref _ \<Rightarrow> True
| _ \<Rightarrow> False)"
abbreviation is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) object_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> (case cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of
Some shadow_root_ptr \<Rightarrow> is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr
| None \<Rightarrow> False)"
adhoc_overloading is_shadow_root_ptr is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
lemmas is_shadow_root_ptr_def = is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
consts is_shadow_root_ptr_ext :: 'a
abbreviation "is_shadow_root_ptr_ext\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> \<not> is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr"
abbreviation "is_shadow_root_ptr_ext\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> (case cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of
Some shadow_root_ptr \<Rightarrow> is_shadow_root_ptr_ext\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr
| None \<Rightarrow> False)"
adhoc_overloading is_shadow_root_ptr_ext is_shadow_root_ptr_ext\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr_ext\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
instantiation shadow_root_ptr :: (linorder) linorder
begin
definition
less_eq_shadow_root_ptr :: "(_::linorder) shadow_root_ptr \<Rightarrow> (_) shadow_root_ptr \<Rightarrow> bool"
where
"less_eq_shadow_root_ptr x y \<equiv> (case x of Ext i \<Rightarrow> (case y of Ext j \<Rightarrow> i \<le> j | Ref _ \<Rightarrow> False)
| Ref i \<Rightarrow> (case y of Ext _ \<Rightarrow> True | Ref j \<Rightarrow> i \<le> j))"
definition less_shadow_root_ptr :: "(_::linorder) shadow_root_ptr \<Rightarrow> (_) shadow_root_ptr \<Rightarrow> bool"
where "less_shadow_root_ptr x y \<equiv> x \<le> y \<and> \<not> y \<le> x"
instance
apply(standard)
by(auto simp add: less_eq_shadow_root_ptr_def less_shadow_root_ptr_def
split: shadow_root_ptr.splits)
end
lemma is_shadow_root_ptr_ref [simp]: "is_shadow_root_ptr (shadow_root_ptr.Ref n)"
by(simp add: is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma is_shadow_root_ptr_not_node_ptr[simp]: "\<not>is_shadow_root_ptr (cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r node_ptr)"
by(simp add: is_shadow_root_ptr_def cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma cast_shadow_root_ptr_not_node_ptr [simp]:
"cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr \<noteq> cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r node_ptr"
"cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r node_ptr \<noteq> cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr"
unfolding cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def by auto
lemma cast_shadow_root_ptr_not_document_ptr [simp]:
"cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr \<noteq> cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr"
"cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr \<noteq> cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr"
unfolding cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def by auto
lemma shadow_root_ptr_no_node_ptr_cast [simp]:
"\<not> is_shadow_root_ptr_kind (cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r node_ptr)"
by(simp add: cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def is_shadow_root_ptr_kind_def)
lemma node_ptr_no_shadow_root_ptr_cast [simp]:
"\<not> is_node_ptr_kind (cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr)"
using is_node_ptr_kind_obtains by fastforce
lemma shadow_root_ptr_no_document_ptr_cast [simp]:
"\<not> is_shadow_root_ptr_kind (cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr)"
by(simp add: cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def is_shadow_root_ptr_kind_def)
lemma document_ptr_no_shadow_root_ptr_cast [simp]:
"\<not> is_document_ptr_kind (cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr)"
using is_document_ptr_kind_obtains by fastforce
lemma shadow_root_ptr_shadow_root_ptr_cast [simp]:
"is_shadow_root_ptr_kind (cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr)"
by (simp add: cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def is_shadow_root_ptr_kind_def)
lemma shadow_root_ptr_casts_commute [simp]:
"cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = Some shadow_root_ptr \<longleftrightarrow> cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr = ptr"
unfolding cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
by(auto split: object_ptr.splits sum.splits)
lemma shadow_root_ptr_casts_commute2 [simp]:
"(cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r (cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr) = Some shadow_root_ptr)"
by simp
lemma shadow_root_ptr_casts_commute3 [simp]:
assumes "is_shadow_root_ptr_kind ptr"
shows "cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r (the (cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr)) = ptr"
using assms
by(auto simp add: is_shadow_root_ptr_kind_def cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
split: object_ptr.splits sum.splits)
lemma is_shadow_root_ptr_kind_obtains:
assumes "is_shadow_root_ptr_kind ptr"
obtains shadow_root_ptr where "ptr = cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr"
using assms is_shadow_root_ptr_kind_def
by (metis case_optionE shadow_root_ptr_casts_commute)
lemma is_shadow_root_ptr_kind_none:
assumes "\<not>is_shadow_root_ptr_kind ptr"
shows "cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = None"
using assms
unfolding is_shadow_root_ptr_kind_def cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
by (auto split: object_ptr.splits sum.splits)
lemma cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_inject [simp]:
"cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r x = cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r y \<longleftrightarrow> x = y"
by(simp add: cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_ext_none [simp]:
"cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r (object_ptr.Ext (Inr (Inr (Inr object_ext_ptr)))) = None"
by(simp add: cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma is_shadow_root_ptr_kind_simp1 [dest]: "is_document_ptr_kind ptr \<Longrightarrow> \<not>is_shadow_root_ptr_kind ptr"
by (metis document_ptr_no_shadow_root_ptr_cast shadow_root_ptr_casts_commute3)
lemma is_shadow_root_ptr_kind_simp2 [dest]: "is_node_ptr_kind ptr \<Longrightarrow> \<not>is_shadow_root_ptr_kind ptr"
by (metis node_ptr_no_shadow_root_ptr_cast shadow_root_ptr_casts_commute3)
end

View File

@@ -1,37 +0,0 @@
An overview of the formalization is given in:
Achim D. Brucker and Michael Herzberg. A Formal Semantics of the Core DOM
in Isabelle/HOL. In The 2018 Web Conference Companion (WWW). Pages 741-749,
ACM Press, 2018. doi:10.1145/3184558.3185980
A BibTeX entry for LaTeX users is
@InProceedings{ brucker.ea:core-dom:2018,
abstract = {At its core, the Document Object Model (DOM) defines a tree-like
data structure for representing documents in general and HTML
documents in particular. It forms the heart of any rendering engine
of modern web browsers. Formalizing the key concepts of the DOM is
a pre-requisite for the formal reasoning over client-side JavaScript
programs as well as for the analysis of security concepts in modern
web browsers. In this paper, we present a formalization of the core DOM,
with focus on the node-tree and the operations defined on node-trees,
in Isabelle/HOL. We use the formalization to verify the functional
correctness of the most important functions defined in the DOM standard.
Moreover, our formalization is (1) extensible, i.e., can be extended without
the need of re-proving already proven properties and (2) executable, i.e.,
we can generate executable code from our specification.},
address = {New York, NY, USA},
author = {Achim D. Brucker and Michael Herzberg},
booktitle= {The 2018 Web Conference Companion (WWW)},
conf_date= {April 23-27, 2018},
doi = {10.1145/3184558.3185980},
editor = {Pierre{-}Antoine Champin and Fabien L. Gandon and Mounia Lalmas and Panagiotis G. Ipeirotis},
isbn = {978-1-4503-5640-4/18/04},
keywords = {Document Object Model, DOM, Formal Semantics, Isabelle/HOL},
location = {Lyon, France},
pages = {741--749},
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-core-dom-2018.pdf},
publisher= {ACM Press},
title = {A Formal Semantics of the Core {DOM} in {Isabelle/HOL}},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-core-dom-2018},
year = {2018},
}

View File

@@ -1,20 +0,0 @@
chapter AFP
session "Core_SC_DOM" (AFP) = "HOL-Library" +
options [timeout = 1200, document = pdf, document_variants="document:outline=/proof,/ML",document_output=output]
directories
"common"
"common/classes"
"common/monads"
"common/pointers"
"common/preliminaries"
"common/tests"
"safely_composable"
"safely_composable/classes"
"safely_composable/pointers"
theories
Core_DOM
Core_DOM_Tests
document_files (in "document")
"root.tex"
"root.bib"

View File

@@ -1 +0,0 @@
../../Core_DOM/common/Core_DOM.thy

View File

@@ -1 +0,0 @@
../../Core_DOM/common/Core_DOM_Basic_Datatypes.thy

View File

@@ -1 +0,0 @@
../../Core_DOM/common/Core_DOM_Functions.thy

View File

@@ -1 +0,0 @@
../../Core_DOM/common/Core_DOM_Tests.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/classes/BaseClass.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/classes/CharacterDataClass.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/classes/DocumentClass.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/classes/NodeClass.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/classes/ObjectClass.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/BaseMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/CharacterDataMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/DocumentMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/ElementMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/NodeMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/monads/ObjectMonad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/CharacterDataPointer.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/DocumentPointer.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/ElementPointer.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/NodePointer.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/ObjectPointer.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/pointers/Ref.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/preliminaries/Heap_Error_Monad.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/preliminaries/Hiding_Type_Variables.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/preliminaries/Testing_Utils.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Core_DOM_BaseTest.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document-adoptNode.html

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document-adoptNode.html.orig

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document-getElementById.html

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document-getElementById.html.orig

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document_adoptNode.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Document_getElementById.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node-insertBefore.html

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node-insertBefore.html.orig

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node-removeChild.html

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node-removeChild.html.orig

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node_insertBefore.thy

View File

@@ -1 +0,0 @@
../../../Core_DOM/common/tests/Node_removeChild.thy

View File

@@ -1,508 +0,0 @@
@STRING{j-fac = "Formal Aspects of Computing" }
@STRING{pub-springer={Springer-Verlag} }
@STRING{pub-springer:adr={Heidelberg} }
@STRING{s-lncs = "Lecture Notes in Computer Science" }
@Book{ nipkow.ea:isabelle:2002,
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
title = {Isabelle/HOL---A Proof Assistant for Higher-Order Logic},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
volume = 2283,
doi = {10.1007/3-540-45949-9},
abstract = {This book is a self-contained introduction to interactive
proof in higher-order logic (HOL), using the proof
assistant Isabelle2002. It is a tutorial for potential
users rather than a monograph for researchers. The book has
three parts.
1. Elementary Techniques shows how to model functional
programs in higher-order logic. Early examples involve
lists and the natural numbers. Most proofs are two steps
long, consisting of induction on a chosen variable followed
by the auto tactic. But even this elementary part covers
such advanced topics as nested and mutual recursion. 2.
Logic and Sets presents a collection of lower-level tactics
that you can use to apply rules selectively. It also
describes Isabelle/HOL's treatment of sets, functions and
relations and explains how to define sets inductively. One
of the examples concerns the theory of model checking, and
another is drawn from a classic textbook on formal
languages. 3. Advanced Material describes a variety of
other topics. Among these are the real numbers, records and
overloading. Advanced techniques are described involving
induction and recursion. A whole chapter is devoted to an
extended example: the verification of a security protocol.
},
year = 2002,
acknowledgement={brucker, 2007-02-19},
bibkey = {nipkow.ea:isabelle:2002}
}
@Misc{ dom-specification,
year = 2016,
month = {DOM Living Standard -- Last Updated 20 October 2016},
day = 20,
url = {https://dom.spec.whatwg.org/},
organization = {Web Hypertext Application Technology Working Group
(WHATWG)},
note = {An archived copy of the version from 20 October 2016 is
available at
\url{https://git.logicalhacking.com/BrowserSecurity/fDOM-idl/}.}
}
@InProceedings{ brucker.ea:core-dom:2018,
author = {Achim D. Brucker and Michael Herzberg},
title = {A Formal Semantics of the Core {DOM} in {Isabelle/HOL}},
booktitle = {Proceedings of the Web Programming, Design, Analysis, And
Implementation (WPDAI) track at WWW 2018},
location = {Lyon, France},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-fdom-2018},
year = {2018},
abstract = {At its core, the Document Object Model (DOM) defines a
tree-like data structure for representing documents in
general and HTML documents in particular. It forms the
heart of any rendering engine of modern web browsers.
Formalizing the key concepts of the DOM is a pre-requisite
for the formal reasoning over client-side JavaScript
programs as well as for the analysis of security concepts
in modern web browsers. In this paper, we present a
formalization of the core DOM, with focus on the node-tree
and the operations defined on node-trees, in Isabelle/HOL.
We use the formalization to verify the functional
correctness of the most important functions defined in the
DOM standard. Moreover, our formalization is (1)
extensible, i.e., can be extended without the need of
re-proving already proven properties and (2) executable,
i.e., we can generate executable code from our
specification. },
keywords = {Document Object Model, DOM, Formal Semantics,
Isabelle/HOL},
classification= {conference},
areas = {formal methods, software},
public = {yes}
}
@Article{ klein:operating:2009,
author = {Gerwin Klein},
title = {Operating System Verification --- An Overview},
journal = {S\={a}dhan\={a}},
publisher = pub-springer,
year = 2009,
volume = 34,
number = 1,
month = feb,
pages = {27--69},
abstract = {This paper gives a high-level introduction to the topic of
formal, interactive, machine-checked software verification
in general, and the verification of operating systems code
in particular. We survey the state of the art, the
advantages and limitations of machine-checked code proofs,
and describe two specific ongoing larger-scale verification
projects in more detail.}
}
@InProceedings{ gardner.ea:securing:2009,
author = {Ryan W. Gardner and Sujata Garera and Matthew W. Pagano
and Matthew Green and Aviel D. Rubin},
title = {Securing medical records on smart phones},
booktitle = {ACM workshop on Security and privacy in medical and
home-care systems (SPIMACS)},
year = 2009,
isbn = {978-1-60558-790-5},
pages = {31--40},
location = {Chicago, Illinois, USA},
doi = {10.1145/1655084.1655090},
address = {New York, NY, USA},
publisher = {ACM},
abstract = {There is an inherent conflict between the desire to
maintain privacy of one's medical records and the need to
make those records available during an emergency. To
satisfy both objectives, we introduce a flexible
architecture for the secure storage of medical records on
smart phones. In our system, a person can view her records
at any time, and emergency medical personnel can view the
records as long as the person is present (even if she is
unconscious). Our solution allows for efficient revocation
of access rights and is robust against adversaries who can
access the phone's storage offline.}
}
@InProceedings{ raad.ea:dom:2016,
author = {Azalea Raad and Jos{\'{e}} Fragoso Santos and Philippa
Gardner},
title = {{DOM:} Specification and Client Reasoning},
booktitle = {Programming Languages and Systems - 14th Asian Symposium,
{APLAS} 2016, Hanoi, Vietnam, November 21-23, 2016,
Proceedings},
pages = {401--422},
year = 2016,
crossref = {igarashi:programming:2016},
doi = {10.1007/978-3-319-47958-3_21},
abstract = {We present an axiomatic specification of a key fragment of
DOM using structural separation logic. This specification
allows us to develop modular reasoning about client
programs that call the DOM.}
}
@InProceedings{ bohannon.ea:featherweight:2010,
author = {Aaron Bohannon and Benjamin C. Pierce},
title = {Featherweight {F}irefox: {F}ormalizing the Core of a Web
Browser},
booktitle = {Usenix Conference on Web Application Development
(WebApps)},
year = 2010,
month = jun,
url = {http://www.cis.upenn.edu/~bohannon/browser-model/},
abstract = {We offer a formal specification of the core functionality
of a web browser in the form of a small-step operational
semantics. The specification accurately models the
asynchronous nature of web browsers and covers the basic
aspects of windows, DOM trees, cookies, HTTP requests and
responses, user input, and a minimal scripting language
with first-class functions, dynamic evaluation, and AJAX
requests. No security enforcement mechanisms are
included; instead, the model is intended to serve as a
basis for formalizing and experimenting with different
security policies and mechanisms. We survey the most
interesting design choices and discuss how our model
relates to real web browsers.}
}
@Proceedings{ joyce.ea:higher:1994,
editor = {Jeffrey J. Joyce and Carl-Johan H. Seger},
title = {Higher Order Logic Theorem Proving and Its Applications
(HUG)},
booktitle = {Higher Order Logic Theorem Proving and Its Applications
(HUG)},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
abstract = {Theorem proving based techniques for formal hardware
verification have been evolving constantly and researchers
are getting able to reason about more complex issues than
it was possible or practically feasible in the past. It is
often the case that a model of a system is built in a
formal logic and then reasoning about this model is carried
out in the logic. Concern is growing on how to consistently
interface a model built in a formal logic with an informal
CAD environment. Researchers have been investigating how to
define the formal semantics of hardware description
languages so that one can formally reason about models
informally dealt with in a CAD environment. At the
University of Cambridge, the embedding of hardware
description languages in a logic is classified in two
categories: deep embedding and shallow embedding. In this
paper we argue that there are degrees of formality in
shallow embedding a language in a logic. The choice of the
degree of formality is a trade-off between the security of
the embedding and the amount and complexity of the proof
effort in the logic. We also argue that the design of a
language could consider this verifiability issue. There are
choices in the design of a language that can make it easier
to improve the degree of formality, without implying
serious drawbacks for the CAD environment.},
volume = 780,
year = 1994,
doi = {10.1007/3-540-57826-9},
isbn = {3-540-57826-9},
acknowledgement={brucker, 2007-02-19}
}
@Misc{ whatwg:dom:2017,
key={whatwg},
author={{WHATWG}},
url={https://dom.spec.whatwg.org/commit-snapshots/6253e53af2fbfaa6d25ad09fd54280d8083b2a97/},
month=mar,
year=2017,
day=24,
title={{DOM} -- Living Standard},
note={Last Updated 24 {March} 2017},
institution = {WHATWG},
}
@Misc{ whatwg:html:2017,
key={whatwg},
author={{WHATWG}},
url={https://html.spec.whatwg.org/},
month=apr,
year=2017,
day=13,
title={{HTML} -- Living Standard},
note={Last Updated 13 {April} 2017},
institution = {WHATWG},
}
@Misc{ w3c:dom:2015,
key={w3c},
author={{W3C}},
url={https://www.w3.org/TR/dom/},
month=nov,
year=2015,
day=19,
title={{W3C} {DOM4}},
institution = {W3C},
}
@Proceedings{ igarashi:programming:2016,
editor = {Atsushi Igarashi},
title = {Programming Languages and Systems - 14th Asian Symposium,
{APLAS} 2016, Hanoi, Vietnam, November 21-23, 2016,
Proceedings},
series = {Lecture Notes in Computer Science},
volume = 10017,
year = 2016,
doi = {10.1007/978-3-319-47958-3},
isbn = {978-3-319-47957-6}
}
@InProceedings{ gardner.ea:dom:2008,
author = {Philippa Gardner and Gareth Smith and Mark J. Wheelhouse
and Uri Zarfaty},
title = {{DOM:} Towards a Formal Specification},
booktitle = {{PLAN-X} 2008, Programming Language Technologies for XML,
An {ACM} {SIGPLAN} Workshop colocated with {POPL} 2008, San
Francisco, California, USA, January 9, 2008},
year = 2008,
crossref = {plan-x:2008},
url = {http://gemo.futurs.inria.fr/events/PLANX2008/papers/p18.pdf},
abstract = {The W3C Document Object Model (DOM) specifies an XML
update library. DOM is written in English, and is therefore
not compositional and not complete. We provide a first
step towards a compositional specification of DOM. Unlike
DOM, we are able to work with a minimal set of commands and
obtain a complete reasoning for straight-line code. Our
work transfers O'Hearn, Reynolds and Yang's
local Hoare reasoning for analysing heaps to XML, viewing
XML as an in-place memory store as does DOM. In
particular, we apply recent work by Calcagno, Gardner and
Zarfaty on local Hoare reasoning about a simple tree-update
language to DOM, showing that our reasoning scales to DOM.
Our reasoning not only formally specifies a significant
subset of DOM Core Level 1, but can also be used to verify
e.g. invariant properties of simple JavaScript programs.}
}
@InProceedings{ jang.ea:establishing:2012,
author = {Dongseok Jang and Zachary Tatlock and Sorin Lerner},
title = {Establishing Browser Security Guarantees through Formal
Shim Verification},
booktitle = {Proceedings of the 21th {USENIX} Security Symposium,
Bellevue, WA, USA, August 8-10, 2012},
pages = {113--128},
year = 2012,
crossref = {kohno:proceedings:2012},
url = {https://www.usenix.org/conference/usenixsecurity12/technical-sessions/presentation/jang},
abstract = { Web browsers mediate access to valuable private data in
domains ranging from health care to banking. Despite this
critical role, attackers routinely exploit browser
vulnerabilities to exfiltrate private data and take over
the underlying system. We present Quark, a browser
whose kernel has been implemented and verified in Coq. We
give a specification of our kernel, show that the
implementation satisfies the specification, and finally
show that the specification implies several security
properties, including tab non-interference, cookie
integrity and confidentiality, and address bar integrity.
}
}
@Proceedings{ kohno:proceedings:2012,
editor = {Tadayoshi Kohno},
title = {Proceedings of the 21th {USENIX} Security Symposium,
Bellevue, WA, USA, August 8-10, 2012},
publisher = {{USENIX} Association},
year = 2012,
timestamp = {Thu, 15 May 2014 09:12:27 +0200}
}
@Proceedings{ plan-x:2008,
title = {{PLAN-X} 2008, Programming Language Technologies for XML,
An {ACM} {SIGPLAN} Workshop colocated with {POPL} 2008, San
Francisco, California, USA, January 9, 2008},
year = 2008,
timestamp = {Fri, 18 Jan 2008 13:01:04 +0100}
}
@Article{ brucker.ea:extensible:2008-b,
abstract = {We present an extensible encoding of object-oriented data models into HOL. Our encoding is supported by a datatype package that leverages the use of the shallow embedding technique to object-oriented specification and programming languages. The package incrementally compiles an object-oriented data model, i.e., a class model, to a theory containing object-universes, constructors, accessor functions, coercions (casts) between dynamic and static types, characteristic sets, and co-inductive class invariants. The package is conservative, i.e., all properties are derived entirely from constant definitions, including the constraints over object structures. As an application, we use the package for an object-oriented core-language called IMP++, for which we formally prove the correctness of a Hoare-Logic with respect to a denotational semantics.},
address = {Heidelberg},
author = {Achim D. Brucker and Burkhart Wolff},
doi = {10.1007/s10817-008-9108-3},
issn = {0168-7433},
issue = {3},
journal = {Journal of Automated Reasoning},
keywords = {object-oriented data models, HOL, theorem proving, verification},
language = {USenglish},
pages = {219--249},
pdf = {https://www.brucker.ch/bibliography/download/2008/brucker.ea-extensible-2008-b.pdf},
publisher = {Springer-Verlag},
title = {An Extensible Encoding of Object-oriented Data Models in HOL},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-extensible-2008-b},
volume = {41},
year = {2008},
}
@PhDThesis{ brucker:interactive:2007,
abstract = {We present a semantic framework for object-oriented specification languages. We develop this framework as a conservative shallow embedding in Isabelle/HOL. Using only conservative extensions guarantees by construction the consistency of our formalization. Moreover, we show how our framework can be used to build an interactive proof environment, called HOL-OCL, for object-oriented specifications in general and for UML/OCL in particular.\\\\Our main contributions are an extensible encoding of object-oriented data structures in HOL, a datatype package for object-oriented specifications, and the development of several equational and tableaux calculi for object-oriented specifications. Further, we show that our formal framework can be the basis of a formal machine-checked semantics for OCL that is compliant to the OCL 2.0 standard.},
abstract_de = {In dieser Arbeit wird ein semantisches Rahmenwerk f{\"u}r objektorientierte Spezifikationen vorgestellt. Das Rahmenwerk ist als konservative, flache Einbettung in Isabelle/HOL realisiert. Durch die Beschr{\"a}nkung auf konservative Erweiterungen kann die logische Konsistenz der Einbettung garantiert werden. Das semantische Rahmenwerk wird verwendet, um das interaktives Beweissystem HOL-OCL f{\"u}r objektorientierte Spezifikationen im Allgemeinen und insbesondere f{\"u}r UML/OCL zu entwickeln.\\\\Die Hauptbeitr{\"a}ge dieser Arbeit sind die Entwicklung einer erweiterbaren Kodierung objektorientierter Datenstrukturen in HOL, ein Datentyp-Paket f{\"u}r objektorientierte Spezifikationen und die Entwicklung verschiedener Kalk{\"u}le f{\"u}r objektorientierte Spezifikationen. Zudem zeigen wir, wie das formale Rahmenwerk verwendet werden kann, um eine formale, maschinell gepr{\"u}fte Semantik f{\"u}r OCL anzugeben, die konform zum Standard f{\"u}r OCL 2.0 ist.},
author = {Achim D. Brucker},
keywords = {OCL, UML, formal semantics, theorem proving, Isabelle, HOL-OCL},
month = {mar},
note = {ETH Dissertation No. 17097.},
pdf = {https://www.brucker.ch/bibliography/download/2007/brucker-interactive-2007.pdf},
school = {ETH Zurich},
title = {An Interactive Proof Environment for Object-oriented Specifications},
url = {https://www.brucker.ch/bibliography/abstract/brucker-interactive-2007},
year = {2007},
}
@InCollection{ brucker.ea:standard-compliance-testing:2018,
talk = {talk:brucker.ea:standard-compliance-testing:2018},
abstract = {Most popular technologies are based on informal or
semiformal standards that lack a rigid formal semantics.
Typical examples include web technologies such as the DOM
or HTML, which are defined by the Web Hypertext Application
Technology Working Group (WHATWG) and the World Wide Web
Consortium (W3C). While there might be API specifications
and test cases meant to assert the compliance of a certain
implementation, the actual standard is rarely accompanied
by a formal model that would lend itself to, e.g.,
verifying the security or safety properties of real
systems.
Even when such a formalization of a standard exists, two
important questions arise: first, to what extent does the
formal model comply with the standard and, second, to what
extent does the implementation comply with the formal model
and the assumptions made during the verification? In this
paper, we present an approach that brings all three
involved artifacts - the (semi-)formal standard, the
formalization of the standard, and the implementations -
closer together by combining verification, symbolic
execution, and specification-based testing.},
keywords = {standard compliance, compliance tests, DOM},
location = {Toulouse, France},
author = {Achim D. Brucker and Michael Herzberg},
booktitle = {{TAP} 2018: Tests And Proofs},
language = {USenglish},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
number = 10889,
editor = {Cathrine Dubois and Burkhart Wolff},
title = {Formalizing (Web) Standards: An Application of Test and
Proof},
categories = {holtestgen, websecurity},
classification= {conference},
areas = {formal methods, software engineering},
public = {yes},
year = 2018,
doi = {10.1007/978-3-319-92994-1_9},
pages = {159--166},
isbn = {978-3-642-38915-3},
pdf = {http://www.brucker.ch/bibliography/download/2018/brucker.ea-standard-compliance-testing-2018.pdf},
url = {http://www.brucker.ch/bibliography/abstract/brucker.ea-standard-compliance-testing-2018}
}
@InCollection{ brucker.ea:interactive:2005,
keywords = {symbolic test case generations, black box testing, white
box testing, theorem proving, interactive testing},
abstract = {HOL-TestGen is a test environment for specification-based
unit testing built upon the proof assistant Isabelle/HOL\@.
While there is considerable skepticism with regard to
interactive theorem provers in testing communities, we
argue that they are a natural choice for (automated)
symbolic computations underlying systematic tests. This
holds in particular for the development of non-trivial
formal test plans for complex software, where some parts of
the overall activity inherently require guidance by a test
engineer. In this paper, we present the underlying methods
for both black box and white box testing in interactive
unit test scenarios. HOL-TestGen can also be understood as
a unifying technical and conceptual framework for
presenting and investigating the variety of unit test
techniques in a logically consistent way. },
location = {Edinburgh},
author = {Achim D. Brucker and Burkhart Wolff},
booktitle = {Formal Approaches to Testing of Software},
language = {USenglish},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
number = 3997,
doi = {10.1007/11759744_7},
isbn = {3-540-25109-X},
editor = {Wolfgang Grieskamp and Carsten Weise},
pdf = {http://www.brucker.ch/bibliography/download/2005/brucker.ea-interactive-2005.pdf},
project = {CSFMDOS},
title = {Interactive Testing using {HOL}-{TestGen}},
classification= {workshop},
areas = {formal methods, software},
categories = {holtestgen},
year = 2005,
public = {yes},
url = {http://www.brucker.ch/bibliography/abstract/brucker.ea-interactive-2005}
}
@Article{ brucker.ea:theorem-prover:2012,
author = {Achim D. Brucker and Burkhart Wolff},
journal = j-fac,
publisher = pub-springer,
address = pub-springer:adr,
language = {USenglish},
categories = {holtestgen},
title = {On Theorem Prover-based Testing},
year = 2013,
issn = {0934-5043},
pages = {683--721},
volume = 25,
number = 5,
classification= {journal},
areas = {formal methods, software},
public = {yes},
doi = {10.1007/s00165-012-0222-y},
keywords = {test case generation, domain partitioning, test sequence,
theorem proving, HOL-TestGen},
abstract = {HOL-TestGen is a specification and test case generation
environment extending the interactive theorem prover
Isabelle/HOL. As such, HOL-TestGen allows for an integrated
workflow supporting interactive theorem proving, test case
generation, and test data generation.
The HOL-TestGen method is two-staged: first, the original
formula is partitioned into test cases by transformation
into a normal form called test theorem. Second, the test
cases are analyzed for ground instances (the test data)
satisfying the constraints of the test cases. Particular
emphasis is put on the control of explicit test-hypotheses
which can be proven over concrete programs.
Due to the generality of the underlying framework, our
system can be used for black-box unit, sequence, reactive
sequence and white-box test scenarios. Although based on
particularly clean theoretical foundations, the system can
be applied for substantial case-studies. },
pdf = {http://www.brucker.ch/bibliography/download/2012/brucker.ea-theorem-prover-2012.pdf},
url = {http://www.brucker.ch/bibliography/abstract/brucker.ea-theorem-prover-2012}
}

View File

@ -1,261 +0,0 @@
\documentclass[10pt,DIV16,a4paper,abstract=true,twoside=semi,openright]
{scrreprt}
\usepackage[USenglish]{babel}
\usepackage[numbers, sort&compress]{natbib}
\usepackage{isabelle,isabellesym}
\usepackage{booktabs}
\usepackage{paralist}
\usepackage{graphicx}
\usepackage{amssymb}
\usepackage{xspace}
\usepackage{xcolor}
\usepackage{listings}
\lstloadlanguages{HTML}
\usepackage[]{mathtools}
\usepackage[pdfpagelabels, pageanchor=false, plainpages=false]{hyperref}
\lstdefinestyle{html}{language=XML,
basicstyle=\ttfamily,
commentstyle=\itshape,
keywordstyle=\color{blue},
ndkeywordstyle=\color{blue},
}
\lstdefinestyle{displayhtml}{style=html,
floatplacement={tbp},
captionpos=b,
framexleftmargin=0pt,
basicstyle=\ttfamily\scriptsize,
backgroundcolor=\color{black!2},
frame=lines,
}
\lstnewenvironment{html}[1][]{\lstset{style=displayhtml, #1}}{}
\def\inlinehtml{\lstinline[style=html, columns=fullflexible]}
\pagestyle{headings}
\isabellestyle{default}
\setcounter{tocdepth}{1}
\newcommand{\ie}{i.\,e.\xspace}
\newcommand{\eg}{e.\,g.\xspace}
\newcommand{\thy}{\isabellecontext}
\renewcommand{\isamarkupsection}[1]{%
\begingroup%
\def\isacharunderscore{\textunderscore}%
\section{#1 (\thy)}%
\def\isacharunderscore{-}%
\expandafter\label{sec:\isabellecontext}%
\endgroup%
}
\title{Core SC DOM\\\medskip \Large
A Formal Model of the Document Object Model for Safe Components}%
\author{%
\href{https://www.brucker.ch/}{Achim~D.~Brucker}\footnotemark[1]
\and
\href{https://www.michael-herzberg.de/}{Michael Herzberg}\footnotemark[2]
}
\publishers{
\footnotemark[1]~Department of Computer Science, University of Exeter, Exeter, UK\texorpdfstring{\\}{, }
\texttt{a.brucker@exeter.ac.uk}\\[2em]
%
\footnotemark[2]~ Department of Computer Science, The University of Sheffield, Sheffield, UK\texorpdfstring{\\}{, }
\texttt{msherzberg1@sheffield.ac.uk}
}
\begin{document}
\maketitle
\begin{abstract}
\begin{quote}
In this AFP entry, we formalize the core of the Document Object
Model (DOM). At its core, the DOM defines a tree-like data
structure for representing documents in general and HTML documents
in particular. It is the heart of any modern web browser.
Formalizing the key concepts of the DOM is a prerequisite for the
formal reasoning over client-side JavaScript programs and for the
analysis of security concepts in modern web browsers.
We present a formalization of the core DOM, with focus on the
\emph{node-tree} and the operations defined on node-trees, in
Isabelle/HOL\@. We use the formalization to verify the functional
correctness of the most important functions defined in the DOM
standard. Moreover, our formalization is
\begin{inparaenum}
\item \emph{extensible}, i.e., can be extended without the need of
re-proving already proven properties and
\item \emph{executable}, i.e., we can generate executable code
from our specification.
\end{inparaenum}
\bigskip
\noindent{\textbf{Keywords:}}
Document Object Model, DOM, Formal Semantics, Isabelle/HOL
\end{quote}
\end{abstract}
\tableofcontents
\cleardoublepage
\chapter{Introduction}
In a world in which more and more applications are offered as services
on the internet, web browsers start to take on a similarly central
role in our daily IT infrastructure as operating systems. Thus, web
browsers should be developed as rigorously and formally as operating
systems. While formal methods are a well-established technique in the
development of operating systems (see,
\eg,~\citet{klein:operating:2009} for an overview of formal
verification of operating systems), there are few proposals for
improving the development of web browsers using formal
approaches~\cite{gardner.ea:dom:2008,raad.ea:dom:2016,jang.ea:establishing:2012,bohannon.ea:featherweight:2010}.
As a first step towards a verified client-side web application stack,
we model and formally verify the Document Object Model (DOM) in
Isabelle/HOL\@. The DOM~\cite{whatwg:dom:2017,w3c:dom:2015} is
\emph{the} central data structure of all modern web browsers. At its
core, the Document Object Model (DOM) defines a tree-like data
structure for representing documents in general and HTML documents in
particular. Thus, the correctness of a DOM implementation is crucial
for ensuring that a web browser displays web pages correctly.
Moreover, the DOM is the core data structure underlying client-side
JavaScript programs, \ie, client-side JavaScript programs are mostly
programs that read, write, and update the DOM.
In more detail, we formalize the core DOM as a shallow
embedding~\cite{joyce.ea:higher:1994} in Isabelle/HOL\@. Our
formalization is based on a typed data model for the \emph{node-tree},
\ie, a data structure for representing XML-like documents in a tree
structure. Furthermore, we formalize a typed heap for storing
(partial) node-trees together with the necessary consistency
constraints. Finally, we formalize the operations (as described in the
DOM standard~\cite{whatwg:dom:2017}) on this heap that allow
manipulating node-trees.
Our machine-checked formalization of the DOM node
tree~\cite{whatwg:dom:2017} has the following desirable properties:
\begin{itemize}
\item It provides a \emph{consistency guarantee.} Since all
definitions in our formal semantics are conservative and all rules
are derived, the logical consistency of the DOM node-tree is reduced
to the consistency of HOL.
\item It serves as a \emph{technical basis for a proof system.} Based
on the derived rules and specific setup of proof tactics over
node-trees, our formalization provides a generic proof environment
for the verification of programs manipulating node-trees.
\item It is \emph{executable}, which allows us to validate its compliance
to the standard by evaluating the compliance test suite on the
formal model and
\item It is \emph{extensible} in the sense
of~\cite{brucker.ea:extensible:2008-b,brucker:interactive:2007},
\ie, properties proven over the core DOM do not need to be re-proven
for object-oriented extensions such as the HTML document model.
\end{itemize}
The rest of this document is automatically generated from the
formalization in Isabelle/HOL, i.e., all content is checked by
Isabelle.\footnote{For a brief overview of the work, we refer the
reader to~\cite{brucker.ea:core-dom:2018}.} The structure follows
the theory dependencies (see \autoref{fig:session-graph}): we start
with introducing the technical preliminaries of our formalization
(\autoref{cha:preliminaries}). Next, we introduce the concepts of
pointers (\autoref{cha:pointers}) and classes (\autoref{cha:classes}),
i.e., the core object-oriented datatypes of the DOM. On top of this
data model, we define the functional behavior of the DOM classes,
i.e., their methods (\autoref{cha:monads}). In \autoref{cha:dom}, we
introduce the formalization of the functionality of the core DOM,
i.e., the \emph{main entry point for users} that want to use this AFP
entry. Finally, we formalize the relevant compliance test cases in
\autoref{cha:tests}.
\begin{figure}
\centering
\includegraphics[width=.8\textwidth]{session_graph}
\caption{The Dependency Graph of the Isabelle Theories.\label{fig:session-graph}}
\end{figure}
\clearpage
\chapter{Preliminaries}
\label{cha:preliminaries}
In this chapter, we introduce the technical preliminaries of our
formalization of the core DOM, namely a mechanism for hiding type
variables and the heap error monad.
\input{Hiding_Type_Variables}
\input{Heap_Error_Monad}
\chapter{References and Pointers}
\label{cha:pointers}
In this chapter, we introduce a generic type for object-oriented
references and typed pointers for each class type defined in the DOM
standard.
\input{Ref}
\input{ObjectPointer}
\input{NodePointer}
\input{ElementPointer}
\input{CharacterDataPointer}
\input{DocumentPointer}
\input{ShadowRootPointer}
\chapter{Classes}
\label{cha:classes}
In this chapter, we introduce the classes of our DOM model.
The definition of the class types closely follows that of the
pointer types; instead of datatypes, we use records for our classes.
\input{BaseClass}
\input{ObjectClass}
\input{NodeClass}
\input{ElementClass}
\input{CharacterDataClass}
\input{DocumentClass}
\chapter{Monadic Object Constructors and Accessors}
\label{cha:monads}
In this chapter, we introduce the monadic method definitions for the
classes of our DOM formalization. Again, the overall structure follows
that of the class types and the pointer types.
\input{BaseMonad}
\input{ObjectMonad}
\input{NodeMonad}
\input{ElementMonad}
\input{CharacterDataMonad}
\input{DocumentMonad}
\chapter{The Core DOM}
\label{cha:dom}
In this chapter, we introduce the formalization of the core DOM, i.e.,
the most important algorithms for querying or modifying the DOM, as
defined in the standard. For more details, we refer the reader to
\cite{brucker.ea:core-dom:2018}.
\input{Core_DOM_Basic_Datatypes}
\input{Core_DOM_Functions}
\input{Core_DOM_Heap_WF}
\input{Core_DOM}
\chapter{Test Suite}
\label{cha:tests}
In this chapter, we present the formalized compliance test cases for
the core DOM. As our formalization is executable, we can
(symbolically) execute the test cases on top of our model. Executing
these test cases successfully shows that our model is compliant with the
official DOM standard. As future work, we plan to generate test cases
from our formal model (e.g.,
using~\cite{brucker.ea:interactive:2005,brucker.ea:theorem-prover:2012})
to improve the quality of the official compliance test suite. For more
details on the relation of test and proof in the context of web
standards, we refer the reader to
\cite{brucker.ea:standard-compliance-testing:2018}.
\input{Core_DOM_BaseTest} \input{Document_adoptNode}
\input{Document_getElementById} \input{Node_insertBefore}
\input{Node_removeChild} \input{Core_DOM_Tests}
{\small
\bibliographystyle{abbrvnat}
\bibliography{root}
}
\end{document}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
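
The introduction and chapter overviews in the root.tex above describe the
node-tree as the central data structure and stress that the formalization is
executable. As a minimal sketch of that idea only (not taken from the AFP
entry; all names are invented for illustration), the node-tree can be
pictured as a plain Isabelle/HOL datatype, leaving out the typed heap, the
pointer types, and the extensible class records of the actual formalization:

(* Sketch only: a node is either an element with a tag name and child
   nodes, or character data. *)
datatype simple_node
  = SElement string "simple_node list"
  | SCharacterData string

(* A toy query in document order; the real DOM functions instead operate on
   a heap of typed pointers and return monadic results. *)
fun tag_names :: "simple_node \<Rightarrow> string list" where
  "tag_names (SCharacterData _) = []"
| "tag_names (SElement tag cs) = tag # concat (map tag_names cs)"

(* Executability in the small: a concrete check in the spirit of the
   compliance tests described in the test-suite chapter. *)
lemma "tag_names (SElement ''html'' [SElement ''body'' [SCharacterData ''x'']])
         = [''html'', ''body'']"
  by simp

The preliminaries chapter also names the heap error monad. The following is a
rough sketch of such a state-plus-error monad; the type name, its exact shape,
and bind_sketch are illustrative guesses and need not match the
Heap_Error_Monad theory:

type_synonym ('heap, 'e, 'result) prog = "'heap \<Rightarrow> 'e + 'result \<times> 'heap"

(* Sketch of bind: run the first program, propagate an error (Inl), or feed
   the result and the updated heap into the continuation. *)
definition bind_sketch ::
  "('h, 'e, 'a) prog \<Rightarrow> ('a \<Rightarrow> ('h, 'e, 'b) prog) \<Rightarrow> ('h, 'e, 'b) prog" where
  "bind_sketch m f = (\<lambda>h. case m h of
       Inl e \<Rightarrow> Inl e
     | Inr (x, h') \<Rightarrow> f x h')"

On this reading, a statement of the form h \<turnstile> p \<rightarrow>\<^sub>h h', as it appears in the
theory excerpts below, would say that running p on heap h succeeds and
produces heap h'.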

10
Core_DOM/ROOT Normal file
View File

@ -0,0 +1,10 @@
chapter AFP
session "Core_DOM" (AFP) = "HOL-Library" +
options [timeout = 600]
theories
Core_DOM
Core_DOM_Tests
document_files
"root.tex"
"root.bib"

View File

@ -1,2 +0,0 @@
Core_DOM
Core_SC_DOM

View File

@ -164,8 +164,7 @@ lemma get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>
\<longleftrightarrow> get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a character_data_ptr h \<noteq> None"
using l_type_wf\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_axioms assms
apply(simp add: type_wf_defs get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def l_type_wf\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def)
by (metis assms bind.bind_lzero character_data_ptr_kinds_commutes fmember.rep_eq
local.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf option.exhaust option.simps(3))
by (metis assms bind.bind_lzero character_data_ptr_kinds_commutes fmember.rep_eq local.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf option.exhaust option.simps(3))
end
global_interpretation l_get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_lemmas type_wf
@ -259,16 +258,14 @@ lemma new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>
lemma new_character_data_ptr_new:
"character_data_ptr.Ref (Suc (fMax (finsert 0 (character_data_ptr.the_ref |`| character_data_ptrs h))))
|\<notin>| character_data_ptrs h"
by (metis Suc_n_not_le_n character_data_ptr.sel(1) fMax_ge fimage_finsert finsertI1
finsertI2 set_finsert)
by (metis Suc_n_not_le_n character_data_ptr.sel(1) fMax_ge fimage_finsert finsertI1 finsertI2 set_finsert)
lemma new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_ptr_not_in_heap:
assumes "new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a h = (new_character_data_ptr, h')"
shows "new_character_data_ptr |\<notin>| character_data_ptr_kinds h"
using assms
unfolding new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def
by (metis Pair_inject character_data_ptrs_def fMax_finsert fempty_iff ffmember_filter
fimage_is_fempty is_character_data_ptr_ref max_0L new_character_data_ptr_new)
by (metis Pair_inject character_data_ptrs_def fMax_finsert fempty_iff ffmember_filter fimage_is_fempty is_character_data_ptr_ref max_0L new_character_data_ptr_new)
lemma new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_new_ptr:
assumes "new\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a h = (new_character_data_ptr, h')"
@ -340,9 +337,7 @@ lemma known_ptrs_preserved:
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a known_ptr defines known_ptrs = a_known_ptrs .

View File

@ -136,8 +136,7 @@ lemma get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_type_w
shows "document_ptr |\<in>| document_ptr_kinds h \<longleftrightarrow> get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr h \<noteq> None"
using l_type_wf\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_axioms assms
apply(simp add: type_wf_defs get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
by (metis document_ptr_kinds_commutes fmember.rep_eq is_none_bind is_none_simps(1)
is_none_simps(2) local.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf)
by (metis document_ptr_kinds_commutes fmember.rep_eq is_none_bind is_none_simps(1) is_none_simps(2) local.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf)
end
global_interpretation l_get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_lemmas type_wf by unfold_locales
@ -196,14 +195,12 @@ lemma get_document_ptr_simp2 [simp]:
lemma get_document_ptr_simp3 [simp]:
"get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr (put\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr f h) = get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr h"
by(auto simp add: get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def put\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
lemma get_document_ptr_simp4 [simp]:
"get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr (put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr f h) = get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr h"
lemma get_document_ptr_simp4 [simp]: "get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr (put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t element_ptr f h) = get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr h"
by(auto simp add: get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def)
lemma get_document_ptr_simp5 [simp]:
"get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a character_data_ptr (put\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr f h) = get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a character_data_ptr h"
by(auto simp add: get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def put\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
lemma get_document_ptr_simp6 [simp]:
"get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr (put\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a character_data_ptr f h) = get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr h"
lemma get_document_ptr_simp6 [simp]: "get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr (put\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a character_data_ptr f h) = get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t document_ptr h"
by(auto simp add: get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def)
lemma new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t [simp]:
@ -330,9 +327,7 @@ lemma known_ptrs_preserved:
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t known_ptr defines known_ptrs = a_known_ptrs .

View File

@ -31,18 +31,18 @@ section\<open>Element\<close>
text\<open>In this theory, we introduce the types for the Element class.\<close>
theory ElementClass
imports
"NodeClass"
"ShadowRootPointer"
NodeClass
"../pointers/ShadowRootPointer"
begin
text\<open>The type @{type "DOMString"} is a type synonym for @{type "string"}, defined
in \autoref{sec:Core_DOM_Basic_Datatypes}.\<close>
type_synonym attr_key = DOMString
type_synonym attr_value = DOMString
type_synonym attrs = "(attr_key, attr_value) fmap"
type_synonym tag_name = DOMString
type_synonym tag_type = DOMString
record ('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr) RElement = RNode +
nothing :: unit
tag_name :: tag_name
tag_type :: tag_type
child_nodes :: "('node_ptr, 'element_ptr, 'character_data_ptr) node_ptr list"
attrs :: attrs
shadow_root_opt :: "'shadow_root_ptr shadow_root_ptr option"
@ -58,31 +58,24 @@ register_default_tvars
"('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Node, 'Element) Node"
type_synonym
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) Object
= "('Object, ('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option)
RElement_ext + 'Node) Object"
= "('Object, ('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option) RElement_ext + 'Node) Object"
register_default_tvars
"('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) Object"
type_synonym
('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr,
'Object, 'Node, 'Element) heap
= "(('document_ptr, 'shadow_root_ptr) document_ptr + 'object_ptr, 'element_ptr element_ptr +
'character_data_ptr character_data_ptr + 'node_ptr, 'Object,
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option)
RElement_ext + 'Node) heap"
('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) heap
= "(('document_ptr, 'shadow_root_ptr) document_ptr + 'object_ptr, 'element_ptr element_ptr + 'character_data_ptr character_data_ptr + 'node_ptr, 'Object,
('node_ptr, 'element_ptr, 'character_data_ptr, 'shadow_root_ptr, 'Element option) RElement_ext + 'Node) heap"
register_default_tvars
"('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr,
'Object, 'Node, 'Element) heap"
"('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr, 'shadow_root_ptr, 'Object, 'Node, 'Element) heap"
type_synonym heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l = "(unit, unit, unit, unit, unit, unit, unit, unit, unit) heap"
definition element_ptr_kinds :: "(_) heap \<Rightarrow> (_) element_ptr fset"
where
"element_ptr_kinds heap = the |`| (cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r |`|
(ffilter is_element_ptr_kind (node_ptr_kinds heap)))"
"element_ptr_kinds heap = the |`| (cast\<^sub>n\<^sub>o\<^sub>d\<^sub>e\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r |`| (ffilter is_element_ptr_kind (node_ptr_kinds heap)))"
lemma element_ptr_kinds_simp [simp]:
"element_ptr_kinds (Heap (fmupd (cast element_ptr) element (the_heap h))) =
{|element_ptr|} |\<union>| element_ptr_kinds h"
"element_ptr_kinds (Heap (fmupd (cast element_ptr) element (the_heap h))) = {|element_ptr|} |\<union>| element_ptr_kinds h"
apply(auto simp add: element_ptr_kinds_def)[1]
by force
@ -92,8 +85,7 @@ definition element_ptrs :: "(_) heap \<Rightarrow> (_) element_ptr fset"
definition cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) Node \<Rightarrow> (_) Element option"
where
"cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node = (case RNode.more node of Inl element \<Rightarrow>
Some (RNode.extend (RNode.truncate node) element) | _ \<Rightarrow> None)"
"cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t node = (case RNode.more node of Inl element \<Rightarrow> Some (RNode.extend (RNode.truncate node) element) | _ \<Rightarrow> None)"
adhoc_overloading cast cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
abbreviation cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) Object \<Rightarrow> (_) Element option"
@ -164,7 +156,7 @@ lemma get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_type_wf:
using l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_axioms assms
apply(simp add: type_wf_defs get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
by (metis NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf bind_eq_None_conv element_ptr_kinds_commutes notin_fset
option.distinct(1))
option.distinct(1))
end
global_interpretation l_get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_lemmas type_wf
@ -218,9 +210,9 @@ lemma get_elment_ptr_simp2 [simp]:
by(auto simp add: get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def)
abbreviation "create_element_obj tag_name_arg child_nodes_arg attrs_arg shadow_root_opt_arg
abbreviation "create_element_obj tag_type_arg child_nodes_arg attrs_arg shadow_root_opt_arg
\<equiv> \<lparr> RObject.nothing = (), RNode.nothing = (), RElement.nothing = (),
tag_name = tag_name_arg, Element.child_nodes = child_nodes_arg, attrs = attrs_arg,
tag_type = tag_type_arg, Element.child_nodes = child_nodes_arg, attrs = attrs_arg,
shadow_root_opt = shadow_root_opt_arg, \<dots> = None \<rparr>"
definition new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t :: "(_) heap \<Rightarrow> ((_) element_ptr \<times> (_) heap)"
@ -306,22 +298,17 @@ lemma known_ptrs_known_ptr:
apply(simp add: a_known_ptrs_def)
using notin_fset by fastforce
lemma known_ptrs_preserved:
"object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
lemma known_ptrs_preserved: "object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
by(auto simp add: a_known_ptrs_def)
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_subset: "object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t known_ptr defines known_ptrs = a_known_ptrs .
lemmas known_ptrs_defs = a_known_ptrs_def
lemma known_ptrs_is_l_known_ptrs: "l_known_ptrs known_ptr known_ptrs"
using known_ptrs_known_ptr known_ptrs_preserved known_ptrs_subset known_ptrs_new_ptr l_known_ptrs_def
by blast
using known_ptrs_known_ptr known_ptrs_preserved known_ptrs_subset known_ptrs_new_ptr l_known_ptrs_def by blast
end
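
The ElementClass excerpt above revolves around the RElement record, which
extends RNode and, transitively, RObject. The following is a much-simplified
sketch of this extensible-record pattern; the record and field names are
invented and deliberately do not reuse those of the entry:

record RObject_sketch =
  obj_nothing :: unit

record RNode_sketch = RObject_sketch +
  node_nothing :: unit

record RElement_sketch = RNode_sketch +
  tag :: string
  children :: "string list"   (* stands in for the typed node-pointer list *)

Functions and lemmas stated over the polymorphic scheme type of the base
record (here 'a RObject_sketch_scheme) also apply to values of its
extensions, which is roughly the sense in which properties proven over the
core DOM are claimed not to need re-proving for extensions such as the HTML
document model.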

View File

@ -111,7 +111,7 @@ lemma get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf:
using l_type_wf\<^sub>N\<^sub>o\<^sub>d\<^sub>e_axioms assms
apply(simp add: type_wf_defs get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def l_type_wf\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def)
by (metis bind_eq_None_conv ffmember_filter fimage_eqI fmember.rep_eq is_node_ptr_kind_cast
get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf node_ptr_casts_commute2 node_ptr_kinds_def option.sel option.simps(3))
get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf node_ptr_casts_commute2 node_ptr_kinds_def option.sel option.simps(3))
end
global_interpretation l_get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_lemmas type_wf
@ -181,23 +181,18 @@ definition a_known_ptrs :: "(_) heap \<Rightarrow> bool"
lemma known_ptrs_known_ptr: "a_known_ptrs h \<Longrightarrow> ptr |\<in>| object_ptr_kinds h \<Longrightarrow> known_ptr ptr"
apply(simp add: a_known_ptrs_def)
using notin_fset by fastforce
lemma known_ptrs_preserved:
"object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
lemma known_ptrs_preserved: "object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
by(auto simp add: a_known_ptrs_def)
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_subset: "object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>N\<^sub>o\<^sub>d\<^sub>e known_ptr defines known_ptrs = a_known_ptrs .
lemmas known_ptrs_defs = a_known_ptrs_def
lemma known_ptrs_is_l_known_ptrs: "l_known_ptrs known_ptr known_ptrs"
using known_ptrs_known_ptr known_ptrs_preserved l_known_ptrs_def known_ptrs_subset
known_ptrs_new_ptr
using known_ptrs_known_ptr known_ptrs_preserved l_known_ptrs_def known_ptrs_subset known_ptrs_new_ptr
by blast
lemma get_node_ptr_simp1 [simp]: "get\<^sub>N\<^sub>o\<^sub>d\<^sub>e node_ptr (put\<^sub>N\<^sub>o\<^sub>d\<^sub>e node_ptr node h) = Some node"

View File

@ -127,13 +127,9 @@ lemmas known_ptr_defs = a_known_ptr_def
locale l_known_ptrs = l_known_ptr known_ptr for known_ptr :: "(_) object_ptr \<Rightarrow> bool" +
fixes known_ptrs :: "(_) heap \<Rightarrow> bool"
assumes known_ptrs_known_ptr: "known_ptrs h \<Longrightarrow> ptr |\<in>| object_ptr_kinds h \<Longrightarrow> known_ptr ptr"
assumes known_ptrs_preserved:
"object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> known_ptrs h = known_ptrs h'"
assumes known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> known_ptrs h \<Longrightarrow> known_ptrs h'"
assumes known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
known_ptrs h \<Longrightarrow> known_ptrs h'"
assumes known_ptrs_preserved: "object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> known_ptrs h = known_ptrs h'"
assumes known_ptrs_subset: "object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> known_ptrs h \<Longrightarrow> known_ptrs h'"
assumes known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> known_ptrs h \<Longrightarrow> known_ptrs h'"
locale l_known_ptrs\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t = l_known_ptr known_ptr for known_ptr :: "(_) object_ptr \<Rightarrow> bool"
begin
@ -146,15 +142,11 @@ lemma known_ptrs_known_ptr:
apply(simp add: a_known_ptrs_def)
using notin_fset by fastforce
lemma known_ptrs_preserved:
"object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
lemma known_ptrs_preserved: "object_ptr_kinds h = object_ptr_kinds h' \<Longrightarrow> a_known_ptrs h = a_known_ptrs h'"
by(auto simp add: a_known_ptrs_def)
lemma known_ptrs_subset:
"object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_subset: "object_ptr_kinds h' |\<subseteq>| object_ptr_kinds h \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def less_eq_fset.rep_eq subsetD)
lemma known_ptrs_new_ptr:
"object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow>
a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
lemma known_ptrs_new_ptr: "object_ptr_kinds h' = object_ptr_kinds h |\<union>| {|new_ptr|} \<Longrightarrow> known_ptr new_ptr \<Longrightarrow> a_known_ptrs h \<Longrightarrow> a_known_ptrs h'"
by(simp add: a_known_ptrs_def)
end
global_interpretation l_known_ptrs\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t known_ptr defines known_ptrs = a_known_ptrs .
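
The known_ptrs hunks above only reflow locale assumptions and their
interpretations, but the pattern they follow recurs throughout the entry: an
l_... locale fixes an operation and states the lemmas assumed of it, and a
global_interpretation discharges these assumptions for the concrete
definition. A toy instance of the same shape (without the defines clause used
in the entry, and with all names invented for illustration):

locale l_counter =
  fixes count :: "nat list \<Rightarrow> nat"
  assumes count_empty: "count [] = 0"

definition my_count :: "nat list \<Rightarrow> nat" where
  "my_count xs = length xs"

global_interpretation l_counter my_count
  by unfold_locales (simp add: my_count_def)

Lemmas such as known_ptrs_is_l_known_ptrs in the excerpts play the analogous
role of recording that the concrete definitions satisfy the corresponding
locale interface.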

View File

@ -36,8 +36,8 @@ theory BaseMonad
begin
subsection\<open>Datatypes\<close>
datatype exception = NotFoundError | HierarchyRequestError | NotSupportedError | SegmentationFault
| AssertException | NonTerminationException | InvokeError | TypeError
datatype exception = NotFoundError | SegmentationFault | HierarchyRequestError | AssertException
| NonTerminationException | InvokeError | TypeError | DebugException nat
lemma finite_set_in [simp]: "x \<in> fset FS \<longleftrightarrow> x |\<in>| FS"
by (meson notin_fset)

View File

@ -57,7 +57,7 @@ lemma character_data_ptr_kinds_M_eq:
lemma character_data_ptr_kinds_M_reads:
"reads (\<Union>node_ptr. {preserved (get_M\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t node_ptr RObject.nothing)}) character_data_ptr_kinds_M h h'"
using node_ptr_kinds_M_reads
apply (simp add: reads_def node_ptr_kinds_M_defs character_data_ptr_kinds_M_defs
apply(simp add: reads_def node_ptr_kinds_M_defs character_data_ptr_kinds_M_defs
character_data_ptr_kinds_def preserved_def)
by (smt node_ptr_kinds_small preserved_def unit_all_impI)
@ -308,7 +308,7 @@ lemma type_wf_put_ptr_not_in_heap_E:
shows "type_wf h"
using assms
apply(auto simp add: type_wf_defs elim!: ElementMonad.type_wf_put_ptr_not_in_heap_E
split: option.splits if_splits)[1]
split: option.splits if_splits)
using assms(2) node_ptr_kinds_commutes by blast
lemma type_wf_put_ptr_in_heap_E:
@ -319,8 +319,7 @@ lemma type_wf_put_ptr_in_heap_E:
shows "type_wf h"
using assms
apply(auto simp add: type_wf_defs split: option.splits if_splits)[1]
by (metis (no_types, lifting) ElementClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf assms(2) bind.bind_lunit
cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_inv cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_inv get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def notin_fset option.collapse)
by (metis (no_types, lifting) ElementClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf assms(2) bind.bind_lunit cast\<^sub>N\<^sub>o\<^sub>d\<^sub>e\<^sub>2\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_inv cast\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>2\<^sub>N\<^sub>o\<^sub>d\<^sub>e_inv get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def notin_fset option.collapse)
subsection\<open>Preserving Types\<close>
@ -341,8 +340,8 @@ lemma new_element_is_l_new_element: "l_new_element type_wf"
using l_new_element.intro new_element_type_wf_preserved
by blast
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_name_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_name_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_type_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_type_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
apply(auto simp add: ElementMonad.put_M_defs put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def
dest!: get_heap_E
elim!: bind_returns_heap_E2
@ -467,7 +466,7 @@ lemma put_M\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^su
NodeClass.type_wf_defs CharacterDataMonad.get_M_defs
ObjectClass.a_type_wf_def
split: option.splits)[1]
apply (metis (no_types, lifting) bind_eq_Some_conv finite_set_in get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def)
apply (metis (no_types, lifting) bind_eq_Some_conv finite_set_in get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def)
apply (metis finite_set_in)
done
@ -499,7 +498,7 @@ lemma type_wf_preserved_small:
allI[OF assms(4), of id, simplified] character_data_ptr_kinds_small[OF assms(1)]
apply(auto simp add: type_wf_defs preserved_def get_M_defs character_data_ptr_kinds_small[OF assms(1)]
split: option.splits)[1]
apply(force)
apply(force)
by force
lemma type_wf_preserved:
@ -527,8 +526,6 @@ lemma type_wf_drop: "type_wf h \<Longrightarrow> type_wf (Heap (fmdrop ptr (the_
apply(auto simp add: type_wf_def ElementMonad.type_wf_drop
l_type_wf_def\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a.a_type_wf_def)[1]
using type_wf_drop
by (metis (no_types, lifting) ElementClass.type_wf\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ObjectClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf
character_data_ptr_kinds_commutes finite_set_in fmlookup_drop get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def
get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def node_ptr_kinds_commutes object_ptr_kinds_code5)
by (metis (no_types, lifting) ElementClass.type_wf\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t ObjectClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf character_data_ptr_kinds_commutes finite_set_in fmlookup_drop get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def node_ptr_kinds_commutes object_ptr_kinds_code5)
end


@ -55,10 +55,10 @@ lemma document_ptr_kinds_M_eq:
lemma document_ptr_kinds_M_reads:
"reads (\<Union>object_ptr. {preserved (get_M\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t object_ptr RObject.nothing)}) document_ptr_kinds_M h h'"
using object_ptr_kinds_M_reads
apply (simp add: reads_def object_ptr_kinds_M_defs document_ptr_kinds_M_defs
document_ptr_kinds_def preserved_def cong del: image_cong_simp)
apply (metis (mono_tags, hide_lams) object_ptr_kinds_preserved_small old.unit.exhaust preserved_def)
done
apply(simp add: reads_def object_ptr_kinds_M_defs document_ptr_kinds_M_defs
document_ptr_kinds_def preserved_def)
by (smt object_ptr_kinds_preserved_small preserved_def unit_all_impI)
global_interpretation l_dummy defines get_M\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t = "l_get_M.a_get_M get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t" .
lemma get_M_is_l_get_M: "l_get_M get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t type_wf document_ptr_kinds"
@ -322,8 +322,7 @@ lemma type_wf_put_ptr_in_heap_E:
using assms
apply(auto simp add: type_wf_defs elim!: CharacterDataMonad.type_wf_put_ptr_in_heap_E
split: option.splits if_splits)[1]
by (metis (no_types, lifting) CharacterDataClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf bind.bind_lunit get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def
is_document_kind_def notin_fset option.exhaust_sel)
by (metis (no_types, lifting) CharacterDataClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf bind.bind_lunit get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def is_document_kind_def notin_fset option.exhaust_sel)
@ -348,8 +347,8 @@ lemma new_element_is_l_new_element [instances]:
using l_new_element.intro new_element_type_wf_preserved
by blast
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_name_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_name_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_type_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_type_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
apply(auto simp add: ElementMonad.put_M_defs put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def
DocumentClass.type_wf\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a DocumentClass.type_wf\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t
DocumentClass.type_wf\<^sub>N\<^sub>o\<^sub>d\<^sub>e DocumentClass.type_wf\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t
@ -361,9 +360,7 @@ lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_name_typ
apply(auto simp add: is_node_kind_def type_wf_defs ElementClass.type_wf_defs NodeClass.type_wf_defs
ElementMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def
bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse
option.distinct(1) option.simps(3))
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse option.distinct(1) option.simps(3))
by (metis fmember.rep_eq)
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_child_nodes_type_wf_preserved [simp]:
@ -379,9 +376,7 @@ lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_child_nodes_
apply(auto simp add: is_node_kind_def type_wf_defs ElementClass.type_wf_defs
NodeClass.type_wf_defs ElementMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def
bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse
option.distinct(1) option.simps(3))
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse option.distinct(1) option.simps(3))
by (metis fmember.rep_eq)
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_attrs_type_wf_preserved [simp]:
@ -397,9 +392,7 @@ lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_attrs_type_w
apply(auto simp add: is_node_kind_def type_wf_defs ElementClass.type_wf_defs
NodeClass.type_wf_defs ElementMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def
bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse
option.distinct(1) option.simps(3))
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse option.distinct(1) option.simps(3))
by (metis fmember.rep_eq)
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_shadow_root_opt_type_wf_preserved [simp]:
@ -415,9 +408,7 @@ lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_shadow_root_
apply(auto simp add: is_node_kind_def type_wf_defs ElementClass.type_wf_defs
NodeClass.type_wf_defs ElementMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def
bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse
option.distinct(1) option.simps(3))
apply (metis NodeClass.a_type_wf_def NodeClass.get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_type_wf ObjectClass.a_type_wf_def bind.bind_lzero finite_set_in get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def l_type_wf_def\<^sub>N\<^sub>o\<^sub>d\<^sub>e.a_type_wf_def option.collapse option.distinct(1) option.simps(3))
by (metis fmember.rep_eq)
lemma new_character_data_type_wf_preserved [simp]:
@ -449,7 +440,7 @@ lemma put_M\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^su
apply(auto simp add: is_node_kind_def type_wf_defs ElementClass.type_wf_defs
NodeClass.type_wf_defs CharacterDataMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply (metis bind.bind_lzero finite_set_in get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def option.distinct(1) option.exhaust_sel)
apply (metis bind.bind_lzero finite_set_in get\<^sub>C\<^sub>h\<^sub>a\<^sub>r\<^sub>a\<^sub>c\<^sub>t\<^sub>e\<^sub>r\<^sub>D\<^sub>a\<^sub>t\<^sub>a_def option.distinct(1) option.exhaust_sel)
by (metis finite_set_in)
@ -467,8 +458,7 @@ lemma new_document_type_wf_preserved [simp]: "h \<turnstile> new_document \<righ
split: option.splits)[1]
using document_ptrs_def apply fastforce
apply (simp add: is_document_kind_def)
apply (metis Suc_n_not_le_n document_ptr.sel(1) document_ptrs_def fMax_ge ffmember_filter
fimage_eqI is_document_ptr_ref)
apply (metis Suc_n_not_le_n document_ptr.sel(1) document_ptrs_def fMax_ge ffmember_filter fimage_eqI is_document_ptr_ref)
done
locale l_new_document = l_type_wf +
@ -508,7 +498,7 @@ lemma put_M\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_doct
apply(auto simp add: is_document_kind_def type_wf_defs ElementClass.type_wf_defs
NodeClass.type_wf_defs ElementMonad.get_M_defs ObjectClass.type_wf_defs
CharacterDataClass.type_wf_defs split: option.splits)[1]
apply(auto simp add: get_M_defs)[1]
apply(auto simp add: get_M_defs)
by (metis (mono_tags) error_returns_result finite_set_in option.exhaust_sel option.simps(4))
lemma put_M\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_document_element_type_wf_preserved [simp]:
@ -609,6 +599,5 @@ lemma type_wf_drop: "type_wf h \<Longrightarrow> type_wf (Heap (fmdrop ptr (the_
apply(auto simp add: type_wf_defs)[1]
using type_wf_drop
apply blast
by (metis (no_types, lifting) CharacterDataClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf CharacterDataMonad.type_wf_drop
document_ptr_kinds_commutes finite_set_in fmlookup_drop get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def heap.sel)
by (metis (no_types, lifting) CharacterDataClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf CharacterDataMonad.type_wf_drop document_ptr_kinds_commutes finite_set_in fmlookup_drop get\<^sub>D\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def heap.sel)
end


@ -32,7 +32,7 @@ text\<open>In this theory, we introduce the monadic method setup for the Element
theory ElementMonad
imports
NodeMonad
"ElementClass"
"../classes/ElementClass"
begin
type_synonym ('object_ptr, 'node_ptr, 'element_ptr, 'character_data_ptr, 'document_ptr,
@ -54,10 +54,9 @@ lemma element_ptr_kinds_M_eq:
lemma element_ptr_kinds_M_reads:
"reads (\<Union>element_ptr. {preserved (get_M\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t element_ptr RObject.nothing)}) element_ptr_kinds_M h h'"
apply (simp add: reads_def node_ptr_kinds_M_defs element_ptr_kinds_M_defs element_ptr_kinds_def
node_ptr_kinds_M_reads preserved_def cong del: image_cong_simp)
apply (metis (mono_tags, hide_lams) node_ptr_kinds_small old.unit.exhaust preserved_def)
done
apply(simp add: reads_def node_ptr_kinds_M_defs element_ptr_kinds_M_defs element_ptr_kinds_def
node_ptr_kinds_M_reads preserved_def)
by (smt filter_fset node_ptr_kinds_small preserved_def unit_all_impI)
global_interpretation l_dummy defines get_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t = "l_get_M.a_get_M get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t" .
lemma get_M_is_l_get_M: "l_get_M get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t type_wf element_ptr_kinds"
@ -233,10 +232,10 @@ lemma new_element_child_nodes:
by(auto simp add: get_M_defs new_element_def new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def
split: option.splits prod.splits elim!: bind_returns_result_E bind_returns_heap_E)
lemma new_element_tag_name:
lemma new_element_tag_type:
assumes "h \<turnstile> new_element \<rightarrow>\<^sub>h h'"
assumes "h \<turnstile> new_element \<rightarrow>\<^sub>r new_element_ptr"
shows "h' \<turnstile> get_M new_element_ptr tag_name \<rightarrow>\<^sub>r ''''"
shows "h' \<turnstile> get_M new_element_ptr tag_type \<rightarrow>\<^sub>r ''''"
using assms
by(auto simp add: get_M_defs new_element_def new\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def Let_def
split: option.splits prod.splits elim!: bind_returns_result_E bind_returns_heap_E)
@ -300,7 +299,7 @@ lemma type_wf_put_ptr_not_in_heap_E:
shows "type_wf h"
using assms
apply(auto simp add: type_wf_defs elim!: NodeMonad.type_wf_put_ptr_not_in_heap_E
split: option.splits if_splits)[1]
split: option.splits if_splits)
using assms(2) node_ptr_kinds_commutes by blast
lemma type_wf_put_ptr_in_heap_E:
@ -323,7 +322,7 @@ lemma new_element_type_wf_preserved [simp]: "h \<turnstile> new_element \<righta
get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def
split: prod.splits if_splits elim!: bind_returns_heap_E)[1]
apply (metis element_ptr_kinds_commutes element_ptrs_def fempty_iff ffmember_filter finite_set_in
is_element_ptr_ref)
is_element_ptr_ref)
apply (metis element_ptrs_def fempty_iff ffmember_filter finite_set_in is_element_ptr_ref)
apply (metis (no_types, lifting) Suc_n_not_le_n element_ptr.sel(1) element_ptr_kinds_commutes
element_ptrs_def fMax_ge ffmember_filter fimage_eqI is_element_ptr_ref notin_fset)
@ -338,8 +337,8 @@ lemma new_element_is_l_new_element: "l_new_element type_wf"
using l_new_element.intro new_element_type_wf_preserved
by blast
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_name_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_name_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
lemma put_M\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_tag_type_type_wf_preserved [simp]:
"h \<turnstile> put_M element_ptr tag_type_update v \<rightarrow>\<^sub>h h' \<Longrightarrow> type_wf h = type_wf h'"
apply(auto simp add: type_wf_defs NodeClass.type_wf_defs ObjectClass.type_wf_defs
Let_def put_M_defs get_M_defs put\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def put\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def put\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def
get\<^sub>E\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t_def get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_def


@ -75,7 +75,7 @@ global_interpretation l_get_M\<^sub>N\<^sub>o\<^sub>d\<^sub>e_lemmas type_wf by
lemma node_ptr_kinds_M_reads:
"reads (\<Union>object_ptr. {preserved (get_M\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t object_ptr RObject.nothing)}) node_ptr_kinds_M h h'"
using object_ptr_kinds_M_reads
apply (simp add: reads_def node_ptr_kinds_M_defs node_ptr_kinds_def
apply(simp add: reads_def node_ptr_kinds_M_defs node_ptr_kinds_def
object_ptr_kinds_M_reads preserved_def)
by (smt object_ptr_kinds_preserved_small preserved_def unit_all_impI)
@ -165,7 +165,7 @@ lemma type_wf_put_ptr_in_heap_E:
assumes "is_node_ptr_kind ptr \<Longrightarrow> is_node_kind (the (get ptr h))"
shows "type_wf h"
using assms
apply(auto simp add: type_wf_defs split: option.splits if_splits)[1]
apply(auto simp add: type_wf_defs split: option.splits if_splits)
by (metis ObjectClass.get\<^sub>O\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t_type_wf bind.bind_lunit finite_set_in get\<^sub>N\<^sub>o\<^sub>d\<^sub>e_def is_node_kind_def option.exhaust_sel)
@ -192,7 +192,7 @@ lemma type_wf_preserved_small:
assumes "\<And>node_ptr. preserved (get_M\<^sub>N\<^sub>o\<^sub>d\<^sub>e node_ptr RNode.nothing) h h'"
shows "type_wf h = type_wf h'"
using type_wf_preserved allI[OF assms(2), of id, simplified]
apply(auto simp add: type_wf_defs)[1]
apply(auto simp add: type_wf_defs)
apply(auto simp add: preserved_def get_M_defs node_ptr_kinds_small[OF assms(1)]
split: option.splits)[1]
apply (metis notin_fset option.simps(3))


@ -34,7 +34,7 @@ We only include them here, as they are required for future work and they cannot
following the object-oriented extensibility of our data model.\<close>
theory ShadowRootPointer
imports
"DocumentPointer"
DocumentPointer
begin
datatype 'shadow_root_ptr shadow_root_ptr = Ref (the_ref: ref) | Ext 'shadow_root_ptr
@ -79,14 +79,13 @@ adhoc_overloading cast cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^su
consts is_shadow_root_ptr_kind :: 'a
definition is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) document_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr =
(case cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of Some _ \<Rightarrow> True | _ \<Rightarrow> False)"
"is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr = (case cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of Some _ \<Rightarrow> True | _ \<Rightarrow> False)"
abbreviation is_shadow_root_ptr_kind\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) object_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr_kind\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> (case cast ptr of
Some document_ptr \<Rightarrow> is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr
| None \<Rightarrow> False)"
Some document_ptr \<Rightarrow> is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr
| None \<Rightarrow> False)"
adhoc_overloading is_shadow_root_ptr_kind is_shadow_root_ptr_kind\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
lemmas is_shadow_root_ptr_kind_def = is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
@ -99,8 +98,8 @@ definition is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^
abbreviation is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) document_ptr \<Rightarrow> bool"
where
"is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> (case cast ptr of
Some shadow_root_ptr \<Rightarrow> is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr
| _ \<Rightarrow> False)"
Some shadow_root_ptr \<Rightarrow> is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr
| _ \<Rightarrow> False)"
abbreviation is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r :: "(_) object_ptr \<Rightarrow> bool"
where
@ -108,18 +107,15 @@ abbreviation is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\
Some document_ptr \<Rightarrow> is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr
| None \<Rightarrow> False)"
adhoc_overloading is_shadow_root_ptr is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
adhoc_overloading is_shadow_root_ptr is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
lemmas is_shadow_root_ptr_def = is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
consts is_shadow_root_ptr_ext :: 'a
abbreviation "is_shadow_root_ptr_ext\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> \<not> is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr"
abbreviation "is_shadow_root_ptr_ext\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv>
is_shadow_root_ptr_kind ptr \<and> (\<not> is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr)"
abbreviation "is_shadow_root_ptr_ext\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> is_shadow_root_ptr_kind ptr \<and> (\<not> is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr)"
abbreviation "is_shadow_root_ptr_ext\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv>
is_shadow_root_ptr_kind ptr \<and> (\<not> is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr)"
abbreviation "is_shadow_root_ptr_ext\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<equiv> is_shadow_root_ptr_kind ptr \<and> (\<not> is_shadow_root_ptr\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr)"
adhoc_overloading is_shadow_root_ptr_ext is_shadow_root_ptr_ext\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r is_shadow_root_ptr_ext\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r
@ -143,8 +139,7 @@ lemma is_shadow_root_ptr_ref [simp]: "is_shadow_root_ptr (shadow_root_ptr.Ref n)
by(simp add: is_shadow_root_ptr\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma shadow_root_ptr_casts_commute [simp]:
"cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr =
Some shadow_root_ptr \<longleftrightarrow> cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr = document_ptr"
"cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr = Some shadow_root_ptr \<longleftrightarrow> cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r shadow_root_ptr = document_ptr"
unfolding cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def cast\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def
by(auto split: document_ptr.splits sum.splits)
@ -183,8 +178,7 @@ lemma cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub
"cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r (document_ptr.Ext (Inr (Inr document_ext_ptr))) = None"
by(simp add: cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>s\<^sub>h\<^sub>a\<^sub>d\<^sub>o\<^sub>w\<^sub>_\<^sub>r\<^sub>o\<^sub>o\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r_def)
lemma is_shadow_root_ptr_implies_kind [dest]:
"is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<Longrightarrow> is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr"
lemma is_shadow_root_ptr_implies_kind [dest]: "is_shadow_root_ptr\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr \<Longrightarrow> is_shadow_root_ptr_kind\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr"
by(auto split: option.splits)
lemma is_shadow_root_ptr_kind_not_document_ptr [simp]: "\<not>is_shadow_root_ptr_kind (document_ptr.Ref x)"


@ -93,8 +93,7 @@ definition
where
"returns_result_heap h p r h' \<longleftrightarrow> h \<turnstile> p \<rightarrow>\<^sub>r r \<and> h \<turnstile> p \<rightarrow>\<^sub>h h'"
lemma return_result_heap_code [code]:
"returns_result_heap h p r h' \<longleftrightarrow> (case h \<turnstile> p of Inr (r', h'') \<Rightarrow> r = r' \<and> h' = h'' | Inl _ \<Rightarrow> False)"
lemma [code]: "returns_result_heap h p r h' \<longleftrightarrow> (case h \<turnstile> p of Inr (r', h'') \<Rightarrow> r = r' \<and> h' = h'' | Inl _ \<Rightarrow> False)"
by(auto simp add: returns_result_heap_def returns_result_def returns_heap_def split: sum.splits)
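(* Illustrative addition, not part of this changeset: an introduction rule spelling out
   that returns_result_heap is exactly the conjunction of the result relation and the
   heap relation from the definition above. The lemma name is made up for this sketch. *)
lemma returns_result_heap_intro_example:
  assumes "h \<turnstile> p \<rightarrow>\<^sub>r r" and "h \<turnstile> p \<rightarrow>\<^sub>h h'"
  shows "returns_result_heap h p r h'"
  using assms by (simp add: returns_result_heap_def)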
fun select_result_heap ("|(_)|\<^sub>r\<^sub>h")
@ -453,10 +452,32 @@ fun forall_M :: "('y \<Rightarrow> ('heap, 'e, 'result) prog) \<Rightarrow> 'y l
P x;
forall_M P xs
}"
(*
lemma forall_M_elim:
assumes "h \<turnstile> forall_M P xs \<rightarrow>\<^sub>r True" and "\<And>x h. x \<in> set xs \<Longrightarrow> pure (P x) h"
shows "\<forall>x \<in> set xs. h \<turnstile> P x \<rightarrow>\<^sub>r True"
apply(insert assms, induct xs)
apply(simp)
apply(auto elim!: bind_returns_result_E)[1]
by (metis (full_types) pure_returns_heap_eq) *)
lemma pure_forall_M_I: "(\<And>x. x \<in> set xs \<Longrightarrow> pure (P x) h) \<Longrightarrow> pure (forall_M P xs) h"
apply(induct xs)
by(auto intro!: bind_pure_I)
(*
lemma forall_M_pure_I:
assumes "\<And>x. x \<in> set xs \<Longrightarrow> h \<turnstile> P x \<rightarrow>\<^sub>r True" and "\<And>x h. x \<in> set xs \<Longrightarrow> pure (P x)h"
shows "h \<turnstile> forall_M P xs \<rightarrow>\<^sub>r True"
apply(insert assms, induct xs)
apply(simp)
by(fastforce)
lemma forall_M_pure_eq:
assumes "\<And>x. x \<in> set xs \<Longrightarrow> h \<turnstile> P x \<rightarrow>\<^sub>r True \<longleftrightarrow> h' \<turnstile> P x \<rightarrow>\<^sub>r True"
and "\<And>x h. x \<in> set xs \<Longrightarrow> pure (P x) h"
shows "(h \<turnstile> forall_M P xs \<rightarrow>\<^sub>r True) \<longleftrightarrow> h' \<turnstile> forall_M P xs \<rightarrow>\<^sub>r True"
using assms
by(auto intro!: forall_M_pure_I dest!: forall_M_elim) *)
subsection \<open>Fold\<close>
@ -485,8 +506,7 @@ lemma filter_M_pure_I [intro]: "(\<And>x. x \<in> set xs \<Longrightarrow> pure
apply(induct xs)
by(auto intro!: bind_pure_I)
lemma filter_M_is_OK_I [intro]:
"(\<And>x. x \<in> set xs \<Longrightarrow> h \<turnstile> ok (P x)) \<Longrightarrow> (\<And>x. x \<in> set xs \<Longrightarrow> pure (P x) h) \<Longrightarrow> h \<turnstile> ok (filter_M P xs)"
lemma filter_M_is_OK_I [intro]: "(\<And>x. x \<in> set xs \<Longrightarrow> h \<turnstile> ok (P x)) \<Longrightarrow> (\<And>x. x \<in> set xs \<Longrightarrow> pure (P x) h) \<Longrightarrow> h \<turnstile> ok (filter_M P xs)"
apply(induct xs)
apply(simp)
by(auto intro!: bind_is_OK_pure_I)
@ -498,8 +518,7 @@ lemma filter_M_not_more_elements:
by(auto elim!: bind_returns_result_E2 split: if_splits intro!: set_ConsD)
lemma filter_M_in_result_if_ok:
assumes "h \<turnstile> filter_M P xs \<rightarrow>\<^sub>r ys" and "\<And>h x. x \<in> set xs \<Longrightarrow> pure (P x) h" and "x \<in> set xs" and
"h \<turnstile> P x \<rightarrow>\<^sub>r True"
assumes "h \<turnstile> filter_M P xs \<rightarrow>\<^sub>r ys" and "\<And>h x. x \<in> set xs \<Longrightarrow> pure (P x) h" and "x \<in> set xs" and "h \<turnstile> P x \<rightarrow>\<^sub>r True"
shows "x \<in> set ys"
apply(insert assms, induct xs arbitrary: ys)
apply(simp)
@ -711,9 +730,8 @@ definition preserved :: "('heap, 'e, 'result) prog \<Rightarrow> 'heap \<Rightar
where
"preserved f h h' \<longleftrightarrow> (\<forall>x. h \<turnstile> f \<rightarrow>\<^sub>r x \<longleftrightarrow> h' \<turnstile> f \<rightarrow>\<^sub>r x)"
lemma preserved_code [code]:
"preserved f h h' = (((h \<turnstile> ok f) \<and> (h' \<turnstile> ok f) \<and> |h \<turnstile> f|\<^sub>r = |h' \<turnstile> f|\<^sub>r) \<or> ((\<not>h \<turnstile> ok f) \<and> (\<not>h' \<turnstile> ok f)))"
apply(auto simp add: preserved_def)[1]
lemma preserved_code [code]: "preserved f h h' = (((h \<turnstile> ok f) \<and> (h' \<turnstile> ok f) \<and> |h \<turnstile> f|\<^sub>r = |h' \<turnstile> f|\<^sub>r) \<or> ((\<not>h \<turnstile> ok f) \<and> (\<not>h' \<turnstile> ok f)))"
apply(auto simp add: preserved_def)
apply (meson is_OK_returns_result_E is_OK_returns_result_I)+
done
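(* Illustrative addition, not part of this changeset: a preserved computation returns the
   same results on both heaps. This is immediate from preserved_def and only makes the
   intended reading of the definition explicit; the lemma name is made up for this sketch. *)
lemma preserved_agrees_example:
  assumes "preserved f h h'" and "h \<turnstile> f \<rightarrow>\<^sub>r x"
  shows "h' \<turnstile> f \<rightarrow>\<^sub>r x"
  using assms unfolding preserved_def by blast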
@ -750,16 +768,13 @@ lemma reads_bind_pure:
dest: pure_returns_heap_eq
elim!: bind_returns_result_E)
lemma reads_insert_writes_set_left:
"\<forall>P \<in> S. reflp P \<and> transp P \<Longrightarrow> reads {getter} f h h' \<Longrightarrow> reads (insert getter S) f h h'"
lemma reads_insert_writes_set_left: "\<forall>P \<in> S. reflp P \<and> transp P \<Longrightarrow> reads {getter} f h h' \<Longrightarrow> reads (insert getter S) f h h'"
unfolding reads_def by simp
lemma reads_insert_writes_set_right:
"reflp getter \<Longrightarrow> transp getter \<Longrightarrow> reads S f h h' \<Longrightarrow> reads (insert getter S) f h h'"
lemma reads_insert_writes_set_right: "reflp getter \<Longrightarrow> transp getter \<Longrightarrow> reads S f h h' \<Longrightarrow> reads (insert getter S) f h h'"
unfolding reads_def by blast
lemma reads_subset:
"reads S f h h' \<Longrightarrow> \<forall>P \<in> S' - S. reflp P \<and> transp P \<Longrightarrow> S \<subseteq> S' \<Longrightarrow> reads S' f h h'"
lemma reads_subset: "reads S f h h' \<Longrightarrow> \<forall>P \<in> S' - S. reflp P \<and> transp P \<Longrightarrow> S \<subseteq> S' \<Longrightarrow> reads S' f h h'"
by(auto simp add: reads_def)
lemma return_reads [simp]: "reads {} (return x) h h'"


@ -276,9 +276,8 @@ structure Hide_Tvar : HIDE_TVAR = struct
val thy = Proof_Context.theory_of ctx
fun parse_ast ((Ast.Constant const)::[]) = (const,NONE)
| parse_ast ((Ast.Constant sort)::(Ast.Constant const)::[])
= (const,SOME sort)
| parse_ast _ = error("AST type not supported.")
| parse_ast (sort::(Ast.Constant const)::[]) = (const,SOME sort)
| parse_ast _ = error("AST type not supported.")
val (decorated_name, decorated_sort) = parse_ast ast
@ -295,9 +294,9 @@ structure Hide_Tvar : HIDE_TVAR = struct
let fun mk_tvar n =
case decorated_sort of
NONE => Ast.Variable(name_of_tvar n)
| SOME sort => Ast.Appl([Ast.Constant("_ofsort"),
| SOME sort => Ast.Appl([Ast.Constant("_ofsort"),
Ast.Variable(name_of_tvar n),
Ast.Constant(sort)])
sort])
in
map mk_tvar (#tvars default_info)
end


@ -0,0 +1,39 @@
(***********************************************************************************
* Copyright (c) 2016-2018 The University of Sheffield, UK
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
***********************************************************************************)
theory Testing_Utils
imports Main
begin
ML \<open>
val _ = Theory.setup
(Method.setup @{binding timed_code_simp}
(Scan.succeed (SIMPLE_METHOD' o (CHANGED_PROP oo (fn a => fn b => Timeout.apply (seconds 3600.0) (Code_Simp.dynamic_tac a b)))))
"simplification with code equations, aborts after 1 hour");
\<close>
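(* Illustrative addition, not part of this changeset: the new method is invoked like
   code_simp. The throwaway goal below only demonstrates the syntax and assumes that
   plain code_simp can already discharge it. *)
lemma "True \<or> False"
  by timed_code_simp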
end


@ -142,23 +142,19 @@ fun get_element_by_id_with_null :: "((_::linorder) object_ptr option) \<Rightarr
| "get_element_by_id_with_null _ _ = error SegmentationFault"
notation get_element_by_id_with_null ("_ . getElementById'(_')")
fun get_elements_by_class_name_with_null ::
"((_::linorder) object_ptr option) \<Rightarrow> string \<Rightarrow> (_, ((_) object_ptr option) list) dom_prog"
fun get_elements_by_class_name_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> string \<Rightarrow> (_, ((_) object_ptr option) list) dom_prog"
where
"get_elements_by_class_name_with_null (Some ptr) class_name =
get_elements_by_class_name ptr class_name \<bind> map_M (return \<circ> Some \<circ> cast\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r)"
notation get_elements_by_class_name_with_null ("_ . getElementsByClassName'(_')")
fun get_elements_by_tag_name_with_null ::
"((_::linorder) object_ptr option) \<Rightarrow> string \<Rightarrow> (_, ((_) object_ptr option) list) dom_prog"
fun get_elements_by_tag_name_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> string \<Rightarrow> (_, ((_) object_ptr option) list) dom_prog"
where
"get_elements_by_tag_name_with_null (Some ptr) tag =
get_elements_by_tag_name ptr tag \<bind> map_M (return \<circ> Some \<circ> cast\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r)"
"get_elements_by_tag_name_with_null (Some ptr) tag_name =
get_elements_by_tag_name ptr tag_name \<bind> map_M (return \<circ> Some \<circ> cast\<^sub>e\<^sub>l\<^sub>e\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r)"
notation get_elements_by_tag_name_with_null ("_ . getElementsByTagName'(_')")
fun insert_before_with_null ::
"((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow>
(_, ((_) object_ptr option)) dom_prog"
fun insert_before_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"insert_before_with_null (Some ptr) (Some child_obj) ref_child_obj_opt = (case cast child_obj of
Some child \<Rightarrow> do {
@ -169,8 +165,7 @@ fun insert_before_with_null ::
| None \<Rightarrow> error HierarchyRequestError)"
notation insert_before_with_null ("_ . insertBefore'(_, _')")
fun append_child_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow>
(_, unit) dom_prog"
fun append_child_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> (_, unit) dom_prog"
where
"append_child_with_null (Some ptr) (Some child_obj) = (case cast child_obj of
Some child \<Rightarrow> append_child ptr child
@ -185,8 +180,7 @@ fun get_body :: "((_::linorder) object_ptr option) \<Rightarrow> (_, ((_) object
}"
notation get_body ("_ . body")
fun get_document_element_with_null :: "((_::linorder) object_ptr option) \<Rightarrow>
(_, ((_) object_ptr option)) dom_prog"
fun get_document_element_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"get_document_element_with_null (Some ptr) = (case cast\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r ptr of
Some document_ptr \<Rightarrow> do {
@ -196,16 +190,14 @@ fun get_document_element_with_null :: "((_::linorder) object_ptr option) \<Right
| None \<Rightarrow> None)})"
notation get_document_element_with_null ("_ . documentElement")
fun get_owner_document_with_null :: "((_::linorder) object_ptr option) \<Rightarrow>
(_, ((_) object_ptr option)) dom_prog"
fun get_owner_document_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"get_owner_document_with_null (Some ptr) = (do {
document_ptr \<leftarrow> get_owner_document ptr;
return (Some (cast\<^sub>d\<^sub>o\<^sub>c\<^sub>u\<^sub>m\<^sub>e\<^sub>n\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r\<^sub>2\<^sub>o\<^sub>b\<^sub>j\<^sub>e\<^sub>c\<^sub>t\<^sub>_\<^sub>p\<^sub>t\<^sub>r document_ptr))})"
notation get_owner_document_with_null ("_ . ownerDocument")
fun remove_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow>
(_, ((_) object_ptr option)) dom_prog"
fun remove_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"remove_with_null (Some ptr) (Some child) = (case cast child of
Some child_node \<Rightarrow> do {
@ -216,8 +208,7 @@ fun remove_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) ob
| "remove_with_null _ None = error TypeError"
notation remove_with_null ("_ . remove'(')")
fun remove_child_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow>
(_, ((_) object_ptr option)) dom_prog"
fun remove_child_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"remove_child_with_null (Some ptr) (Some child) = (case cast child of
Some child_node \<Rightarrow> do {
@ -231,7 +222,7 @@ notation remove_child_with_null ("_ . removeChild")
fun get_tag_name_with_null :: "((_) object_ptr option) \<Rightarrow> (_, attr_value) dom_prog"
where
"get_tag_name_with_null (Some ptr) = (case cast ptr of
Some element_ptr \<Rightarrow> get_M element_ptr tag_name)"
Some element_ptr \<Rightarrow> get_M element_ptr tag_type)"
notation get_tag_name_with_null ("_ . tagName")
abbreviation "remove_attribute_with_null ptr k \<equiv> set_attribute_with_null2 ptr k None"
@ -265,8 +256,7 @@ fun first_child_with_null :: "((_) object_ptr option) \<Rightarrow> (_, ((_) obj
| None \<Rightarrow> None)}"
notation first_child_with_null ("_ . firstChild")
fun adopt_node_with_null ::
"((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow>(_, ((_) object_ptr option)) dom_prog"
fun adopt_node_with_null :: "((_::linorder) object_ptr option) \<Rightarrow> ((_) object_ptr option) \<Rightarrow> (_, ((_) object_ptr option)) dom_prog"
where
"adopt_node_with_null (Some ptr) (Some child) = (case cast ptr of
Some document_ptr \<Rightarrow> (case cast child of
@ -276,8 +266,7 @@ fun adopt_node_with_null ::
notation adopt_node_with_null ("_ . adoptNode'(_')")
definition createTestTree ::
"((_::linorder) object_ptr option) \<Rightarrow> (_, (string \<Rightarrow> (_, ((_) object_ptr option)) dom_prog)) dom_prog"
definition createTestTree :: "((_::linorder) object_ptr option) \<Rightarrow> (_, (string \<Rightarrow> (_, ((_) object_ptr option)) dom_prog)) dom_prog"
where
"createTestTree ref = return (\<lambda>id. get_element_by_id_with_null ref id)"


@ -29,15 +29,15 @@
(* This file is automatically generated, please do not modify! *)
section\<open>Testing Document\_adoptNode\<close>
text\<open>This theory contains the test cases for Document\_adoptNode.\<close>
section\<open>Testing Document_adoptNode\<close>
text\<open>This theory contains the test cases for Document_adoptNode.\<close>
theory Document_adoptNode
imports
"Core_DOM_BaseTest"
begin
definition Document_adoptNode_heap :: heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l where
definition Document_adoptNode_heap :: heap⇩f⇩i⇩n⇩a⇩l where
"Document_adoptNode_heap = create_heap [(cast (document_ptr.Ref 1), cast (create_document_obj html (Some (cast (element_ptr.Ref 1))) [])),
(cast (element_ptr.Ref 1), cast (create_element_obj ''html'' [cast (element_ptr.Ref 2), cast (element_ptr.Ref 8)] fmempty None)),
(cast (element_ptr.Ref 2), cast (create_element_obj ''head'' [cast (element_ptr.Ref 3), cast (element_ptr.Ref 4), cast (element_ptr.Ref 5), cast (element_ptr.Ref 6), cast (element_ptr.Ref 7)] fmempty None)),


@ -29,15 +29,15 @@
(* This file is automatically generated, please do not modify! *)
section\<open>Testing Document\_getElementById\<close>
text\<open>This theory contains the test cases for Document\_getElementById.\<close>
section\<open>Testing Document_getElementById\<close>
text\<open>This theory contains the test cases for Document_getElementById.\<close>
theory Document_getElementById
imports
"Core_DOM_BaseTest"
begin
definition Document_getElementById_heap :: heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l where
definition Document_getElementById_heap :: heap⇩f⇩i⇩n⇩a⇩l where
"Document_getElementById_heap = create_heap [(cast (document_ptr.Ref 1), cast (create_document_obj html (Some (cast (element_ptr.Ref 1))) [])),
(cast (element_ptr.Ref 1), cast (create_element_obj ''html'' [cast (element_ptr.Ref 2), cast (element_ptr.Ref 9)] fmempty None)),
(cast (element_ptr.Ref 2), cast (create_element_obj ''head'' [cast (element_ptr.Ref 3), cast (element_ptr.Ref 4), cast (element_ptr.Ref 5), cast (element_ptr.Ref 6), cast (element_ptr.Ref 7), cast (element_ptr.Ref 8)] fmempty None)),


@ -29,15 +29,15 @@
(* This file is automatically generated, please do not modify! *)
section\<open>Testing Node\_insertBefore\<close>
text\<open>This theory contains the test cases for Node\_insertBefore.\<close>
section\<open>Testing Node_insertBefore\<close>
text\<open>This theory contains the test cases for Node_insertBefore.\<close>
theory Node_insertBefore
imports
"Core_DOM_BaseTest"
begin
definition Node_insertBefore_heap :: heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l where
definition Node_insertBefore_heap :: heap⇩f⇩i⇩n⇩a⇩l where
"Node_insertBefore_heap = create_heap [(cast (document_ptr.Ref 1), cast (create_document_obj html (Some (cast (element_ptr.Ref 1))) [])),
(cast (element_ptr.Ref 1), cast (create_element_obj ''html'' [cast (element_ptr.Ref 2), cast (element_ptr.Ref 6)] fmempty None)),
(cast (element_ptr.Ref 2), cast (create_element_obj ''head'' [cast (element_ptr.Ref 3), cast (element_ptr.Ref 4), cast (element_ptr.Ref 5)] fmempty None)),


@ -29,15 +29,15 @@
(* This file is automatically generated, please do not modify! *)
section\<open>Testing Node\_removeChild\<close>
text\<open>This theory contains the test cases for Node\_removeChild.\<close>
section\<open>Testing Node_removeChild\<close>
text\<open>This theory contains the test cases for Node_removeChild.\<close>
theory Node_removeChild
imports
"Core_DOM_BaseTest"
begin
definition Node_removeChild_heap :: heap\<^sub>f\<^sub>i\<^sub>n\<^sub>a\<^sub>l where
definition Node_removeChild_heap :: heap⇩f⇩i⇩n⇩a⇩l where
"Node_removeChild_heap = create_heap [(cast (document_ptr.Ref 1), cast (create_document_obj html (Some (cast (element_ptr.Ref 1))) [])),
(cast (element_ptr.Ref 1), cast (create_element_obj ''html'' [cast (element_ptr.Ref 2), cast (element_ptr.Ref 7)] fmempty None)),
(cast (element_ptr.Ref 2), cast (create_element_obj ''head'' [cast (element_ptr.Ref 3), cast (element_ptr.Ref 4), cast (element_ptr.Ref 5), cast (element_ptr.Ref 6)] fmempty None)),