x64: update ainvs for asid_map removal

Gerwin Klein 2018-01-10 11:47:26 +11:00
parent 1fbcf1d3ea
commit 3bc1cb7f71
8 changed files with 59 additions and 565 deletions
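
For orientation, the central change visible further down in this diff is that, with the x64_asid_map component removed from the arch state, the valid_asid_map invariant degenerates to a trivial predicate (the updated definition appears verbatim in one of the hunks below):

definition
  valid_asid_map :: "'z::state_ext state \<Rightarrow> bool"
where
  "valid_asid_map \<equiv> \<lambda>s. True"

Most of the deletions that follow are proofs about the old asid_map-based definition that are no longer needed; the remaining occurrences are discharged by unfolding valid_asid_map_def.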

View File

@@ -31,9 +31,6 @@ bundle unfold_objects_asm =
kernel_object.split_asm[split]
arch_kernel_obj.split_asm[split]
definition
"valid_asid asid s \<equiv> x64_asid_map (arch_state s) asid \<noteq> None"
definition
"update_object ptr obj = do
kobj <- get_object ptr;
@@ -1549,52 +1546,6 @@ lemma image_in_rtrancl_image:
definition "set_diff A B \<equiv> A - B \<union> (B - A)"
lemma set_pt_asid_map [wp]:
"\<lbrace>valid_asid_map and
(\<lambda>s. (\<forall>a aobj ptr. aa_type nobj = AASIDPool \<and> ((a, ptr) \<in> graph_of (x64_asid_map (arch_state s)) \<and> ko_at (ArchObj aobj) p s)
\<longrightarrow> ([VSRef (ucast (asid_high_bits_of a)) None], p) \<in> vs_asid_refs (x64_asid_table (arch_state s))
\<longrightarrow> (VSRef (a && mask asid_low_bits) (Some AASIDPool), ptr) \<notin> set_diff (vs_refs (ArchObj nobj)) (vs_refs (ArchObj aobj))))
\<rbrace> update_object p (ArchObj nobj) \<lbrace>\<lambda>_. valid_asid_map\<rbrace>"
apply (simp add: valid_asid_map_def vspace_at_asid_def update_object_def set_object_def)
apply (wp get_object_wp)
apply clarsimp
apply (drule(1) bspec)
apply clarsimp
apply (rule decrease_predictI)
apply force
apply (simp add: vs_lookup_def)
apply (rule image_in_rtrancl_image)
apply (clarsimp simp: vs_lookup_def)
apply (erule vs_lookup1_wellformed.lookupE)
apply (clarsimp simp: vs_lookup1_def vs_asid_refs_def)
apply (clarsimp simp: Image_def vs_asid_refs_def dest!: vs_lookup1D)
apply (rule bexI[rotated])
apply simp
apply clarsimp
apply (case_tac "pa \<noteq> p")
apply (rule vs_lookup1I[rotated -1])
apply simp
apply (simp add: obj_at_def)
apply (clarsimp simp: lookup_leaf_def obj_at_def
dest!: vs_lookup1_wellformed.lookup_rtrancl_stepD vs_lookup1D)
apply (clarsimp simp: lookup_leaf_def obj_at_def
dest!: vs_lookup1_wellformed.lookup_rtrancl_stepD vs_lookup1D)
apply (clarsimp simp: vs_lookup1_def)
apply (drule_tac x = a in spec)
apply (clarsimp simp: obj_at_def dest!:a_type_is_aobj)
apply (rule ccontr)
apply (elim allE)
apply (erule impE)
apply (rule context_conjI)
apply (rule ccontr)
apply (case_tac nobj)
apply ((fastforce simp: aa_type_simps a_type_def vs_refs_def image_def
split: kernel_object.split_asm if_split_asm arch_kernel_obj.split_asm)+)[6]
apply fastforce
apply (clarsimp simp: image_def graph_of_def)
apply (fastforce simp: set_diff_def image_def)
done
declare graph_of_None_update[simp]
declare graph_of_Some_update[simp]
@@ -1887,9 +1838,6 @@ lemma update_object_state_hyp_refs[wp]:
lemma update_object_invs[wp]:
"\<lbrace> \<lambda>s. invs s \<and> valid_table_caps_aobj (caps_of_state s) (arch_state s) (ArchObj obj) ptr
\<and> (aa_type obj \<noteq> AASIDPool \<longrightarrow> ptr \<notin> global_refs s)
\<and> (\<forall>a aobj p. (aa_type obj = AASIDPool \<and> ((a, p) \<in> graph_of (x64_asid_map (arch_state s)) \<and> ko_at (ArchObj aobj) ptr s))
\<longrightarrow> ([VSRef (ucast (asid_high_bits_of a)) None], ptr) \<in> vs_asid_refs (x64_asid_table (arch_state s))
\<longrightarrow> (VSRef (a && mask asid_low_bits) (Some AASIDPool), p) \<notin> set_diff (vs_refs (ArchObj obj)) (vs_refs (ArchObj aobj)))
\<and> obj_at (\<lambda>ko. case obj of PageMapL4 pm \<Rightarrow> \<exists>pm'. ko = ArchObj (PageMapL4 pm') \<and> (\<forall>x\<in>kernel_mapping_slots. pm x = pm' x) | _ \<Rightarrow> True) ptr s
\<and> valid_kernel_mappings_if_pm (set (second_level_tables (arch_state s))) (ArchObj obj)
\<and> valid_global_objs_upd ptr obj (arch_state s)
@@ -1921,8 +1869,8 @@ lemma update_object_invs[wp]:
(\<exists>a b cap. caps_of_state s (a, b) = Some cap \<and> p \<in> obj_refs cap \<and> vs_cap_ref cap = Some rs))\<rbrace>
update_object ptr (ArchObj obj)
\<lbrace> \<lambda>_. invs \<rbrace>"
apply (clarsimp simp: invs_def valid_state_def valid_pspace_def)
apply_trace (wp valid_irq_node_typ valid_irq_handlers_lift update_aobj_valid_global_vspace_mappings)
apply (clarsimp simp: invs_def valid_state_def valid_pspace_def valid_asid_map_def)
apply (wp valid_irq_node_typ valid_irq_handlers_lift update_aobj_valid_global_vspace_mappings)
apply (clarsimp simp: valid_arch_state_def)
done
@@ -2179,17 +2127,6 @@ lemma vs_lookup_pages_arch_update:
apply simp
done
lemma vs_lookup_asid_map [iff]:
"vs_lookup (s\<lparr>arch_state := x64_asid_map_update f (arch_state s)\<rparr>) = vs_lookup s"
by (simp add: vs_lookup_arch_update)
lemma vs_lookup_pages_asid_map[iff]:
"vs_lookup_pages (s\<lparr>arch_state := x64_asid_map_update f (arch_state s)\<rparr>) =
vs_lookup_pages s"
by (simp add: vs_lookup_pages_arch_update)
lemma valid_vspace_objs_arch_update:
"x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s) \<Longrightarrow>
valid_vspace_objs (arch_state_update f s) = valid_vspace_objs s"
@@ -2828,48 +2765,6 @@ crunch v_ker_map[wp]: set_asid_pool "valid_kernel_mappings"
(ignore: set_object wp: set_object_v_ker_map crunch_wps)
lemma set_asid_pool_restrict_asid_map:
"\<lbrace>valid_asid_map and ko_at (ArchObj (ASIDPool ap)) p and
(\<lambda>s. \<forall>asid. asid \<le> mask asid_bits \<longrightarrow> ucast asid \<notin> S \<longrightarrow>
x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \<longrightarrow>
x64_asid_map (arch_state s) asid = None)\<rbrace>
set_asid_pool p (ap |` S) \<lbrace>\<lambda>_. valid_asid_map\<rbrace>"
apply (simp add: set_asid_pool_def valid_asid_map_def set_object_def update_object_def)
apply (wp get_object_wp)
apply (clarsimp split: kernel_object.splits arch_kernel_obj.splits
simp del: fun_upd_apply)
apply (drule(1) bspec)
apply (clarsimp simp: vspace_at_asid_def obj_at_def graph_of_def)
apply (drule subsetD, erule domI)
apply simp
apply (drule spec, drule(1) mp)
apply simp
apply (erule vs_lookupE)
apply (rule vs_lookupI, simp)
apply (clarsimp simp: vs_asid_refs_def graph_of_def)
apply (drule rtranclD)
apply (erule disjE, clarsimp)
apply clarsimp
apply (drule tranclD)
apply clarsimp
apply (rule r_into_rtrancl)
apply (drule vs_lookup1D)
apply clarsimp
apply (subst vs_lookup1_def)
apply (clarsimp simp: obj_at_def)
apply (erule rtranclE)
apply (clarsimp simp: vs_refs_def graph_of_def)
apply (rule image_eqI[where x="(_, _)"])
apply (simp add: split_def)
apply (clarsimp simp: restrict_map_def)
apply (drule ucast_up_inj, simp)
apply (simp add: mask_asid_low_bits_ucast_ucast)
apply clarsimp
apply (drule vs_lookup1_trans_is_append)
apply clarsimp
apply (drule vs_lookup1D)
by clarsimp
lemma set_asid_pool_cur [wp]:
"\<lbrace>\<lambda>s. P (cur_thread s)\<rbrace> set_asid_pool p a \<lbrace>\<lambda>_ s. P (cur_thread s)\<rbrace>"
unfolding set_asid_pool_def by (wpsimp wp: get_object_wp)
@@ -3006,24 +2901,18 @@ lemma set_asid_pool_zombies_state_hyp_refs [wp]:
by (simp add: update_object_state_hyp_refs)
lemma set_asid_pool_invs_restrict:
"\<lbrace>invs and ko_at (ArchObj (ASIDPool ap)) p and
(\<lambda>s. \<forall>asid. asid \<le> mask asid_bits \<longrightarrow> ucast asid \<notin> S \<longrightarrow>
x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \<longrightarrow>
x64_asid_map (arch_state s) asid = None)\<rbrace>
"\<lbrace>invs and ko_at (ArchObj (ASIDPool ap)) p \<rbrace>
set_asid_pool p (ap |` S) \<lbrace>\<lambda>_. invs\<rbrace>"
apply (simp add: invs_def valid_state_def valid_pspace_def
apply (simp add: invs_def valid_state_def valid_pspace_def valid_asid_map_def
valid_arch_caps_def del: set_asid_pool_simpler_def)
apply (wp valid_irq_node_typ set_asid_pool_typ_at
apply (wp valid_irq_node_typ
set_asid_pool_vspace_objs_unmap valid_irq_handlers_lift
set_asid_pool_vs_lookup_unmap set_asid_pool_restrict_asid_map
set_asid_pool_vs_lookup_unmap
| simp del: set_asid_pool_simpler_def)+
done
lemma set_asid_pool_invs_unmap:
"\<lbrace>invs and ko_at (ArchObj (ASIDPool ap)) p and
(\<lambda>s. \<forall>asid. asid \<le> mask asid_bits \<longrightarrow> ucast asid = x \<longrightarrow>
x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \<longrightarrow>
x64_asid_map (arch_state s) asid = None)\<rbrace>
"\<lbrace>invs and ko_at (ArchObj (ASIDPool ap)) p\<rbrace>
set_asid_pool p (ap(x := None)) \<lbrace>\<lambda>_. invs\<rbrace>"
using set_asid_pool_invs_restrict[where S="- {x}"]
by (simp add: restrict_map_def fun_upd_def if_flip)

View File

@@ -382,24 +382,7 @@ lemma valid_arch_caps:
lemma valid_asid_map':
"valid_asid_map s \<Longrightarrow> valid_asid_map s'"
using empty
apply (clarsimp simp: valid_asid_map_def s'_def)
apply (drule bspec, blast)
apply (clarsimp simp: vspace_at_asid_def)
apply (drule vs_lookup_2ConsD)
apply clarsimp
apply (erule vs_lookup_atE)
apply (drule vs_lookup1D)
apply clarsimp
apply (rule vs_lookupI[rotated])
apply (rule r_into_rtrancl)
apply (rule vs_lookup1I)
apply (fastforce simp: obj_at_def)
apply assumption
apply simp
apply (clarsimp simp: vs_asid_refs_def graph_of_def)
apply fastforce
done
by (clarsimp simp: valid_asid_map_def)
end

View File

@@ -397,16 +397,7 @@ lemma valid_kernel_mappings_detype[detype_invs_proofs]:
qed
lemma valid_asid_map_detype[detype_invs_proofs]: "valid_asid_map (detype (untyped_range cap) s)"
proof -
have "valid_asid_map s"
using invs by (simp add: invs_def valid_state_def)
thus ?thesis
apply (clarsimp simp: valid_asid_map_def arch_state_det)
apply (drule bspec)
apply (blast)
apply (clarsimp simp: vspace_at_asid_def vs_lookup)
done
qed
by (simp add: valid_asid_map_def)
lemma equal_kernel_mappings_detype[detype_invs_proofs]:
"equal_kernel_mappings (detype (untyped_range cap) s)"

View File

@@ -75,51 +75,26 @@ lemmas nat_to_cref_unat_of_bl = nat_to_cref_unat_of_bl' [OF _ refl]
lemma invs_x64_asid_table_unmap:
"invs s \<and> is_aligned base asid_low_bits \<and> base \<le> mask asid_bits
\<and> (\<forall>x\<in>set [0.e.2 ^ asid_low_bits - 1]. x64_asid_map (arch_state s) (base + x) = None)
\<and> tab = x64_asid_table (arch_state s)
\<longrightarrow> invs (s\<lparr>arch_state := arch_state s\<lparr>x64_asid_table := tab(asid_high_bits_of base := None)\<rparr>\<rparr>)"
apply (clarsimp simp: invs_def valid_state_def valid_arch_caps_def)
apply (strengthen valid_asid_map_unmap valid_vspace_objs_unmap_strg
apply (strengthen valid_vspace_objs_unmap_strg
valid_vs_lookup_unmap_strg valid_arch_state_unmap_strg)
apply (simp add: valid_irq_node_def valid_kernel_mappings_def
valid_global_objs_arch_update)
valid_global_objs_arch_update valid_asid_map_def)
apply (simp add: valid_table_caps_def valid_machine_state_def second_level_tables_def)
done
crunch asid_map[wp]: do_machine_op "valid_asid_map"
lemma delete_asid_pool_invs[wp]:
"\<lbrace>invs and K (base \<le> mask asid_bits)\<rbrace>
delete_asid_pool base pptr
\<lbrace>\<lambda>rv. invs\<rbrace>"
apply (simp add: delete_asid_pool_def)
apply wp
apply (strengthen invs_x64_asid_table_unmap)
apply simp
apply (rule hoare_vcg_conj_lift,
(rule mapM_invalidate[where ptr=pptr, simplified])?,
((wp mapM_wp' | simp add: if_apply_def2)+)[1])+
apply wp+
apply (strengthen invs_x64_asid_table_unmap)
apply (wpsimp wp: mapM_wp', simp)
apply (wp+)[3]
apply (clarsimp simp: is_aligned_mask[symmetric])
apply (rule conjI)
apply (rule vs_lookupI)
apply (erule vs_asid_refsI)
apply simp
apply clarsimp
done
lemma invalidate_asid_entry_invalidates:
"\<lbrace>valid_asid_map and valid_arch_state and K (asid \<le> mask asid_bits) and
(\<lambda>s. x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some ap)\<rbrace>
invalidate_asid_entry asid pm
\<lbrace>\<lambda>rv s. \<forall>asida. asida \<le> mask asid_bits \<longrightarrow>
ucast asida = (ucast asid :: 9 word) \<longrightarrow>
x64_asid_table (arch_state s) (asid_high_bits_of asida) =
Some ap \<longrightarrow>
x64_asid_map (arch_state s) asida = None\<rbrace>"
apply (simp add: invalidate_asid_entry_def)
apply (wp invalidate_asid_invalidates)
apply clarsimp
done
lemma delete_asid_invs[wp]:
@@ -127,10 +102,7 @@ lemma delete_asid_invs[wp]:
delete_asid asid pd
\<lbrace>\<lambda>rv. invs\<rbrace>"
apply (simp add: delete_asid_def cong: option.case_cong del: set_arch_obj_simps(5))
apply (wp set_asid_pool_invs_unmap | wpc)+
apply (wp invalidate_asid_entry_invalidates gets_wp hoare_vcg_const_imp_lift hoare_vcg_all_lift
hoare_vcg_imp_lift invalidate_asid_asid_map_None_inv)+
apply (clarsimp simp del: fun_upd_apply simp: invs_valid_asid_map)
apply (wpsimp wp: set_asid_pool_invs_unmap)
done
crunch vs_lookup[wp]: set_vm_root "\<lambda>s. P (vs_lookup s)"
@@ -1564,37 +1536,6 @@ lemma set_asid_pool_empty_table_lookup:
apply (simp add: vs_cap_ref_def)
done
lemma set_asid_pool_empty_valid_asid_map:
"\<lbrace>\<lambda>s. valid_asid_map s \<and> asid_pool_at p s
\<and> (\<forall>asid'. \<not> ([VSRef asid' None] \<rhd> p) s)
\<and> (\<forall>p'. \<not> ([VSRef (ucast (asid_high_bits_of base)) None] \<rhd> p') s)\<rbrace>
set_asid_pool p empty
\<lbrace>\<lambda>rv s. valid_asid_map (s\<lparr>arch_state := arch_state s\<lparr>x64_asid_table :=
x64_asid_table (arch_state s)(asid_high_bits_of base \<mapsto> p)\<rparr>\<rparr>)\<rbrace>"
apply (simp add: set_asid_pool_def set_object_def update_object_def)
apply (wp get_object_wp)
apply (clarsimp simp: valid_asid_map_def vspace_at_asid_def
dest!: graph_ofD
split: Structures_A.kernel_object.split_asm
arch_kernel_obj.split_asm )
apply (drule bspec, erule graph_ofI)
apply (clarsimp dest!: vs_lookup_2ConsD vs_lookup1D)
apply (case_tac "p = pa")
apply simp
apply (clarsimp elim!: vs_lookup_atE)
apply (rule vs_lookupI[rotated])
apply (rule r_into_rtrancl)
apply (rule_tac p=pa in vs_lookup1I)
apply (simp add: obj_at_def)
apply assumption
apply simp
apply (rule vs_asid_refsI)
apply (clarsimp simp: a_type_simps)
apply (drule vs_asid_refsI)+
apply (drule vs_lookupI, rule rtrancl_refl)
apply simp
done
lemma set_asid_pool_invs_table:
"\<lbrace>\<lambda>s. invs s \<and> asid_pool_at p s
\<and> (\<exists>p'. caps_of_state s p' = Some (ArchObjectCap (ASIDPoolCap p base)))
@@ -1603,28 +1544,23 @@ lemma set_asid_pool_invs_table:
set_asid_pool p empty
\<lbrace>\<lambda>x s. invs (s\<lparr>arch_state := arch_state s\<lparr>x64_asid_table :=
x64_asid_table (arch_state s)(asid_high_bits_of base \<mapsto> p)\<rparr>\<rparr>)\<rbrace>"
apply (simp add: invs_def valid_state_def valid_pspace_def valid_arch_caps_def del:set_asid_pool_simpler_def)
apply (rule hoare_pre)
apply (wp valid_irq_node_typ set_asid_pool_typ_at
set_asid_pool_empty_table_objs
valid_irq_handlers_lift set_asid_pool_empty_table_lookup
set_asid_pool_empty_valid_asid_map
apply (simp add: invs_def valid_state_def valid_pspace_def valid_arch_caps_def valid_asid_map_def
del: set_asid_pool_simpler_def)
apply (wp valid_irq_node_typ set_asid_pool_typ_at
set_asid_pool_empty_table_objs
valid_irq_handlers_lift set_asid_pool_empty_table_lookup
| strengthen valid_arch_state_table_strg)+
apply (clarsimp simp: conj_comms)
apply (rule context_conjI)
apply clarsimp
apply (frule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp)
apply clarsimp
apply (drule obj_ref_elemD)
apply (frule(2) unique_table_refsD,
unfold obj_refs.simps aobj_ref.simps option.simps,
assumption)
apply (clarsimp simp:vs_cap_ref_def table_cap_ref_def
split:cap.split_asm arch_cap.split_asm)
apply clarsimp
apply (drule vs_asid_refsI)
apply (drule vs_lookupI, rule rtrancl_refl)
apply simp
apply (frule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp)
apply clarsimp
apply (drule obj_ref_elemD)
apply (frule(2) unique_table_refsD,
unfold obj_refs.simps aobj_ref.simps option.simps,
assumption)
apply (clarsimp simp: vs_cap_ref_def table_cap_ref_def
split: cap.split_asm arch_cap.split_asm)
done
lemma delete_asid_pool_unmapped2:

View File

@@ -1219,10 +1219,7 @@ where
definition
valid_asid_map :: "'z::state_ext state \<Rightarrow> bool"
where
"valid_asid_map \<equiv>
\<lambda>s. dom (x64_asid_map (arch_state s)) \<subseteq> {0 .. mask asid_bits} \<and>
(\<forall>(asid, pd) \<in> graph_of (x64_asid_map (arch_state s)).
vspace_at_asid asid pd s \<and> asid \<noteq> 0)"
"valid_asid_map \<equiv> \<lambda>s. True"
section "Lemmas"

View File

@@ -486,10 +486,7 @@ lemma valid_global_objs_lift_weak:
lemma valid_asid_map_lift:
"\<lbrace>valid_asid_map\<rbrace> f \<lbrace>\<lambda>rv. valid_asid_map\<rbrace>"
apply (simp add: valid_asid_map_def)
apply (rule hoare_lift_Pf[where f="arch_state", OF _ arch])
apply (simp add: vspace_at_asid_def)
by (rule vs_lookup_vspace_obj_at_lift[OF aobj_at arch])
by (wpsimp simp: valid_asid_map_def)
lemma valid_kernel_mappings_lift:
"\<lbrace>valid_kernel_mappings\<rbrace> f \<lbrace>\<lambda>rv. valid_kernel_mappings\<rbrace>"
@@ -617,22 +614,6 @@ lemmas set_object_v_ker_map
= set_object_valid_kernel_mappings
[unfolded valid_kernel_mappings_if_pm_def]
lemma set_object_asid_map:
"\<lbrace>valid_asid_map and
obj_at (\<lambda>ko'. vs_refs ko' \<subseteq> vs_refs ko) p\<rbrace>
set_object p ko
\<lbrace>\<lambda>_. valid_asid_map\<rbrace>"
apply (simp add: valid_asid_map_def set_object_def)
apply wp
apply (clarsimp simp: vspace_at_asid_def simp del: fun_upd_apply)
apply (drule bspec, blast)
apply clarsimp
apply (rule vs_lookup_stateI, assumption)
apply (clarsimp simp: obj_at_def)
apply blast
apply simp
done
lemma set_object_equal_mappings:
"\<lbrace>\<lambda>s. equal_kernel_mappings s
\<and> (\<forall>pml4. ko = ArchObj (PageMapL4 pml4)

View File

@@ -389,7 +389,8 @@ lemma copy_global_invs_mappings_restricted:
apply (rule mapM_x_wp[where S="{x. get_pml4_index pptr_base \<le> x
\<and> x < 2 ^ (pml4_bits - word_size_bits)}"])
apply simp_all
apply (wpsimp wp: valid_irq_node_typ valid_irq_handlers_lift get_pml4e_wp simp: store_pml4e_def)
apply (wpsimp wp: valid_irq_node_typ valid_irq_handlers_lift get_pml4e_wp
simp: store_pml4e_def valid_asid_map_def)
apply (clarsimp simp: valid_global_objs_def)
apply (frule(1) invs_aligned_pml4D)
apply (frule shiftl_less_t2n)
@@ -970,21 +971,6 @@ lemma valid_kernel_mappings:
aobject_type.split)
done
lemma valid_asid_map:
"valid_asid_map s \<Longrightarrow> valid_asid_map s'"
apply (clarsimp simp: valid_asid_map_def)
apply (drule bspec, blast)
apply (clarsimp simp: vspace_at_asid_def)
apply (drule vs_lookup_2ConsD)
apply clarsimp
apply (erule vs_lookup_atE)
apply (drule vs_lookup1D)
apply clarsimp
apply (drule obj_at_pres)
apply (fastforce simp: vs_asid_refs_def graph_of_def
intro: vs_lookupI vs_lookup1I)
done
lemma equal_kernel_mappings:
"equal_kernel_mappings s \<Longrightarrow>
if ty = ArchObject PML4Obj
@@ -1107,7 +1093,7 @@ lemma post_retype_invs:
valid_vspace_objs' valid_irq_handlers
valid_mdb_rep2 mdb_and_revokable
valid_pspace cur_tcb only_idle
valid_kernel_mappings valid_asid_map
valid_kernel_mappings valid_asid_map_def
valid_global_vspace_mappings valid_ioc vms
pspace_in_kernel_window pspace_respects_device_region
cap_refs_respects_device_region
@@ -1206,7 +1192,7 @@ lemma invs_irq_state_independent:
valid_irq_node_def valid_irq_handlers_def valid_machine_state_def
valid_arch_caps_def valid_global_objs_def
valid_kernel_mappings_def equal_kernel_mappings_def
valid_asid_map_def vspace_at_asid_def
vspace_at_asid_def
pspace_in_kernel_window_def cap_refs_in_kernel_window_def
cur_tcb_def sym_refs_def state_refs_of_def
swp_def valid_irq_states_def)

View File

@@ -67,9 +67,6 @@ definition
crunch pspace_in_kernel_window[wp]: unmap_page, perform_page_invocation "pspace_in_kernel_window"
(simp: crunch_simps wp: crunch_wps)
definition
"vspace_at_uniq asid pd \<equiv> \<lambda>s. pd \<notin> ran (x64_asid_map (arch_state s) |` (- {asid}))"
crunch inv[wp]: find_vspace_for_asid_assert "P"
(simp: crunch_simps)
@@ -152,23 +149,6 @@ lemma vspace_at_asid_unique2:
done
lemma vspace_at_asid_uniq:
"\<lbrakk> vspace_at_asid asid pml4 s; asid \<le> mask asid_bits; valid_asid_map s;
unique_table_refs (caps_of_state s); valid_vs_lookup s;
valid_vspace_objs s; valid_global_objs s; valid_arch_state s \<rbrakk>
\<Longrightarrow> vspace_at_uniq asid pml4 s"
apply (clarsimp simp: vspace_at_uniq_def ran_option_map
dest!: ran_restrictD)
apply (clarsimp simp: valid_asid_map_def)
apply (drule bspec, erule graph_ofI)
apply clarsimp
apply (rule vspace_at_asid_unique, assumption+)
apply (drule subsetD, erule domI)
apply (simp add: mask_def)
apply (simp add: mask_def)
done
lemma valid_vs_lookupE:
"\<lbrakk> valid_vs_lookup s; \<And>ref p. (ref \<unrhd> p) s' \<Longrightarrow> (ref \<unrhd> p) s;
set (x64_global_pdpts (arch_state s)) \<subseteq> set (x64_global_pdpts (arch_state s'));
@@ -207,30 +187,6 @@ lemma find_vspace_for_asid_vspace_at_asid [wp]:
crunch valid_vs_lookup[wp]: do_machine_op "valid_vs_lookup"
lemma valid_asid_mapD:
"\<lbrakk> x64_asid_map (arch_state s) asid = Some pml4; valid_asid_map s \<rbrakk>
\<Longrightarrow> vspace_at_asid asid pml4 s \<and> asid \<le> mask asid_bits"
by (auto simp add: valid_asid_map_def graph_of_def)
lemma pml4_cap_vspace_at_uniq:
"\<lbrakk> cte_wp_at (op = (ArchObjectCap (PML4Cap pml4 (Some asid)))) slot s;
valid_asid_map s; valid_vs_lookup s; unique_table_refs (caps_of_state s);
valid_arch_state s; valid_global_objs s; valid_objs s \<rbrakk>
\<Longrightarrow> vspace_at_uniq asid pml4 s"
apply (frule(1) cte_wp_at_valid_objs_valid_cap)
apply (clarsimp simp: vspace_at_uniq_def restrict_map_def valid_cap_def
elim!: ranE split: if_split_asm)
apply (drule(1) valid_asid_mapD)
apply (clarsimp simp: vspace_at_asid_def)
apply (frule(1) valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI])
apply (clarsimp simp: cte_wp_at_caps_of_state dest!: obj_ref_elemD)
apply (drule(1) unique_table_refsD[rotated, where cps="caps_of_state s"],
simp+)
apply (clarsimp simp: table_cap_ref_ap_eq[symmetric] table_cap_ref_def
split: cap.splits arch_cap.splits option.splits)
apply (drule(1) asid_low_high_bits, simp_all add: mask_def)
done
lemma invalidateTLB_underlying_memory:
"\<lbrace>\<lambda>m'. underlying_memory m' p = um\<rbrace>
@@ -245,12 +201,6 @@ lemma vspace_at_asid_arch_up':
by (clarsimp simp add: vspace_at_asid_def vs_lookup_def vs_lookup1_def)
lemma vspace_at_asid_arch_up:
"vspace_at_asid asid pml4 (s\<lparr>arch_state := arch_state s \<lparr>x64_asid_map := a\<rparr>\<rparr>) =
vspace_at_asid asid pml4 s"
by (simp add: vspace_at_asid_arch_up')
lemmas ackInterrupt_irq_masks = no_irq[OF no_irq_ackInterrupt]
@@ -374,59 +324,6 @@ lemma asid_high_bits_shl:
done
lemma valid_asid_map_unmap:
"valid_asid_map s \<and> is_aligned base asid_low_bits \<and> base \<le> mask asid_bits \<and>
(\<forall>x \<in> set [0.e.2^asid_low_bits - 1]. x64_asid_map (arch_state s) (base + x) = None) \<longrightarrow>
valid_asid_map(s\<lparr>arch_state := arch_state s\<lparr>x64_asid_table := (x64_asid_table (arch_state s))(asid_high_bits_of base := None)\<rparr>\<rparr>)"
apply (clarsimp simp: valid_asid_map_def vspace_at_asid_def)
apply (drule bspec, blast)
apply clarsimp
apply (erule vs_lookupE)
apply (clarsimp simp: vs_asid_refs_def dest!: graph_ofD)
apply (frule vs_lookup1_trans_is_append, clarsimp)
apply (drule ucast_up_inj, simp)
apply clarsimp
apply (rule_tac ref'="([VSRef (ucast (asid_high_bits_of a)) None],ba)" in vs_lookupI)
apply (simp add: vs_asid_refs_def)
apply (simp add: graph_of_def)
apply (rule_tac x="(asid_high_bits_of a, ba)" in image_eqI)
apply simp
apply clarsimp
apply (subgoal_tac "a \<le> mask asid_bits")
prefer 2
apply fastforce
apply (drule_tac asid=a in ex_asid_high_bits_plus)
apply (clarsimp simp: asid_high_bits_shl)
apply (drule rtranclD, simp)
apply (drule tranclD)
apply clarsimp
apply (drule vs_lookup1D)
apply clarsimp
apply (frule vs_lookup1_trans_is_append, clarsimp)
apply (drule vs_lookup_trans_ptr_eq, clarsimp)
apply (rule r_into_rtrancl)
apply (rule vs_lookup1I)
apply simp
apply assumption
apply simp
done
lemma invalidate_asid_asid_map_None_inv:
"\<lbrace>\<lambda>s. x64_asid_map (arch_state s) y = None\<rbrace>
invalidate_asid x
\<lbrace>\<lambda>_ s. x64_asid_map (arch_state s) y = None\<rbrace>"
apply (simp add: invalidate_asid_def)
apply (wp )
apply simp
done
lemma invalidate_asid_entry_asid_map_None_inv:
"\<lbrace>\<lambda>s. x64_asid_map (arch_state s) y = None\<rbrace>
invalidate_asid_entry x z
\<lbrace>\<lambda>_ s. x64_asid_map (arch_state s) y = None\<rbrace>"
apply (simp add: invalidate_asid_entry_def)
by (wp invalidate_asid_asid_map_None_inv)
crunch vs_lookup [wp]: invalidate_asid_entry "\<lambda>s. P (vs_lookup s)"
crunch aligned [wp]: invalidate_asid_entry pspace_aligned
@@ -438,30 +335,6 @@ crunch caps_of_state[wp]: invalidate_asid_entry "\<lambda>s. P (caps_of_state s)"
crunch vspace_objs [wp]: invalidate_asid_entry valid_vspace_objs
(simp: valid_vspace_objs_arch_update)
lemma invalidate_asid_invalidates:
"\<lbrace>valid_asid_map and valid_arch_state and K (asid \<le> mask asid_bits) and
(\<lambda>s. x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some ap)\<rbrace>
invalidate_asid asid
\<lbrace>\<lambda>rv s. \<forall>asida. asida \<le> mask asid_bits \<longrightarrow>
ucast asida = (ucast asid :: 9 word) \<longrightarrow>
x64_asid_table (arch_state s) (asid_high_bits_of asida) =
Some ap \<longrightarrow>
x64_asid_map (arch_state s) asida = None\<rbrace>"
apply (simp add: invalidate_asid_def)
apply wp
apply clarsimp
apply (clarsimp simp: valid_arch_state_def valid_asid_table_def)
apply (drule_tac x="asid_high_bits_of asid" and y="asid_high_bits_of asida" in inj_onD)
apply simp
apply blast
apply blast
apply clarsimp
apply (drule asid_low_high_bits', simp)
apply (fastforce simp: mask_def)
apply (fastforce simp: mask_def)
apply (erule (1) notE)
done
crunch typ_at [wp]: invalidate_asid_entry "\<lambda>s. P (typ_at T p s)"
lemmas invalidate_asid_entry_typ_ats [wp] =
@@ -471,41 +344,8 @@ crunch cur [wp]: invalidate_asid_entry cur_tcb
crunch valid_objs [wp]: invalidate_asid_entry valid_objs
lemma invalidate_asid_asid_map [wp]:
"\<lbrace>valid_asid_map\<rbrace> invalidate_asid asid \<lbrace>\<lambda>_. valid_asid_map\<rbrace>"
apply (simp add: invalidate_asid_def )
apply (wp )
apply (clarsimp simp: valid_asid_map_def simp del: fun_upd_apply)
apply (clarsimp simp: vspace_at_asid_def vs_lookup_arch_update)
apply auto
done
crunch asid_map[wp]: invalidate_asid_entry "valid_asid_map"
crunch obj_at [wp]: invalidate_asid_entry "\<lambda>s. P (obj_at Q p s)"
lemma invalidate_asid_invs [wp]:
"\<lbrace>invs\<rbrace> invalidate_asid asid \<lbrace>\<lambda>_. invs\<rbrace>"
apply (simp add: invalidate_asid_def)
apply (rule hoare_pre, wp )
apply (clarsimp simp: invs_def valid_state_def valid_irq_node_def
valid_arch_caps_def valid_table_caps_def
valid_kernel_mappings_def valid_global_objs_def
valid_global_refs_def global_refs_def
valid_vs_lookup_def
vs_lookup_arch_update vs_lookup_pages_arch_update
valid_vspace_objs_def valid_arch_state_def
second_level_tables_def
simp del: fun_upd_apply)
apply (clarsimp simp: valid_asid_map_def valid_machine_state_def)
apply (rule conjI)
apply (erule order_trans[rotated], clarsimp)
apply (simp add: vspace_at_asid_arch_up')
done
crunch valid_vs_lookup[wp]: invalidate_asid "valid_vs_lookup"
(simp: valid_vs_lookup_def)
crunch valid_vs_lookup[wp]: invalidate_asid_entry "valid_vs_lookup"
crunch arm_asid_table_inv[wp]: invalidate_asid_entry
@@ -533,51 +373,6 @@ lemma dmo_hwASIDInvalidate_invs[wp]:
lemma invalidate_asid_entry_invs[wp]: "\<lbrace>invs\<rbrace> invalidate_asid_entry asid vspace \<lbrace>\<lambda>_. invs\<rbrace>"
by (wpsimp simp: invalidate_asid_entry_def)
lemma mapM_invalidate:
"\<lbrace>[VSRef (ucast (asid_high_bits_of base)) None] \<rhd> ptr and
ko_at (ArchObj (arch_kernel_obj.ASIDPool pool)) ptr and
valid_asid_map and K (is_aligned base asid_low_bits)\<rbrace>
mapM (\<lambda>offset. when (\<exists>y. pool (ucast offset) = Some y) $
invalidate_asid_entry (base + offset) (the (pool (ucast offset))))
[0.e.2 ^ asid_low_bits - 1]
\<lbrace>\<lambda>rv s. \<forall>x\<le>2 ^ asid_low_bits - 1. x64_asid_map (arch_state s) (base + x) = None\<rbrace>"
proof -
have ball: "\<And>P w::machine_word. (\<forall>x\<le>w. P x) = (\<forall>x \<in> set [0.e.w]. P x)" by simp
show ?thesis
apply (subst ball)
apply (rule mapM_set)
apply (wp, simp)
apply (wp | simp add: invalidate_asid_entry_def invalidate_asid_def
cong: if_cong)+
apply clarsimp
apply (rule ccontr)
apply clarsimp
apply (clarsimp simp: valid_asid_map_def)
apply (drule bspec, erule graph_ofI)
apply (erule vs_lookup_atE)
apply (clarsimp simp: vspace_at_asid_def)
apply (drule vs_lookup_2ConsD)
apply clarsimp
apply (erule vs_lookup_atE)
apply (drule vs_lookup1D)
apply clarsimp
apply (subgoal_tac "p' = ptr")
apply (clarsimp simp: obj_at_def vs_refs_def graph_of_def)
apply (subgoal_tac "base + x && mask asid_low_bits = x")
apply (simp add: ucast_ucast_mask)
apply (subgoal_tac "a && mask word_bits = a")
apply (simp add: word_bits_def)
apply (rule word_eqI)
apply (simp add: word_size word_bits_def)
apply (subst add_mask_eq, assumption+)
apply (simp add: asid_low_bits_def word_bits_def)
apply (rule refl)
apply (subst (asm) asid_high_bits_of_add, assumption+)
apply simp
apply (wp invalidate_asid_entry_asid_map_None_inv)
apply simp
done
qed
lemma asid_low_bits_word_bits:
"asid_low_bits < word_bits"
@@ -625,12 +420,6 @@ lemma valid_vs_lookup_arch_update:
\<Longrightarrow> valid_vs_lookup (arch_state_update f s) = valid_vs_lookup s"
by (simp add: valid_vs_lookup_def vs_lookup_pages_arch_update)
lemma valid_asid_map_arch_update:
assumes "x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s)"
assumes "x64_asid_map (f (arch_state s)) = x64_asid_map (arch_state s)"
shows "valid_asid_map (arch_state_update f s) = valid_asid_map s"
by (simp add: assms valid_asid_map_def vspace_at_asid_def vs_lookup_arch_update)
crunch typ_at [wp]: find_vspace_for_asid "\<lambda>s. P (typ_at T p s)"
lemmas find_vspace_for_asid_typ_ats [wp] = abs_typ_at_lifts [OF find_vspace_for_asid_typ_at]
@@ -1111,7 +900,6 @@ lemma get_current_cr3_rewrite_lift[wp]:
lemma arch_state_update_invs:
assumes "invs s"
assumes "x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s)"
assumes "x64_asid_map (f (arch_state s)) = x64_asid_map (arch_state s)"
assumes "valid_global_refs s \<Longrightarrow> valid_global_refs (arch_state_update f s)"
assumes "valid_arch_state s \<Longrightarrow> valid_arch_state (arch_state_update f s)"
assumes "valid_table_caps s \<Longrightarrow> valid_table_caps (arch_state_update f s)"
@@ -1122,9 +910,8 @@ lemma arch_state_update_invs:
assumes "cap_refs_in_kernel_window s \<Longrightarrow> cap_refs_in_kernel_window (arch_state_update f s)"
shows "invs (arch_state_update f s)"
using assms by (simp add: invs_def valid_state_def valid_irq_node_def valid_irq_states_def
valid_machine_state_def valid_arch_caps_def
valid_vspace_objs_arch_update valid_vs_lookup_arch_update
valid_asid_map_arch_update)
valid_machine_state_def valid_arch_caps_def valid_asid_map_def
valid_vspace_objs_arch_update valid_vs_lookup_arch_update)
lemma set_current_cr3_invs[wp]:
"\<lbrace>invs\<rbrace> set_current_cr3 c \<lbrace>\<lambda>rv. invs\<rbrace>"
@@ -1138,55 +925,15 @@ lemma set_current_vspace_root_invs[wp]:
apply (wp)
done
lemma update_asid_map_valid_arch:
notes hoare_pre [wp_pre del]
shows "\<lbrace>valid_arch_state\<rbrace>
update_asid_map asid
\<lbrace>\<lambda>_. valid_arch_state\<rbrace>"
apply (simp add: update_asid_map_def)
apply (wp find_vspace_for_asid_assert_wp)
apply (simp add: valid_arch_state_def fun_upd_def[symmetric] comp_upd_simp)
done
lemma update_asid_map_invs:
"\<lbrace>invs and K (asid \<le> mask asid_bits)\<rbrace> update_asid_map asid \<lbrace>\<lambda>rv. invs\<rbrace>"
apply (rule hoare_add_post)
apply (rule update_asid_map_valid_arch)
apply fastforce
apply (simp add: update_asid_map_def)
apply (wp find_vspace_for_asid_assert_wp)
apply (clarsimp simp: invs_def valid_state_def)
apply (simp add: valid_global_refs_def global_refs_def
valid_irq_node_def valid_vspace_objs_arch_update
valid_global_objs_def valid_arch_caps_def
valid_table_caps_def valid_kernel_mappings_def
valid_machine_state_def valid_vs_lookup_arch_update
second_level_tables_def)
apply (simp add: valid_asid_map_def fun_upd_def[symmetric] vspace_at_asid_arch_up)
done
lemma update_asid_map_pred_tcb_at[wp]:
"\<lbrace>pred_tcb_at proj P t\<rbrace> update_asid_map a \<lbrace>\<lambda>_. pred_tcb_at proj P t\<rbrace>"
by (clarsimp simp: pred_tcb_at_def update_asid_map_def | wp)+
lemma svr_invs [wp]:
"\<lbrace>invs\<rbrace> set_vm_root t' \<lbrace>\<lambda>_. invs\<rbrace>"
apply (simp add: set_vm_root_def)
apply (rule hoare_pre)
apply (wp hoare_whenE_wp find_vspace_for_asid_inv hoare_vcg_all_lift update_asid_map_invs
| wpc
| simp add: split_def if_apply_def2 cong: if_cong)+
apply (rule_tac Q'="\<lambda>_ s. invs s \<and> x2 \<le> mask asid_bits" in hoare_post_imp_R)
prefer 2
apply simp
apply (rule valid_validE_R)
apply (wp find_vspace_for_asid_inv | simp add: split_def)+
apply (rule_tac Q="\<lambda>c s. invs s \<and> s \<turnstile> c" in hoare_strengthen_post)
apply wp
apply (clarsimp simp: valid_cap_def mask_def)
by (fastforce)
apply (wp hoare_whenE_wp get_cap_wp
| wpc
| simp add: split_def if_apply_def2 cong: if_cong)+
done
crunch pred_tcb_at[wp]: set_current_vspace_root, update_asid_map "pred_tcb_at proj P t"
crunch pred_tcb_at[wp]: set_current_vspace_root "pred_tcb_at proj P t"
lemma svr_pred_st_tcb[wp]:
"\<lbrace>pred_tcb_at proj P t\<rbrace> set_vm_root t \<lbrace>\<lambda>_. pred_tcb_at proj P t\<rbrace>"
@@ -3975,7 +3722,7 @@ lemma asid_is_zeroI:
lemma store_asid_pool_entry_invs:
"\<lbrace>invs and K (asid \<le> mask asid_bits \<and> 0 < asid) and
(\<lambda>s. case pml4base of None \<Rightarrow> x64_asid_map (arch_state s) asid = None
(\<lambda>s. case pml4base of None \<Rightarrow> True
| Some ptr \<Rightarrow> obj_at (empty_table (set (second_level_tables (arch_state s)))) ptr s
\<and> typ_at (AArch APageMapL4) ptr s \<and> (\<forall>pool. ko_at (ArchObj (ASIDPool pool)) p s \<longrightarrow> pool (ucast asid) = None)
\<and> p \<notin> set (second_level_tables (arch_state s))
@@ -3983,48 +3730,32 @@ lemma store_asid_pool_entry_invs:
and [VSRef (ucast (asid_high_bits_of asid)) None] \<rhd> p
\<rbrace>
store_asid_pool_entry p asid pml4base \<lbrace>\<lambda>_. invs\<rbrace>"
apply (simp add: store_asid_pool_entry_def)
apply (wp)
unfolding store_asid_pool_entry_def
apply simp
apply wp
apply simp
apply (intro impI allI conjI valid_table_caps_aobj_upd_invalid_pml4e invs_valid_table_caps , simp_all add: obj_at_def)
apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask a_type_simps
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs
| intro valid_table_caps_asid_upd)+
apply (drule subsetD[OF vs_refs_of_set_diff])
apply (frule invs_valid_asid_map)
apply (clarsimp dest!: asid_lvl_lookup1D
split: option.splits simp: ucast_ucast_id graph_of_def ucast_ucast_mask)
apply (clarsimp simp: asid_low_bits_def[symmetric])
apply (drule asid_low_high_bits)
apply (drule(2) valid_asid_table_injD[OF _ _ invs_valid_asid_table])
apply ((clarsimp dest!: valid_asid_mapD simp: mask_def)+)[4]
apply (drule subsetD[OF vs_refs_of_set_diff])
apply (frule invs_valid_asid_map)
apply (clarsimp dest!: asid_lvl_lookup1D valid_asid_mapD
split: option.splits simp: ucast_ucast_id graph_of_def ucast_ucast_mask)
apply (clarsimp simp: asid_low_bits_def[symmetric] vspace_at_asid_def vs_lookup_def)
apply (erule wellformed_lookup.lookup_forwardE[OF vs_lookup1_is_wellformed_lookup]
| erule wellformed_lookup.lookupE[OF vs_lookup1_is_wellformed_lookup]
| clarsimp simp: vs_asid_refs_def up_ucast_inj_eq obj_at_def
dest!: vs_lookup1D graph_ofD)+
apply (clarsimp simp: lookup_leaf_def vs_refs_def mask_asid_low_bits_ucast_ucast dest!: graph_ofD)
apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask a_type_simps
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs
| intro valid_table_caps_asid_upd)+
apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs
| intro valid_kernel_mappings_if_pm_asid invs_valid_kernel_mappings valid_global_objs_upd_invalid_asid_slot
| fastforce)+
apply (fastforce dest!: valid_vspace_obj_from_invs valid_vspace_obj_from_invs ran_del_subset
simp: obj_at_def)+
apply (fastforce dest!: valid_vspace_obj_from_invs valid_vspace_obj_from_invs ran_del_subset
simp: obj_at_def)+
apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs)
apply (rule ccontr)
apply (erule(1) unmapped_cap_lookup_refs_empty[OF _ rtrancl_into_trancl1])
apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup1_is_wellformed_lookup],fastforce,fastforce)
apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup1_is_wellformed_lookup],fastforce,fastforce)
apply (clarsimp simp: aa_type_simps obj_at_def split: option.split_asm if_split_asm)
apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs)
split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs)
apply (rule ccontr)
apply (erule unmapped_cap_lookup_refs_pages_empty[OF _ ])
apply (erule rtrancl_into_trancl1)
apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup_pages1_is_wellformed_lookup],fastforce,fastforce)
apply (erule rtrancl_into_trancl1)
apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup_pages1_is_wellformed_lookup],fastforce,fastforce)
apply (clarsimp split: option.split_asm if_split_asm simp: aa_type_simps obj_at_def)
apply (clarsimp split: if_split_asm option.split_asm dest!:ref_pages_Some simp: lookupable_refs_def obj_at_def)
apply (frule(2) empty_table_lookup_refs_refl)
@@ -4037,9 +3768,9 @@ lemma store_asid_pool_entry_invs:
apply force
apply force
apply (drule(1) ref_is_unique)
apply force+
apply (clarsimp dest!: invs_valid_objs valid_objs_caps)
apply clarsimp
apply force+
apply (clarsimp dest!: invs_valid_objs valid_objs_caps)
apply clarsimp
apply (drule asid_is_zeroI, simp+)
apply (clarsimp simp: lookupable_refs_def obj_at_def split: option.split_asm if_split_asm)
apply (frule(2) empty_table_lookup_refs_pages_refl)
@@ -4049,7 +3780,7 @@ lemma store_asid_pool_entry_invs:
apply force
apply force
apply (drule(1) ref_is_unique)
apply force+
apply force+
apply (clarsimp dest!: invs_valid_objs valid_objs_caps)
apply (clarsimp simp: cte_wp_at_caps_of_state is_cap_simps)
apply (intro exI conjI)