x64: fix ArchArch_AI

Also includes some corrections to the abstract specification, and minor
improvements to some existing proofs.
Matthew Brecknell 2017-02-22 15:47:29 +11:00
parent cd5c789f50
commit 237fb11012
10 changed files with 962 additions and 909 deletions


@@ -40,11 +40,6 @@ lemma safe_parent_strg:
done
lemma asid_low_bits_pageBits:
"Suc (Suc asid_low_bits) = pageBits"
by (simp add: pageBits_def asid_low_bits_def)
(* 32-bit instance of Detype_AI.range_cover_full *)
lemma range_cover_full:
"\<lbrakk>is_aligned ptr sz;sz<word_bits\<rbrakk> \<Longrightarrow> range_cover (ptr::word32) sz sz (Suc 0)"


@@ -1064,11 +1064,6 @@ lemma kernel_base_kernel_mapping_slots:
apply word_bitwise
by blast
private lemma kernel_base_kernel_mapping_slots_hoare_pre:
assumes "ucast (get_pml4_index vptr) \<notin> kernel_mapping_slots \<Longrightarrow> \<lbrace> P \<rbrace> f \<lbrace> Q \<rbrace>,-"
shows "\<lbrace> P and K (vptr < pptr_base \<and> canonical_address vptr) \<rbrace> f \<lbrace> Q \<rbrace>,-"
by (rule hoare_gen_asmE; simp add: assms kernel_base_kernel_mapping_slots)
private lemma vtable_index_mask:
fixes r :: word64
shows "(r && mask table_size >> word_size_bits) && mask 9
@@ -1083,25 +1078,25 @@ private lemma vtable_index_mask:
private abbreviation (input)
"pre pm_lookup \<equiv> pspace_aligned and valid_arch_objs and valid_arch_state
and equal_kernel_mappings and valid_global_objs
and page_map_l4_at pm and pm_lookup
and K (vptr < pptr_base \<and> canonical_address vptr)"
and equal_kernel_mappings and valid_global_objs and pm_lookup
and K (is_aligned pm pml4_bits \<and> vptr < pptr_base \<and> canonical_address vptr)"
private abbreviation (input)
"pdpt_ref pm_ref \<equiv> VSRef (get_pml4_index vptr) (Some APageMapL4) # pm_ref"
private lemma lookup_pdpt_slot_rv [simplified pred_and_true_var]:
"\<And>pm_ref.
\<lbrace> pre (pm_ref \<rhd> pm) \<rbrace>
lookup_pdpt_slot pm vptr
\<lbrace> \<lambda>rv s. pdpte_at rv s
\<and> (pdpt_ref pm_ref \<rhd> (rv && ~~ mask pdpt_bits)) s
\<and> (get_pdpt_index vptr = rv && mask pdpt_bits >> word_size_bits) \<rbrace>,-"
apply (rule kernel_base_kernel_mapping_slots_hoare_pre)
lemma lookup_pdpt_slot_wp:
"\<lbrace> \<lambda>s. \<forall>rv. (\<forall>pm_ref. pre (pm_ref \<rhd> pm) s \<longrightarrow>
pdpte_at rv s
\<and> (pdpt_ref pm_ref \<rhd> (rv && ~~ mask pdpt_bits)) s
\<and> get_pdpt_index vptr = rv && mask pdpt_bits >> word_size_bits)
\<longrightarrow> Q rv s \<rbrace>
lookup_pdpt_slot pm vptr
\<lbrace> Q \<rbrace>, -"
apply (clarsimp simp: lookup_pdpt_slot_def)
apply (wp get_pml4e_wp | wpc)+
apply (clarsimp simp: lookup_pml4_slot_def Let_def)
apply (simp add: pml4_shifting_at)
apply (clarsimp, drule spec, erule mp)
apply (clarsimp simp: lookup_pml4_slot_def pml4_shifting)
apply (frule (1) kernel_base_kernel_mapping_slots)
apply (frule (2) valid_arch_objsD; clarsimp)
apply (bspec "ucast (get_pml4_index vptr)"; clarsimp)
apply (rule conjI)
@@ -1119,64 +1114,87 @@ private lemma lookup_pdpt_slot_rv [simplified pred_and_true_var]:
private abbreviation (input)
"pd_ref pm_ref \<equiv> VSRef (get_pdpt_index vptr) (Some APDPointerTable) # pdpt_ref pm_ref"
private lemma lookup_pd_slot_rv [simplified pred_and_true_var]:
lemma lookup_pd_slot_wp:
"\<lbrace> \<lambda>s. \<forall>rv. (\<forall>pm_ref. pre (pm_ref \<rhd> pm) s \<longrightarrow>
pde_at rv s
\<and> (pd_ref pm_ref \<rhd> (rv && ~~ mask pd_bits)) s
\<and> get_pd_index vptr = rv && mask pd_bits >> word_size_bits)
\<longrightarrow> Q rv s \<rbrace>
lookup_pd_slot pm vptr
\<lbrace> Q \<rbrace>, -"
apply (clarsimp simp: lookup_pd_slot_def)
apply (wp get_pdpte_wp lookup_pdpt_slot_wp | wpc | simp)+
apply (clarsimp; drule spec; erule mp; clarsimp)
apply (drule spec; erule (1) impE; clarsimp)
apply (frule (2) valid_arch_objsD; clarsimp)
apply (drule_tac x="ucast (rv && mask pdpt_bits >> word_size_bits)" in spec; clarsimp)
apply (rule conjI)
apply (erule page_directory_pde_atI, simp_all)
apply (simp add: get_pd_index_def bit_simps mask_def)
apply (rule order_le_less_trans, rule word_and_le1, simp)
apply (frule vs_lookup_step, rule vs_lookup1I[OF _ _ refl], assumption)
apply (simp add: vs_refs_def)
apply (rule image_eqI[rotated])
apply (rule_tac x="ucast (rv && mask pdpt_bits >> word_size_bits)" in graph_ofI)
by (simp add: pdpte_ref_def pd_shifting_at ucast_ucast_mask pdpt_bits_def vtable_index_mask)+
private abbreviation (input)
"pt_ref pm_ref \<equiv> VSRef (get_pd_index vptr) (Some APageDirectory) # pd_ref pm_ref"
lemma lookup_pt_slot_wp:
"\<lbrace> \<lambda>s. \<forall>rv. (\<forall>pm_ref. pre (pm_ref \<rhd> pm) s \<longrightarrow>
pte_at rv s
\<and> (pt_ref pm_ref \<rhd> (rv && ~~ mask pt_bits)) s
\<and> get_pt_index vptr = rv && mask pt_bits >> word_size_bits)
\<longrightarrow> Q rv s \<rbrace>
lookup_pt_slot pm vptr
\<lbrace> Q \<rbrace>, -"
apply (clarsimp simp: lookup_pt_slot_def)
apply (wp get_pde_wp lookup_pd_slot_wp | wpc | simp)+
apply (clarsimp; drule spec; erule mp; clarsimp)
apply (drule spec; erule (1) impE; clarsimp)
apply (frule (2) valid_arch_objsD; clarsimp)
apply (drule_tac x="ucast (rv && mask pd_bits >> word_size_bits)" in spec; clarsimp)
apply (rule conjI)
apply (erule page_table_pte_atI, simp_all)
apply (simp add: get_pt_index_def bit_simps mask_def)
apply (rule order_le_less_trans, rule word_and_le1, simp)
apply (frule vs_lookup_step, rule vs_lookup1I[OF _ _ refl], assumption)
apply (simp add: vs_refs_def)
apply (rule image_eqI[rotated])
apply (rule_tac x="ucast (rv && mask pd_bits >> word_size_bits)" in graph_ofI)
by (simp add: pde_ref_def pt_shifting_at ucast_ucast_mask pd_bits_def vtable_index_mask)+
(* The following older lemmas are kept for a few existing proofs.
They are weaker than the above lookup_*_slot_wp rules, and harder to use.
New proofs should use the lookup_*_slot_wp rules. *)
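(* Editorial sketch, hedged: since the post-condition Q of the lookup_*_slot_wp
   rules is arbitrary, they compose directly with the wp method when verifying
   callers, e.g. as in the proof of lookup_pd_slot_wp above:
     apply (wp get_pdpte_wp lookup_pdpt_slot_wp | wpc | simp)+
   The older _rv rules below fix their post-conditions, so callers must weaken
   them explicitly, e.g. via hoare_post_imp_R and hoare_vcg_conj_lift_R. *)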
private lemma lookup_pd_slot_rv:
"\<And>pm_ref.
\<lbrace> pre (pm_ref \<rhd> pm) \<rbrace>
lookup_pd_slot pm vptr
\<lbrace> \<lambda>rv s. pde_at rv s
\<and> (pd_ref pm_ref \<rhd> (rv && ~~ mask pd_bits)) s
\<and> (get_pd_index vptr = rv && mask pd_bits >> word_size_bits) \<rbrace>,-"
apply (simp add: lookup_pd_slot_def)
apply (rule hoare_pre)
apply (wp get_pdpte_wp hoare_vcg_all_lift_R | wpc | simp)+
apply (rule hoare_post_imp_R)
apply (rule_tac Q="\<lambda>_. valid_arch_objs and pspace_aligned" in hoare_vcg_conj_lift_R)
apply (rule lookup_pdpt_slot_inv_any)
apply (rule lookup_pdpt_slot_rv)
apply clarsimp
apply (frule (2) valid_arch_objsD; clarsimp)
apply (drule_tac x="ucast (r && mask pdpt_bits >> word_size_bits)" in spec; clarsimp)
apply (rule conjI)
apply (erule page_directory_pde_atI, simp_all)
apply (simp add: get_pd_index_def bit_simps mask_def)
apply (rule order_le_less_trans, rule word_and_le1, simp)
apply (frule vs_lookup_step)
apply (rule vs_lookup1I[OF _ _ refl], assumption)
apply (simp add: vs_refs_def)
apply (rule image_eqI[rotated])
apply (rule_tac x="ucast (r && mask pdpt_bits >> word_size_bits)" in graph_ofI)
by (simp add: pdpte_ref_def pd_shifting_at ucast_ucast_mask pdpt_bits_def vtable_index_mask)+
by (rule hoare_pre, rule lookup_pd_slot_wp, clarsimp)
private abbreviation (input)
"pt_ref pm_ref \<equiv> VSRef (get_pd_index vptr) (Some APageDirectory) # pd_ref pm_ref"
private lemma lookup_pdpt_slot_rv:
"\<And>pm_ref.
\<lbrace> pre (pm_ref \<rhd> pm) \<rbrace>
lookup_pdpt_slot pm vptr
\<lbrace> \<lambda>rv s. pdpte_at rv s
\<and> (pdpt_ref pm_ref \<rhd> (rv && ~~ mask pdpt_bits)) s
\<and> (get_pdpt_index vptr = rv && mask pdpt_bits >> word_size_bits) \<rbrace>,-"
by (rule hoare_pre, rule lookup_pdpt_slot_wp, clarsimp)
private lemma lookup_pt_slot_rv [simplified pred_and_true_var]:
private lemma lookup_pt_slot_rv:
"\<And>pm_ref.
\<lbrace> pre (pm_ref \<rhd> pm) \<rbrace>
lookup_pt_slot pm vptr
\<lbrace>\<lambda>rv s. pte_at rv s
\<and> (pt_ref pm_ref \<rhd> (rv && ~~ mask pt_bits)) s
\<and> (get_pt_index vptr = rv && mask pt_bits >> word_size_bits) \<rbrace>,-"
apply (simp add: lookup_pt_slot_def)
apply (rule hoare_pre)
apply (wp get_pde_wp hoare_vcg_all_lift_R | wpc | simp)+
apply (rule hoare_post_imp_R)
apply (rule_tac Q="\<lambda>rv. valid_arch_objs and pspace_aligned" in hoare_vcg_conj_lift_R)
apply (rule lookup_pd_slot_inv_any)
apply (rule lookup_pd_slot_rv)
apply clarsimp
apply (frule (2) valid_arch_objsD; clarsimp)
apply (drule_tac x="ucast (r && mask pd_bits >> word_size_bits)" in spec; clarsimp)
apply (rule conjI)
apply (erule page_table_pte_atI, simp_all)
apply (simp add: get_pt_index_def bit_simps mask_def)
apply (rule order_le_less_trans, rule word_and_le1, simp)
apply (frule vs_lookup_step)
apply (rule vs_lookup1I[OF _ _ refl], assumption)
apply (simp add: vs_refs_def)
apply (rule image_eqI[rotated])
apply (rule_tac x="ucast (r && mask pd_bits >> word_size_bits)" in graph_ofI)
by (simp add: pde_ref_def pt_shifting_at ucast_ucast_mask pd_bits_def vtable_index_mask)+
by (rule hoare_pre, rule lookup_pt_slot_wp, clarsimp)
(* It's awkward to prove the following more generally, since these are probably only true
when the vs_lookups appear only under conjunctions. *)
@@ -1198,25 +1216,18 @@ private lemmas get_ent_pm = validE_R_post_conjD1
private lemmas get_ent_ex = vs_lookup_all_ex_convert_pre[OF get_ent_pm]
private lemmas get_lookup = validE_R_post_conjD1[OF validE_R_post_conjD2]
private lemmas get_lkp_ex = vs_lookup_all_ex_convert_post[OF get_lookup]
private lemmas get_vindex = validE_R_post_conjD2[OF validE_R_post_conjD2]
lemmas lookup_pdpt_slot_pdpte = get_ent_pm[OF lookup_pdpt_slot_rv]
lemmas lookup_pdpt_slot_pdpte_ex [wp] = get_ent_ex[OF lookup_pdpt_slot_rv]
lemmas lookup_pdpt_slot_vs_lookup [wp] = get_lookup[OF lookup_pdpt_slot_rv]
lemmas lookup_pdpt_slot_vs_lookup_ex [wp] = get_lkp_ex[OF lookup_pdpt_slot_rv]
lemmas lookup_pdpt_slot_vs_index = get_vindex[OF lookup_pdpt_slot_rv]
lemmas lookup_pd_slot_pde = get_ent_pm[OF lookup_pd_slot_rv]
lemmas lookup_pd_slot_pde_ex [wp] = get_ent_ex[OF lookup_pd_slot_rv]
lemmas lookup_pd_slot_vs_lookup [wp] = get_lookup[OF lookup_pd_slot_rv]
lemmas lookup_pd_slot_vs_lookup_ex [wp] = get_lkp_ex[OF lookup_pd_slot_rv]
lemmas lookup_pd_slot_vs_index = get_vindex[OF lookup_pd_slot_rv]
lemmas lookup_pt_slot_pte = get_ent_pm[OF lookup_pt_slot_rv]
lemmas lookup_pt_slot_pte_ex [wp] = get_ent_ex[OF lookup_pt_slot_rv]
lemmas lookup_pt_slot_vs_lookup [wp] = get_lookup[OF lookup_pt_slot_rv]
lemmas lookup_pt_slot_vs_lookup_ex [wp] = get_lkp_ex[OF lookup_pt_slot_rv]
lemmas lookup_pt_slot_vs_index = get_vindex[OF lookup_pt_slot_rv]
lemma create_mapping_entries_valid [wp]:
"\<lbrace> pre (\<exists>\<rhd> pm) \<rbrace>
@@ -1229,16 +1240,65 @@ lemma create_mapping_entries_valid [wp]:
end
context begin
method finish =
(simp?; erule is_aligned_weaken[rotated]; simp add: is_aligned_def pptrBase_def)
lemma is_aligned_addrFromPPtr_eq: "n \<le> 39 \<Longrightarrow> is_aligned (addrFromPPtr p) n = is_aligned p n"
apply (simp add: addrFromPPtr_def; rule iffI)
apply (drule aligned_sub_aligned[where y="-pptrBase"]; finish)
apply (erule aligned_sub_aligned; finish)
done
lemma is_aligned_ptrFromPAddr_eq: "n \<le> 39 \<Longrightarrow> is_aligned (ptrFromPAddr p) n = is_aligned p n"
apply (simp add: ptrFromPAddr_def; rule iffI)
apply (drule aligned_add_aligned[where y="-pptrBase"]; finish)
apply (erule aligned_add_aligned; finish)
done
end
lemma is_aligned_addrFromPPtr_n:
"\<lbrakk> is_aligned p n; n \<le> 39 \<rbrakk> \<Longrightarrow> is_aligned (addrFromPPtr p) n"
by (simp add: is_aligned_addrFromPPtr_eq)
lemma is_aligned_ptrFromPAddr_n:
"\<lbrakk>is_aligned x sz; sz \<le> 39\<rbrakk> \<Longrightarrow> is_aligned (ptrFromPAddr x) sz"
by (simp add: is_aligned_ptrFromPAddr_eq)
lemma is_aligned_addrFromPPtr:
"is_aligned p pageBits \<Longrightarrow> is_aligned (addrFromPPtr p) pageBits"
by (simp add: is_aligned_addrFromPPtr_n pageBits_def)
lemma is_aligned_ptrFromPAddr:
"is_aligned p pageBits \<Longrightarrow> is_aligned (ptrFromPAddr p) pageBits"
by (simp add: is_aligned_ptrFromPAddr_n pageBits_def)
lemma is_aligned_addrFromPPtr_pageBitsForSize:
"is_aligned (addrFromPPtr p) (pageBitsForSize sz) = is_aligned p (pageBitsForSize sz)"
by (cases sz; simp add: is_aligned_addrFromPPtr_eq bit_simps)
lemma is_aligned_ptrFromPAddr_pageBitsForSize:
"is_aligned (ptrFromPAddr p) (pageBitsForSize sz) = is_aligned p (pageBitsForSize sz)"
by (cases sz; simp add: is_aligned_ptrFromPAddr_eq bit_simps)
lemma data_at_vmsz_aligned:
"data_at sz (ptrFromPAddr base) s \<Longrightarrow> pspace_aligned s \<Longrightarrow> vmsz_aligned base sz"
by (cases sz)
(auto simp: data_at_def obj_at_def vmsz_aligned_def bit_simps
dest!: pspace_alignedD
elim!: iffD1[OF is_aligned_ptrFromPAddr_eq, rotated])
lemma create_mapping_entries_valid_slots [wp]:
"\<lbrace> pspace_aligned and valid_arch_objs and valid_arch_state and equal_kernel_mappings
and valid_global_objs and page_map_l4_at pm and (\<exists>\<rhd> pm) and data_at sz (ptrFromPAddr base)
and K (vptr < pptr_base \<and> canonical_address vptr \<and> vmsz_aligned base sz
and valid_global_objs and (\<exists>\<rhd> pm) and data_at sz (ptrFromPAddr base)
and K (is_aligned pm pml4_bits \<and> vptr < pptr_base \<and> canonical_address vptr
\<and> vm_rights \<in> valid_vm_rights) \<rbrace>
create_mapping_entries base vptr sz vm_rights attrib pm
\<lbrace>\<lambda>m. valid_slots m\<rbrace>,-"
apply (cases sz; simp; rule hoare_pre)
apply (wp | simp add: valid_slots_def
apply (wp | clarsimp simp: valid_slots_def elim!: data_at_vmsz_aligned
| rule lookup_pt_slot_inv_any lookup_pd_slot_inv_any lookup_pdpt_slot_inv_any)+
done
@@ -2085,34 +2145,6 @@ lemma lookup_pml4_slot_eq:
apply simp+
done
lemma is_aligned_addrFromPPtr_n:
"\<lbrakk> is_aligned p n; n \<le> 39 \<rbrakk> \<Longrightarrow> is_aligned (Platform.X64.addrFromPPtr p) n"
apply (simp add: Platform.X64.addrFromPPtr_def)
apply (erule aligned_sub_aligned, simp_all)
apply (simp add: pptrBase_def
pageBits_def)
apply (erule is_aligned_weaken[rotated])
apply (simp add: is_aligned_def)
done
lemma is_aligned_addrFromPPtr:
"is_aligned p pageBits \<Longrightarrow> is_aligned (Platform.X64.addrFromPPtr p) pageBits"
by (simp add: is_aligned_addrFromPPtr_n pageBits_def)
lemma is_aligned_ptrFromPAddr_n:
"\<lbrakk>is_aligned x sz; sz \<le> 39\<rbrakk> \<Longrightarrow> is_aligned (ptrFromPAddr x) sz"
apply (simp add: ptrFromPAddr_def pptrBase_def)
apply (erule aligned_add_aligned)
apply (erule is_aligned_weaken[rotated])
apply (simp add:is_aligned_def)
apply (simp add:word_bits_def)
done
lemma is_aligned_ptrFromPAddr:
"is_aligned p pageBits \<Longrightarrow> is_aligned (ptrFromPAddr p) pageBits"
by (simp add: is_aligned_ptrFromPAddr_n pageBits_def)
lemma vs_lookup_arch_update:
"x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s) \<Longrightarrow>
vs_lookup (arch_state_update f s) = vs_lookup s"
@@ -2485,8 +2517,19 @@ lemma valid_cap_typ_at:
"\<lbrakk>is_pt_cap cap; p \<in> obj_refs cap; s \<turnstile> cap\<rbrakk> \<Longrightarrow> typ_at (AArch APageTable) p s"
"\<lbrakk>is_pdpt_cap cap; p \<in> obj_refs cap; s \<turnstile> cap\<rbrakk> \<Longrightarrow> typ_at (AArch APDPointerTable) p s"
"\<lbrakk>is_pml4_cap cap; p \<in> obj_refs cap; s \<turnstile> cap\<rbrakk> \<Longrightarrow> typ_at (AArch APageMapL4) p s"
"\<lbrakk>is_asid_pool_cap cap; p \<in> obj_refs cap; s \<turnstile> cap\<rbrakk> \<Longrightarrow> typ_at (AArch AASIDPool) p s"
by (auto simp: is_cap_simps valid_cap_def)
lemma valid_arch_cap_typ_at [elim]:
"\<And>s p r t sz asid. s \<turnstile> ArchObjectCap (PageCap True p r t sz asid) \<Longrightarrow> typ_at (AArch (ADeviceData sz)) p s"
"\<And>s p r t sz asid. s \<turnstile> ArchObjectCap (PageCap False p r t sz asid) \<Longrightarrow> typ_at (AArch (AUserData sz)) p s"
"\<And>s p asid. s \<turnstile> ArchObjectCap (PageTableCap p asid) \<Longrightarrow> typ_at (AArch APageTable) p s"
"\<And>s p asid. s \<turnstile> ArchObjectCap (PageDirectoryCap p asid) \<Longrightarrow> typ_at (AArch APageDirectory) p s"
"\<And>s p asid. s \<turnstile> ArchObjectCap (PDPointerTableCap p asid) \<Longrightarrow> typ_at (AArch APDPointerTable) p s"
"\<And>s p asid. s \<turnstile> ArchObjectCap (PML4Cap p asid) \<Longrightarrow> typ_at (AArch APageMapL4) p s"
"\<And>s p asid. s \<turnstile> ArchObjectCap (ASIDPoolCap p asid) \<Longrightarrow> typ_at (AArch AASIDPool) p s"
by (auto simp: valid_cap_def)
lemma is_cap_disjoint[simp]:
"\<lbrakk>is_pd_cap cap; is_pdpt_cap cap\<rbrakk>\<Longrightarrow> False"
"\<lbrakk>is_pd_cap cap; is_pml4_cap cap\<rbrakk>\<Longrightarrow> False"

File diff suppressed because it is too large.


@@ -1043,6 +1043,12 @@ lemma page_map_l4_at_def2:
apply simp
done
lemma
shows pd_pointer_table_at_def2: "pd_pointer_table_at p s = (\<exists> pdpt. ako_at (PDPointerTable pdpt) p s)"
and page_directory_at_def2: "page_directory_at p s = (\<exists> pd. ako_at (PageDirectory pd) p s)"
and page_table_at_def2: "page_table_at p s = (\<exists> pt. ako_at (PageTable pt) p s)"
by (auto simp: obj_at_def) (auto simp: a_type_def)
definition
pml4e_wp_at :: "(pml4e \<Rightarrow> bool) \<Rightarrow> machine_word \<Rightarrow> 9 word \<Rightarrow> 'z state \<Rightarrow> bool"
where


@@ -1016,9 +1016,12 @@ lemmas is_vspace_table_cap_defs = is_pt_cap_def is_pd_cap_def is_pdpt_cap_def is
abbreviation
"is_vspace_table_cap c \<equiv> is_pt_cap c \<or> is_pd_cap c \<or> is_pdpt_cap c \<or> is_pml4_cap c"
definition
"is_asid_pool_cap c \<equiv> \<exists>ptr asid. c = ArchObjectCap (ASIDPoolCap ptr asid)"
lemmas is_arch_cap_simps [simplified atomize_eq] =
is_pg_cap_def is_pd_cap_def is_pt_cap_def is_pdpt_cap_def is_pml4_cap_def
is_pg_cap_def is_pd_cap_def is_pt_cap_def is_pdpt_cap_def is_pml4_cap_def is_asid_pool_cap_def
is_nondevice_page_cap


@@ -33,7 +33,7 @@ lemma ptrFormPAddr_addFromPPtr :
lemma asid_high_bits_of_add_ucast:
"is_aligned w asid_low_bits \<Longrightarrow>
asid_high_bits_of (ucast (x::8 word) + w) = asid_high_bits_of w"
asid_high_bits_of (ucast (x::9 word) + w) = asid_high_bits_of w"
apply (rule word_eqI)
apply (simp add: word_size asid_high_bits_of_def nth_ucast nth_shiftr is_aligned_nth)
apply (subst word_plus_and_or_coroll)


@@ -879,29 +879,8 @@ lemma lookup_pd_slot_is_aligned:
apply (simp add: bit_simps)
done *)
lemma pd_pointer_table_at_aligned_pdpt_bits:
"\<lbrakk>pd_pointer_table_at pdpt s;pspace_aligned s\<rbrakk>
\<Longrightarrow> is_aligned pdpt pdpt_bits"
apply (clarsimp simp:obj_at_def)
apply (drule(1) pspace_alignedD)
apply (simp add:pdpt_bits_def pageBits_def)
done
lemma page_directory_at_aligned_pd_bits:
"\<lbrakk>page_directory_at pd s;pspace_aligned s\<rbrakk>
\<Longrightarrow> is_aligned pd pd_bits"
apply (clarsimp simp:obj_at_def)
apply (drule(1) pspace_alignedD)
apply (simp add:pd_bits_def pageBits_def)
done
lemma page_map_l4_at_aligned_pml4_bits:
"\<lbrakk>page_map_l4_at pm s;pspace_aligned s\<rbrakk>
\<Longrightarrow> is_aligned pm pml4_bits"
apply (clarsimp simp:obj_at_def)
apply (drule(1) pspace_alignedD)
apply (simp add:pml4_bits_def pageBits_def)
done
(* FIXME: remove *)
lemmas page_directory_at_aligned_pd_bits = is_aligned_pd
(* FIXME x64: check *)
definition
@@ -912,11 +891,11 @@ definition
definition
"parent_for_refs entry \<equiv> \<lambda>cap.
case entry of (VMPTE _, slot)
\<Rightarrow> slot \<in> obj_refs cap \<and> is_pt_cap cap \<and> cap_asid cap \<noteq> None
\<Rightarrow> slot && ~~ mask pt_bits \<in> obj_refs cap \<and> is_pt_cap cap \<and> cap_asid cap \<noteq> None
| (VMPDE _, slot)
\<Rightarrow> slot \<in> obj_refs cap \<and> is_pd_cap cap \<and> cap_asid cap \<noteq> None
\<Rightarrow> slot && ~~ mask pd_bits \<in> obj_refs cap \<and> is_pd_cap cap \<and> cap_asid cap \<noteq> None
| (VMPDPTE _, slot)
\<Rightarrow> slot \<in> obj_refs cap \<and> is_pdpt_cap cap \<and> cap_asid cap \<noteq> None
\<Rightarrow> slot && ~~ mask pdpt_bits \<in> obj_refs cap \<and> is_pdpt_cap cap \<and> cap_asid cap \<noteq> None
| (VMPML4E _, _) \<Rightarrow> True"
(* FIXME x64: check *)
@@ -926,7 +905,7 @@ definition
(VMPTE pte, slot) \<Rightarrow>
(\<exists>p. pte_ref_pages pte = Some p \<and> p \<in> obj_refs cap) \<and>
(\<forall>ref. (ref \<rhd> (slot && ~~ mask pt_bits)) s \<longrightarrow>
vs_cap_ref cap = Some (VSRef ((slot && mask table_size >> word_size_bits) && mask ptTranslationBits) (Some APageTable) # ref))
vs_cap_ref cap = Some (VSRef ((slot && mask pt_bits >> word_size_bits) && mask ptTranslationBits) (Some APageTable) # ref))
| (VMPDE pde, slot) \<Rightarrow>
(\<exists>p. pde_ref_pages pde = Some p \<and> p \<in> obj_refs cap) \<and>
(\<forall>ref. (ref \<rhd> (slot && ~~ mask pd_bits)) s \<longrightarrow>
@@ -1276,25 +1255,30 @@ end
context Arch begin global_naming X64
definition is_asid_pool_cap :: "cap \<Rightarrow> bool"
where "is_asid_pool_cap cap \<equiv> \<exists>ptr asid. cap = cap.ArchObjectCap (arch_cap.ASIDPoolCap ptr asid)"
(* FIXME: move *)
lemma valid_cap_to_pt_cap:
"\<lbrakk>valid_cap c s; obj_refs c = {p}; page_table_at p s\<rbrakk> \<Longrightarrow> is_pt_cap c"
by (clarsimp simp: valid_cap_def obj_at_def is_obj_defs is_pt_cap_def
split: cap.splits option.splits arch_cap.splits if_splits)
context
fixes c :: cap and s :: "'a::state_ext state" and p :: obj_ref
assumes vc: "valid_cap c s"
begin
lemma valid_cap_to_pdpt_cap:
"\<lbrakk>valid_cap c s; obj_refs c = {p}; pd_pointer_table_at p s\<rbrakk> \<Longrightarrow> is_pdpt_cap c"
by (clarsimp simp: valid_cap_def obj_at_def is_obj_defs is_pdpt_cap_def
split: cap.splits option.splits arch_cap.splits if_splits)
lemma valid_cap_to_table_cap':
assumes ref: "p \<in> obj_refs c"
shows valid_cap_to_pt_cap' : "page_table_at p s \<Longrightarrow> is_pt_cap c"
and valid_cap_to_pd_cap' : "page_directory_at p s \<Longrightarrow> is_pd_cap c"
and valid_cap_to_pdpt_cap': "pd_pointer_table_at p s \<Longrightarrow> is_pdpt_cap c"
using vc ref
by (auto simp: valid_cap_def obj_at_def is_obj_defs is_pt_cap_def is_pd_cap_def is_pdpt_cap_def
split: cap.splits option.splits arch_cap.splits if_splits)
lemma valid_cap_to_table_cap:
assumes ref: "obj_refs c = {p}"
shows valid_cap_to_pt_cap : "page_table_at p s \<Longrightarrow> is_pt_cap c"
and valid_cap_to_pd_cap : "page_directory_at p s \<Longrightarrow> is_pd_cap c"
and valid_cap_to_pdpt_cap: "pd_pointer_table_at p s \<Longrightarrow> is_pdpt_cap c"
by (rule valid_cap_to_table_cap'; simp add: ref)+
end
lemma valid_cap_to_pd_cap:
"\<lbrakk>valid_cap c s; obj_refs c = {p}; page_directory_at p s\<rbrakk> \<Longrightarrow> is_pd_cap c"
by (clarsimp simp: valid_cap_def obj_at_def is_obj_defs is_pd_cap_def
split: cap.splits option.splits arch_cap.splits if_splits)
lemma ref_is_unique:
"\<lbrakk>(ref \<rhd> p) s; (ref' \<rhd> p) s; p \<notin> set (x64_global_pdpts (arch_state s));
@@ -2793,7 +2777,7 @@ lemma unmap_pdpt_invs[wp]:
| wpc | simp add: flush_all_def pml4e_ref_pages_def)+
apply (strengthen lookup_pml4_slot_kernel_mappings[THEN notE[where R=False], rotated -1, mk_strg D], simp)
apply (strengthen not_in_global_refs_vs_lookup)+
apply (auto simp: vspace_at_asid_def page_map_l4_at_aligned_pml4_bits[simplified] invs_arch_objs
apply (auto simp: vspace_at_asid_def is_aligned_pml4[simplified] invs_arch_objs
invs_psp_aligned lookup_pml4_slot_eq pml4e_ref_def)
done
@@ -2818,15 +2802,15 @@ lemma unmap_pd_invs[wp]:
"\<lbrace>invs and K (asid \<le> mask asid_bits \<and> vaddr < pptr_base \<and> canonical_address vaddr)\<rbrace>
unmap_pd asid vaddr pd
\<lbrace>\<lambda>rv. invs\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: unmap_pd_def)
apply (rule hoare_pre)
apply (wp_trace store_pdpte_invs do_machine_op_global_refs_inv get_pdpte_wp
apply (wp store_pdpte_invs do_machine_op_global_refs_inv get_pdpte_wp
hoare_vcg_all_lift find_vspace_for_asid_lots
| wpc | simp add: flush_all_def pdpte_ref_pages_def
| strengthen imp_consequent )+
apply (strengthen not_in_global_refs_vs_lookup invs_valid_vs_lookup invs_valid_global_refs
invs_arch_state invs_valid_global_objs | wp)+
apply (auto simp: vspace_at_asid_def page_map_l4_at_aligned_pml4_bits[simplified] invs_arch_objs
invs_arch_state invs_valid_global_objs | wp find_vspace_for_asid_aligned_pm_bits | simp)+
apply (auto simp: vspace_at_asid_def is_aligned_pml4[simplified] invs_arch_objs
invs_psp_aligned lookup_pml4_slot_eq pml4e_ref_def)
done
@@ -2834,6 +2818,7 @@ lemma unmap_pt_invs[wp]:
"\<lbrace>invs and K (asid \<le> mask asid_bits \<and> vaddr < pptr_base \<and> canonical_address vaddr)\<rbrace>
unmap_page_table asid vaddr pt
\<lbrace>\<lambda>rv. invs\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: unmap_page_table_def)
apply (rule hoare_pre)
apply (wp_trace store_pde_invs do_machine_op_global_refs_inv get_pde_wp
@@ -2841,8 +2826,8 @@ lemma unmap_pt_invs[wp]:
| wpc | simp add: flush_all_def pdpte_ref_pages_def
| strengthen imp_consequent )+
apply (strengthen not_in_global_refs_vs_lookup invs_valid_vs_lookup invs_valid_global_refs
invs_arch_state invs_valid_global_objs | wp)+
apply (auto simp: vspace_at_asid_def page_map_l4_at_aligned_pml4_bits[simplified] invs_arch_objs
invs_arch_state invs_valid_global_objs | wp find_vspace_for_asid_aligned_pm_bits | simp)+
apply (auto simp: vspace_at_asid_def is_aligned_pml4[simplified] invs_arch_objs
invs_psp_aligned lookup_pml4_slot_eq pml4e_ref_def)
done
@@ -3683,6 +3668,11 @@ lemma reachable_pd_not_global:
apply (fastforce simp: valid_global_pdpts_def obj_at_def)+
done
lemma vs_lookup_invs_ref_is_unique: "\<lbrakk> (ref \<rhd> p) s; (ref' \<rhd> p) s; invs s\<rbrakk> \<Longrightarrow> ref = ref'"
apply (erule (1) ref_is_unique)
apply (erule reachable_pd_not_global)
by (auto elim: invs_valid_kernel_mappings intro!: valid_objs_caps)
crunch global_refs: store_pde "\<lambda>s. P (global_refs s)"
crunch invs[wp]: pte_check_if_mapped, pde_check_if_mapped "invs"
@@ -3714,7 +3704,7 @@ lemma unmap_page_invs[wp]:
invs_valid_global_refs
invs_arch_state invs_valid_global_objs | clarsimp simp: conj_ac)+
apply wp
apply (auto simp: vspace_at_asid_def page_map_l4_at_aligned_pml4_bits[simplified] invs_arch_objs
apply (auto simp: vspace_at_asid_def is_aligned_pml4[simplified] invs_arch_objs
invs_psp_aligned lookup_pml4_slot_eq pml4e_ref_def)
done
@@ -3808,8 +3798,6 @@ lemma perform_page_invs [wp]:
apply (clarsimp dest!: caps_of_state_valid[OF _ invs_valid_objs]
simp: valid_cap_simps cap_asid_def pt_bits_def
split: option.split_asm)
apply (frule(1) is_aligned_pt[OF _ invs_psp_aligned])
apply (clarsimp simp: is_aligned_neg_mask_eq pt_bits_def)
apply force
apply force
apply clarsimp
@@ -3830,8 +3818,6 @@ lemma perform_page_invs [wp]:
apply (clarsimp dest!: caps_of_state_valid[OF _ invs_valid_objs]
simp: valid_cap_simps cap_asid_def pt_bits_def
split: option.split_asm)
apply (frule(1) is_aligned_pd[OF _ invs_psp_aligned])
apply (clarsimp simp: is_aligned_neg_mask_eq pt_bits_def)
apply force
apply force
apply (drule ref_is_unique[OF _ _ reachable_page_table_not_global])
@@ -3856,8 +3842,6 @@ lemma perform_page_invs [wp]:
apply (clarsimp dest!: caps_of_state_valid[OF _ invs_valid_objs]
simp: valid_cap_simps cap_asid_def pdpt_bits_def
split: option.split_asm)
apply (frule(1) is_aligned_pdpt[OF _ invs_psp_aligned])
apply (clarsimp simp: is_aligned_neg_mask_eq pdpt_bits_def)
apply force
apply force
apply (drule ref_is_unique[OF _ _ reachable_page_table_not_global])
@@ -4159,7 +4143,7 @@ lemma dmo_out16[wp]: "\<lbrace>invs\<rbrace> do_machine_op (out16 irq b) \<lbrac
lemma dmo_out32[wp]: "\<lbrace>invs\<rbrace> do_machine_op (out32 irq b) \<lbrace>\<lambda>y. invs\<rbrace>"
by (clarsimp simp: out32_def do_machine_op_lift_invs)
lemma perform_io_port_invocation_invs:
lemma perform_io_port_invocation_invs[wp]:
"\<lbrace>invs\<rbrace> perform_io_port_invocation iopinv \<lbrace>\<lambda>rv. invs\<rbrace>"
apply (clarsimp simp: perform_io_port_invocation_def)
apply (rule hoare_pre)
@@ -4167,5 +4151,78 @@ lemma perform_io_port_invocation_invs:
apply (clarsimp simp: tcb_at_invs)
done
lemma user_vtop_mask: "user_vtop = mask 47"
by (simp add: user_vtop_def mask_def)
lemma user_vtop_le: "p && user_vtop \<le> mask 47"
by (simp add: user_vtop_mask word_and_le1)
lemma canonical_address_user_vtop_p: "p = p' && user_vtop \<Longrightarrow> canonical_address p"
by (simp add: canonical_address_range user_vtop_le)
lemmas canonical_address_user_vtop [simp] = canonical_address_user_vtop_p[OF refl]
lemma user_vtop_less_pptr_base [simp]: "p && user_vtop < pptr_base"
by (rule le_less_trans[OF user_vtop_le];
simp add: mask_def user_vtop_def pptr_base_def pptrBase_def)
lemma user_vtop_kernel_mapping_slots:
"ucast (get_pml4_index (p && user_vtop)) \<notin> kernel_mapping_slots"
by (rule kernel_base_kernel_mapping_slots; simp)
lemma kernel_vsrefs_kernel_mapping_slots':
"VSRef (get_pml4_index p) (Some APageMapL4) \<in> kernel_vsrefs
\<longleftrightarrow> ucast (p >> pml4_shift_bits) \<in> kernel_mapping_slots"
using kernel_vsrefs_kernel_mapping_slots[of "p >> (pml4_shift_bits - word_size_bits)", symmetric]
apply (clarsimp simp: shiftr_over_and_dist get_pml4_index_def bit_simps shiftr_mask2 shiftr_shiftr)
apply (subst ucast_mask_drop; clarsimp)
done
lemma user_vtop_kernel_vsrefs:
"VSRef (get_pml4_index (p && user_vtop)) (Some APageMapL4) \<notin> kernel_vsrefs"
using user_vtop_kernel_mapping_slots kernel_vsrefs_kernel_mapping_slots'
by (simp add: get_pml4_index_def bit_simps ucast_mask_drop)
context
fixes c :: cap
assumes vsc: "is_vspace_table_cap c"
begin
lemma diminished_table_cap_refl: "diminished c c"
using vsc
apply (simp add: diminished_def mask_cap_def; cases c; simp add: is_cap_simps)
apply (rename_tac ac; case_tac ac; simp add: cap_rights_update_def acap_rights_update_def)
done
lemma diminished_table_cap_iff_eq: "diminished c c' \<longleftrightarrow> c = c'"
apply (rule iffI; (drule sym; simp add: diminished_table_cap_refl)?)
using vsc
apply (cases c; simp add: is_cap_simps;
rename_tac ac; case_tac ac; clarsimp simp: is_cap_simps)
apply (all \<open>thin_tac "c = _"\<close>)
apply (all \<open>clarsimp simp: diminished_def; rule sym; drule sym; simp\<close>)
apply (all \<open>cases c'; simp add: mask_cap_def cap_rights_update_def acap_rights_update_def;
rename_tac ac'; case_tac ac'; simp\<close>)
done
lemma diminished_table_cap_eq: "diminished c = (op = c)"
by (rule ext, rule diminished_table_cap_iff_eq)
end
context begin
private lemma is_vspace_table_cap:
"is_vspace_table_cap (ArchObjectCap (PageTableCap ptr mapped))"
"is_vspace_table_cap (ArchObjectCap (PageDirectoryCap ptr mapped))"
"is_vspace_table_cap (ArchObjectCap (PDPointerTableCap ptr mapped))"
"is_vspace_table_cap (ArchObjectCap (PML4Cap ptr asid))"
by (auto simp: is_cap_simps)
lemmas diminished_table_cap_simps
= is_vspace_table_cap[THEN diminished_table_cap_eq]
end
end
end


@@ -304,7 +304,7 @@ where
PageCap dev p R map_type pgsz mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PageMap then
if length args > 2 \<and> length extra_caps > 0
then let vaddr = args ! 0;
then let vaddr = args ! 0 && user_vtop;
rights_mask = args ! 1;
attr = args ! 2;
vspace_cap = fst (extra_caps ! 0)
@@ -316,8 +316,8 @@
| _ \<Rightarrow> throwError $ InvalidCapability 1);
vspace' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid asid;
whenE (vspace' \<noteq> vspace) $ throwError $ InvalidCapability 1;
vtop \<leftarrow> returnOk $ vaddr + (1 << (pageBitsForSize pgsz)) - 1;
whenE (vtop \<ge> kernel_base) $ throwError $ InvalidArgument 0;
vtop \<leftarrow> returnOk $ vaddr + mask (pageBitsForSize pgsz);
whenE (vtop > user_vtop) $ throwError $ InvalidArgument 0;
vm_rights \<leftarrow> returnOk $ mask_vm_rights R (data_to_rights rights_mask);
check_vp_alignment pgsz vaddr;
entries \<leftarrow> create_mapping_entries (addrFromPPtr p) vaddr pgsz vm_rights
@@ -373,99 +373,27 @@ definition
(arch_invocation,'z::state_ext) se_monad"
where
"arch_decode_invocation label args x_slot cte cap extra_caps \<equiv> case cap of
PageDirectoryCap p mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PageDirectoryMap then
if length args > 1 \<and> length extra_caps > 0
then let vaddr = args ! 0;
attr = args ! 1;
vspace_cap = fst (extra_caps ! 0)
in doE
whenE (mapped_address \<noteq> None) $ throwError $ InvalidCapability 0;
shift_bits \<leftarrow> returnOk $ pdpt_shift_bits;
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask shift_bits;
whenE (vaddr' \<ge> kernel_base) $ throwError IllegalOperation;
(pml,asid) \<leftarrow> (case vspace_cap of
ArchObjectCap (PML4Cap pml (Some asid)) \<Rightarrow> returnOk (pml,asid)
| _ \<Rightarrow> throwError $ InvalidCapability 0);
pml' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid (asid);
whenE (pml \<noteq> pml) $ throwError $ InvalidCapability 0;
pdpt_slot \<leftarrow> lookup_error_on_failure False $ lookup_pdpt_slot pml vaddr;
old_pdpte \<leftarrow> liftE $ get_pdpte pdpt_slot;
cap' <- returnOk $ ArchObjectCap $ PageDirectoryCap p $ Some (asid,vaddr');
unlessE (old_pdpte = InvalidPDPTE) $ throwError DeleteFirst;
pdpte \<leftarrow> returnOk (PageDirectoryPDPTE (addrFromPPtr p)
(filter_frame_attrs $ attribs_from_word attr) vm_read_write);
returnOk $ InvokePageDirectory $ PageDirectoryMap cap' cte pdpte pdpt_slot pml
odE
else throwError TruncatedMessage
else if invocation_type label = ArchInvocationLabel X64PageDirectoryUnmap then doE
is_final \<leftarrow> liftE $ is_final_cap (ArchObjectCap cap);
if is_final
then throwError RevokeFirst
else returnOk $ InvokePageDirectory $ PageDirectoryUnmap (ArchObjectCap cap) cte
odE
else throwError IllegalOperation
| PageTableCap p mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PageMap then
if length args > 1 \<and> length extra_caps > 0
then let vaddr = args ! 0;
attr = args ! 1;
pml_cap = fst (extra_caps ! 0)
in doE
whenE (mapped_address \<noteq> None) $ throwError $ InvalidCapability 0;
shift_bits \<leftarrow> returnOk $ pageBits + ptTranslationBits;
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask shift_bits;
whenE (vaddr' \<ge> kernel_base) $ throwError IllegalOperation;
(pml,asid) \<leftarrow> (case pml_cap of
ArchObjectCap ( PML4Cap pml (Some asid)) \<Rightarrow> returnOk (pml,asid)
| _ \<Rightarrow> throwError $ InvalidCapability 0);
pml' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid asid;
whenE (pml \<noteq> pml') $ throwError $ InvalidCapability 1;
pd_slot \<leftarrow> lookup_error_on_failure False $ lookup_pd_slot pml vaddr';
pde \<leftarrow> liftE $ get_pde pd_slot;
unlessE (pde = InvalidPDE) $ throwError DeleteFirst;
pde \<leftarrow> returnOk (PageTablePDE (addrFromPPtr p)
(filter_frame_attrs $ attribs_from_word attr) vm_read_write);
returnOk $ InvokePageTable $
PageTableMap
(ArchObjectCap $ PageTableCap p (Some (asid,vaddr')))
cte pde pd_slot pml
odE
else throwError TruncatedMessage
else if invocation_type label = ArchInvocationLabel X64PageTableUnmap then doE
final \<leftarrow> liftE $ is_final_cap (ArchObjectCap cap);
unlessE final $ throwError RevokeFirst;
returnOk $ InvokePageTable $ PageTableUnmap (ArchObjectCap cap) cte
odE
else throwError IllegalOperation
| PageCap _ _ _ _ _ _ \<Rightarrow> decode_page_invocation label args cte cap extra_caps
| PDPointerTableCap p mapped_address \<Rightarrow>
PDPointerTableCap p mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PDPTMap then
if length args > 1 \<and> length extra_caps > 0
then let vaddr = args ! 0;
then let vaddr = args ! 0 && user_vtop;
attr = args ! 1;
vspace_cap = fst (extra_caps ! 0)
pml4_cap = fst (extra_caps ! 0)
in doE
whenE (mapped_address \<noteq> None) $ throwError $ InvalidCapability 0;
shift_bits \<leftarrow> returnOk $ pageBits + ptTranslationBits
+ ptTranslationBits + ptTranslationBits;
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask shift_bits;
whenE (vaddr' \<ge> kernel_base) $ throwError IllegalOperation;
(vspace,asid) \<leftarrow> (case vspace_cap of
ArchObjectCap (PML4Cap pml (Some asid)) \<Rightarrow> returnOk (pml,asid)
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask pml4_shift_bits;
(pml4 ,asid) \<leftarrow> (case pml4_cap of
ArchObjectCap (PML4Cap pml4 (Some asid)) \<Rightarrow> returnOk (pml4, asid)
| _ \<Rightarrow> throwError $ InvalidCapability 0);
vspace' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid (asid);
whenE (vspace' \<noteq> vspace) $ throwError $ InvalidCapability 0;
pml_slot \<leftarrow> returnOk $ lookup_pml4_slot vspace vaddr;
pml4' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid (asid);
whenE (pml4' \<noteq> pml4) $ throwError $ InvalidCapability 0;
pml_slot \<leftarrow> returnOk $ lookup_pml4_slot pml4 vaddr;
old_pml4e \<leftarrow> liftE $ get_pml4e pml_slot;
cap' <- returnOk $ ArchObjectCap $ PDPointerTableCap p $ Some (asid,vaddr');
unlessE (old_pml4e = InvalidPML4E) $ throwError DeleteFirst;
pml4e \<leftarrow> returnOk (PDPointerTablePML4E (addrFromPPtr p)
(filter_frame_attrs $ attribs_from_word attr) vm_read_write);
returnOk $ InvokePDPT $ PDPTMap cap' cte pml4e pml_slot vspace
cap' <- returnOk $ ArchObjectCap $ PDPointerTableCap p $ Some (asid, vaddr');
returnOk $ InvokePDPT $ PDPTMap cap' cte pml4e pml_slot pml4
odE
else throwError TruncatedMessage
else if invocation_type label = ArchInvocationLabel X64PDPTUnmap then doE
@@ -475,6 +403,68 @@ where
odE
else throwError IllegalOperation
| PageDirectoryCap p mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PageDirectoryMap then
if length args > 1 \<and> length extra_caps > 0
then let vaddr = args ! 0 && user_vtop;
attr = args ! 1;
pml4_cap = fst (extra_caps ! 0)
in doE
whenE (mapped_address \<noteq> None) $ throwError $ InvalidCapability 0;
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask pdpt_shift_bits;
(pml4, asid) \<leftarrow> (case pml4_cap of
ArchObjectCap (PML4Cap pml4 (Some asid)) \<Rightarrow> returnOk (pml4, asid)
| _ \<Rightarrow> throwError $ InvalidCapability 0);
pml4' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid asid;
whenE (pml4' \<noteq> pml4) $ throwError $ InvalidCapability 0;
pdpt_slot \<leftarrow> lookup_error_on_failure False $ lookup_pdpt_slot pml4 vaddr;
old_pdpte \<leftarrow> liftE $ get_pdpte pdpt_slot;
unlessE (old_pdpte = InvalidPDPTE) $ throwError DeleteFirst;
pdpte \<leftarrow> returnOk (PageDirectoryPDPTE (addrFromPPtr p)
(filter_frame_attrs $ attribs_from_word attr) vm_read_write);
cap' <- returnOk $ ArchObjectCap $ PageDirectoryCap p $ Some (asid, vaddr');
returnOk $ InvokePageDirectory $ PageDirectoryMap cap' cte pdpte pdpt_slot pml4
odE
else throwError TruncatedMessage
else if invocation_type label = ArchInvocationLabel X64PageDirectoryUnmap then doE
final \<leftarrow> liftE $ is_final_cap (ArchObjectCap cap);
unlessE final $ throwError RevokeFirst;
returnOk $ InvokePageDirectory $ PageDirectoryUnmap (ArchObjectCap cap) cte
odE
else throwError IllegalOperation
| PageTableCap p mapped_address \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64PageTableMap then
if length args > 1 \<and> length extra_caps > 0
then let vaddr = args ! 0 && user_vtop;
attr = args ! 1;
pml4_cap = fst (extra_caps ! 0)
in doE
whenE (mapped_address \<noteq> None) $ throwError $ InvalidCapability 0;
vaddr' \<leftarrow> returnOk $ vaddr && ~~ mask pd_shift_bits;
(pml4, asid) \<leftarrow> (case pml4_cap of
ArchObjectCap (PML4Cap pml4 (Some asid)) \<Rightarrow> returnOk (pml4, asid)
| _ \<Rightarrow> throwError $ InvalidCapability 0);
pml4' \<leftarrow> lookup_error_on_failure False $ find_vspace_for_asid asid;
whenE (pml4' \<noteq> pml4) $ throwError $ InvalidCapability 1;
pd_slot \<leftarrow> lookup_error_on_failure False $ lookup_pd_slot pml4 vaddr;
old_pde \<leftarrow> liftE $ get_pde pd_slot;
unlessE (old_pde = InvalidPDE) $ throwError DeleteFirst;
pde \<leftarrow> returnOk (PageTablePDE (addrFromPPtr p)
(filter_frame_attrs $ attribs_from_word attr) vm_read_write);
cap' <- returnOk $ ArchObjectCap $ PageTableCap p $ Some (asid, vaddr');
returnOk $ InvokePageTable $ PageTableMap cap' cte pde pd_slot pml4
odE
else throwError TruncatedMessage
else if invocation_type label = ArchInvocationLabel X64PageTableUnmap then doE
final \<leftarrow> liftE $ is_final_cap (ArchObjectCap cap);
unlessE final $ throwError RevokeFirst;
returnOk $ InvokePageTable $ PageTableUnmap (ArchObjectCap cap) cte
odE
else throwError IllegalOperation
| PageCap _ _ _ _ _ _ \<Rightarrow> decode_page_invocation label args cte cap extra_caps
| ASIDControlCap \<Rightarrow>
if invocation_type label = ArchInvocationLabel X64ASIDControlMakePool then
if length args > 1 \<and> length extra_caps > 1


@@ -278,11 +278,11 @@ definition
case i of
InvokePageTable oper \<Rightarrow> perform_page_table_invocation oper
| InvokePageDirectory oper \<Rightarrow> perform_page_directory_invocation oper
| InvokePDPT oper \<Rightarrow> perform_pdpt_invocation oper
| InvokePage oper \<Rightarrow> perform_page_invocation oper
| InvokeASIDControl oper \<Rightarrow> perform_asid_control_invocation oper
| InvokeASIDPool oper \<Rightarrow> perform_asid_pool_invocation oper
| InvokeIOPort oper \<Rightarrow> perform_io_port_invocation oper
| _ \<Rightarrow> fail;
| InvokeIOPort oper \<Rightarrow> perform_io_port_invocation oper;
return $ []
od"


@@ -116,6 +116,10 @@ definition
pptr_base :: "machine_word" where
"pptr_base = Platform.X64.pptrBase"
(* Virtual address space available to users. *)
definition
user_vtop :: "machine_word" where
"user_vtop = 0x00007fffffffffff"
text {* The lowest virtual address in the kernel window. The kernel reserves the
virtual addresses from here up in every virtual address space. *}
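(* Hedged orientation note: together with user_vtop_less_pptr_base (proved
   elsewhere in this commit), addresses at or below user_vtop lie strictly below
   pptr_base, so the user region and the kernel window do not overlap. *)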