aarch64 ainvs: finish vspace lemma statement pass

The statements of all VSpace-related lemmas are now in as good a state
as we can predict without proving them.

Signed-off-by: Gerwin Klein <gerwin.klein@proofcraft.systems>
Gerwin Klein 2022-05-20 15:12:46 +10:00 committed by Gerwin Klein
parent 794c2475e9
commit 1dd1f54f6b
17 changed files with 188 additions and 652 deletions

@ -15,153 +15,35 @@ context Arch begin
global_naming AARCH64
(* FIXME AARCH64
lemma canonical_not_kernel_is_user:
"\<lbrakk> v \<notin> kernel_mappings; canonical_address v \<rbrakk> \<Longrightarrow> v \<in> user_region "
by (simp add: kernel_mappings_def not_le canonical_below_pptr_base_user)
lemma no_user_region_kernel_mappings:
"\<lbrakk> p \<in> user_region; p \<in> kernel_mappings \<rbrakk> \<Longrightarrow> False"
using kernel_mappings_user_region by blast
lemma kernel_mappings_slots_eq:
"canonical_address p \<Longrightarrow>
ucast (p >> pt_bits_left max_pt_level) \<in> kernel_mapping_slots \<longleftrightarrow> p \<in> kernel_mappings"
apply (simp add: kernel_mappings_def kernel_mapping_slots_def ucast_mask_drop canonical_address_range)
apply word_bitwise
by (auto simp: canonical_bit_def word_bits_def pt_bits_left_def bit_simps level_defs word_size
rev_bl_order_simps)
*)
lemma ucast_ucast_mask_low: "(ucast (x && mask asid_low_bits) :: asid_low_index) = ucast x"
by (rule ucast_mask_drop, simp add: asid_low_bits_def)
(* FIXME AARCH64
lemma valid_global_table_rights:
"\<lbrakk> pt_ptr \<in> riscv_global_pts (arch_state s) level;
valid_global_tables s; valid_global_arch_objs s \<rbrakk> \<Longrightarrow>
\<exists>pt. pts_of s pt_ptr = Some pt \<and> (\<forall>idx. pte_rights_of (pt idx) = vm_kernel_only)"
by (frule (1) global_pts_ofD) (clarsimp simp: valid_global_tables_def Let_def) *)
(* FIXME AARCH64
lemma ptes_of_idx:
"\<lbrakk> ptes_of s (pt_slot_offset level pt_ptr p) = Some pte;
"\<lbrakk> ptes_of s (pt_type pt) (pt_slot_offset level pt_ptr p) = Some pte;
pts_of s pt_ptr = Some pt; pspace_aligned s \<rbrakk> \<Longrightarrow>
\<exists>idx. pt idx = pte"
\<exists>idx. pt_apply pt idx = pte"
apply (drule_tac pt_ptr=pt_ptr in pspace_aligned_pts_ofD, simp)
apply (fastforce simp: pte_of_def)
done
lemma valid_global_arch_objs_global_ptD:
"valid_global_arch_objs s \<Longrightarrow>
riscv_global_pt (arch_state s) \<in> riscv_global_pts (arch_state s) max_pt_level"
by (auto simp: valid_global_arch_objs_def Let_def riscv_global_pt_def)
lemma equal_kernel_mappingsD:
"\<lbrakk> vspace_for_asid asid s = Some pt_ptr; pts_of s pt_ptr = Some pt;
equal_kernel_mappings s \<rbrakk> \<Longrightarrow> has_kernel_mappings pt s"
by (simp add: equal_kernel_mappings_def)
lemma has_kernel_mappingsD:
"\<lbrakk> has_kernel_mappings pt s; valid_global_arch_objs s; idx \<in> kernel_mapping_slots;
pte = pt idx \<rbrakk> \<Longrightarrow>
\<exists>pt'. pts_of s (riscv_global_pt (arch_state s)) = Some pt' \<and> pte = pt' idx"
unfolding has_kernel_mappings_def
by (fastforce simp: pt_at_eq dest: valid_global_arch_objs_pt_at)
lemma pte_rights_of_kernel:
"\<lbrakk> p \<in> kernel_mappings; canonical_address p; valid_global_vspace_mappings s;
equal_kernel_mappings s; valid_global_tables s; valid_global_arch_objs s; valid_vspace_objs s;
valid_asid_table s; valid_uses s; pspace_aligned s;
(\<exists>asid. vspace_for_asid asid s = Some pt_ref) \<or> pt_ref = riscv_global_pt (arch_state s);
pt_lookup_slot pt_ref p (ptes_of s) = Some (level, slot); ptes_of s slot = Some pte \<rbrakk>
\<Longrightarrow> pte_rights_of pte = vm_kernel_only"
apply (clarsimp simp: pt_lookup_slot_def pt_lookup_slot_from_level_def)
apply (rename_tac pt_ptr)
apply (erule disjE; clarsimp)
apply (subgoal_tac "is_aligned pt_ref pt_bits")
prefer 2
apply (drule (2) vspace_for_asid_valid_pt)
apply clarsimp
apply (erule pspace_aligned_pts_ofD)
apply simp
apply (cases "level = max_pt_level")
apply (drule pt_walk_level)
apply (clarsimp simp flip: kernel_mappings_slots_eq)
apply (clarsimp simp: ptes_of_def table_index_offset_pt_bits_left)
apply (drule (2) equal_kernel_mappingsD)
apply (drule (3) has_kernel_mappingsD)
apply clarsimp
apply (frule valid_global_arch_objs_global_ptD)
apply (drule (2) valid_global_table_rights)
apply fastforce
apply (drule pt_walk_equal_top_slot_Some[where pt_ptr'="riscv_global_pt (arch_state s)", rotated])
apply (erule (7) equal_mappings_pt_slot_offset)
apply clarsimp
apply (frule (1) valid_global_tablesD, simp)
apply (fastforce dest!: ptes_of_idx valid_global_table_rights)
apply (frule (1) valid_global_tablesD, simp)
apply (fastforce dest!: ptes_of_idx valid_global_table_rights)
done
lemma some_get_page_info_kmapsD:
"\<lbrakk> get_page_info (aobjs_of s) pt_ref p = Some (b, a, attr, r);
p \<in> kernel_mappings; canonical_address p; valid_global_vspace_mappings s;
equal_kernel_mappings s; valid_global_tables s; valid_global_arch_objs s;
valid_vspace_objs s; valid_asid_table s; valid_uses s; pspace_aligned s;
(\<exists>asid. vspace_for_asid asid s = Some pt_ref) \<or> pt_ref = riscv_global_pt (arch_state s) \<rbrakk>
\<Longrightarrow> (\<exists>sz. pageBitsForSize sz = a) \<and> r = {}"
apply (clarsimp simp: get_page_info_def in_omonad)
apply (rename_tac level slot pte)
apply (frule (12) pte_rights_of_kernel, simp add: vm_kernel_only_def)
apply (clarsimp simp: pte_info_def split: pte.splits)
apply (drule pt_lookup_slot_max_pt_level)
apply (rule_tac x="vmpage_size_of_level level" in exI)
apply simp
done
sorry (* FIXME AARCH64
apply (fastforce simp: level_pte_of_def in_omonad)
done *)
lemma pte_info_not_InvalidPTE:
"pte_info level pte = Some (b, a, attr, r) \<Longrightarrow> pte \<noteq> InvalidPTE"
by (simp add: pte_info_def split: pte.splits)
lemma valid_global_tables_toplevel_pt:
"\<lbrakk> pts_of s (riscv_global_pt (arch_state s)) = Some pt; valid_global_tables s \<rbrakk> \<Longrightarrow>
\<forall>idx\<in>- kernel_mapping_slots. pt idx = InvalidPTE"
by (simp add: valid_global_tables_def Let_def riscv_global_pt_def)
lemma global_pt_not_invalid_kernel:
"\<lbrakk> ptes_of s (pt_slot_offset max_pt_level (riscv_global_pt (arch_state s)) p) = Some pte;
pte \<noteq> InvalidPTE; canonical_address p; valid_global_tables s;
is_aligned (riscv_global_pt (arch_state s)) pt_bits\<rbrakk>
\<Longrightarrow> p \<in> kernel_mappings"
apply (clarsimp simp: pte_of_def table_index_offset_pt_bits_left)
apply (fastforce simp flip: kernel_mappings_slots_eq dest: valid_global_tables_toplevel_pt)
done
lemma get_page_info_gpd_kmaps:
"\<lbrakk> valid_global_vspace_mappings s; valid_arch_state s;
get_page_info (aobjs_of s) (riscv_global_pt (arch_state s)) p = Some (b, a, attr, r) \<rbrakk>
\<Longrightarrow> p \<in> kernel_mappings"
apply (clarsimp simp: get_page_info_def in_omonad pt_lookup_slot_def pt_lookup_slot_from_level_def)
apply (subst (asm) pt_walk.simps)
apply (fastforce dest: pte_info_not_InvalidPTE global_pt_not_invalid_kernel
simp: valid_arch_state_def in_omonad)
done
lemma get_vspace_of_thread_reachable:
"\<lbrakk> get_vspace_of_thread (kheap s) (arch_state s) t \<noteq> riscv_global_pt (arch_state s);
"\<lbrakk> get_vspace_of_thread (kheap s) (arch_state s) t \<noteq> global_pt s;
valid_uses s \<rbrakk>
\<Longrightarrow> (\<exists>\<rhd> (max_pt_level, get_vspace_of_thread (kheap s) (arch_state s) t)) s"
by (auto simp: get_vspace_of_thread_def
split: option.splits cap.splits kernel_object.splits arch_cap.splits if_split_asm
split: option.splits cap.splits kernel_object.splits arch_cap.splits if_split_asm pt_type.splits
intro: vspace_for_asid_vs_lookup)
*)
lemma data_at_aligned:
"\<lbrakk> data_at sz p s; pspace_aligned s\<rbrakk> \<Longrightarrow> is_aligned p (pageBitsForSize sz)"
unfolding data_at_def by (auto simp: obj_at_def dest: pspace_alignedD)
lemma is_aligned_ptrFromPAddr_n_eq:
lemma is_aligned_ptrFromPAddr_n_eq: (* FIXME AARCH64: might need a different precondition *)
"sz \<le> canonical_bit \<Longrightarrow> is_aligned (ptrFromPAddr x) sz = is_aligned x sz"
apply (rule iffI)
apply (simp add: ptrFromPAddr_def pptrBaseOffset_def pptrBase_def paddrBase_def canonical_bit_def)
@ -225,10 +107,9 @@ named_theorems AInvsPre_asms
lemma get_vspace_of_thread_asid_or_global_pt:
"(\<exists>asid. vspace_for_asid asid s = Some (get_vspace_of_thread (kheap s) (arch_state s) t))
\<or> get_vspace_of_thread (kheap s) (arch_state s) t = riscv_global_pt (arch_state s)"
sorry (* FIXME AARCH64
\<or> get_vspace_of_thread (kheap s) (arch_state s) t = global_pt s"
by (auto simp: get_vspace_of_thread_def
split: option.split kernel_object.split cap.split arch_cap.split) *)
split: option.split kernel_object.split cap.split arch_cap.split pt_type.splits)
lemma ptable_rights_imp_frame[AInvsPre_asms]:
assumes "valid_state s"
@ -238,8 +119,8 @@ lemma ptable_rights_imp_frame[AInvsPre_asms]:
using assms get_vspace_of_thread_asid_or_global_pt[of s t]
apply (clarsimp simp: ptable_lift_def ptable_rights_def in_user_frame_def in_device_frame_def
split: option.splits)
apply (case_tac "x \<in> kernel_mappings")
sorry (* FIXME AARCH64
apply (case_tac "x \<in> kernel_mappings")
apply (frule (2) some_get_page_info_kmapsD;
fastforce simp: valid_state_def valid_arch_state_def valid_pspace_def)
apply (frule some_get_page_info_umapsD)

@ -711,6 +711,10 @@ lemma set_asid_pool_valid_objs [wp]:
including unfold_objects
by (clarsimp simp: a_type_def valid_obj_def)
lemma invs_valid_global_arch_objs:
"invs s \<Longrightarrow> valid_global_arch_objs s"
by (clarsimp simp: invs_def valid_state_def valid_arch_state_def)
lemma is_aligned_pt:
"\<lbrakk> pt_at pt_t pt s; pspace_aligned s \<rbrakk> \<Longrightarrow> is_aligned pt (pt_bits pt_t)"
apply (clarsimp simp: obj_at_def)
@ -1316,15 +1320,12 @@ lemma kheap_pt_upd_simp[simp]:
unfolding aobj_of_def opt_map_def
by (auto split: kernel_object.split)
(* FIXME AARCH64: we might need valid_global_arch_objs after all if we want arm_us_global_vspace to be aligned
lemma arm_global_vspace_aligned[simp]:
"\<lbrakk> pspace_aligned s ; valid_global_arch_objs s \<rbrakk>
\<Longrightarrow> is_aligned (global_pt s) pt_bits"
\<Longrightarrow> is_aligned (global_pt s) (pt_bits VSRootPT_T)"
apply (clarsimp simp add: valid_global_arch_objs_def)
apply (rule is_aligned_pt; assumption?)
apply (fastforce simp: riscv_global_pt_def)
done
*)
lemma global_pt_in_global_refs[simp]:
"global_pt s \<in> global_refs s"
@ -2126,32 +2127,11 @@ crunches store_pte
(wp: set_pt_zombies set_pt_ifunsafe set_pt_reply_caps set_pt_reply_masters
set_pt_valid_global valid_irq_node_typ valid_irq_handlers_lift set_pt_cur)
(* FIXME AARCH64: probably remove
lemma store_pte_valid_global_tables:
"\<lbrace> valid_global_tables and valid_global_arch_objs and valid_global_vspace_mappings
and (\<lambda>s. table_base p \<notin> global_refs s) \<rbrace>
store_pte p pte
\<lbrace>\<lambda>_. valid_global_tables \<rbrace>"
unfolding store_pte_def set_pt_def
supply fun_upd_apply[simp del]
apply (wpsimp wp: set_object_wp)
apply (simp (no_asm) add: valid_global_tables_def Let_def)
apply (rule conjI)
apply clarsimp
apply (subst (asm) pt_walk_pt_upd_idem)
apply (fastforce simp: global_refs_def dest: valid_global_tablesD[simplified riscv_global_pt_def])
apply (fastforce dest: valid_global_vspace_mappings_aligned[simplified riscv_global_pt_def])
apply (fastforce simp: valid_global_tables_def Let_def)
apply (clarsimp simp: valid_global_tables_def Let_def fun_upd_apply split: if_splits)
apply (fastforce dest: riscv_global_pt_in_global_refs simp: riscv_global_pt_def global_refs_def)
done
lemma store_pte_valid_global_arch_objs[wp]:
"store_pte p pte \<lbrace> valid_global_arch_objs \<rbrace>"
"store_pte pt_t p pte \<lbrace> valid_global_arch_objs \<rbrace>"
unfolding store_pte_def set_pt_def
by (wpsimp wp: set_object_wp)
(clarsimp simp: valid_global_arch_objs_def obj_at_def)
*)
lemma store_pte_unique_table_refs[wp]:
"store_pte pt_t p pte \<lbrace> unique_table_refs \<rbrace>"

@ -209,11 +209,10 @@ proof -
done
qed
(* FIXME AARCH64
crunch typ_at [wp]:
perform_page_table_invocation, perform_page_invocation, perform_asid_pool_invocation
"\<lambda>s. P (typ_at T p s)"
(wp: crunch_wps)
(wp: crunch_wps simp: crunch_simps ignore: store_pte)
lemmas perform_page_table_invocation_typ_ats [wp] =
abs_typ_at_lifts [OF perform_page_table_invocation_typ_at]
@ -223,7 +222,7 @@ lemmas perform_page_invocation_typ_ats [wp] =
lemmas perform_asid_pool_invocation_typ_ats [wp] =
abs_typ_at_lifts [OF perform_asid_pool_invocation_typ_at]
*)
lemma perform_asid_control_invocation_tcb_at:
"\<lbrace>invs and valid_aci aci and st_tcb_at active p and
K (\<forall>w a b c. aci = asid_control_invocation.MakePool w a b c \<longrightarrow> w \<noteq> p)\<rbrace>
@ -960,14 +959,12 @@ lemma sts_vspace_at_asid[wp]:
sorry (* FIXME AARCH64
unfolding vspace_at_asid_def by wpsimp *)
(* FIXME AARCH64
lemma sts_valid_slots_inv[wp]:
"set_thread_state t st \<lbrace>valid_slots m\<rbrace>"
unfolding valid_slots_def
apply (cases m)
apply (wpsimp wp: hoare_vcg_all_lift hoare_vcg_imp_lift' sts_typ_ats)
apply fastforce
done *)
done
lemma sts_same_ref[wp]:
"set_thread_state t st \<lbrace>\<lambda>s. P (same_ref ref cap s)\<rbrace>"
@ -978,7 +975,7 @@ lemma sts_valid_page_inv[wp]:
unfolding valid_page_inv_def
apply (cases page_invocation)
apply (wpsimp wp: sts_typ_ats hoare_vcg_ex_lift hoare_vcg_disj_lift | wps)+
sorry (* FIXME AARCH64 something wrong, getting undefined in pre/post
sorry (* FIXME AARCH64 something wrong, getting undefined in pre/post; valid_page_inv is missing PageFlush
done *)
crunch global_refs_inv[wp]: set_thread_state "\<lambda>s. P (global_refs s)"
@ -990,9 +987,8 @@ lemma sts_vs_lookup_slot[wp]:
lemma sts_valid_vspace_table_inv[wp]:
"set_thread_state t st \<lbrace>valid_pti i\<rbrace>"
unfolding valid_pti_def
sorry (* FIXME AARCH64
by (cases i; wpsimp wp: sts_typ_ats hoare_vcg_ex_lift hoare_vcg_all_lift
simp: invalid_pte_at_def aobjs_of_ako_at_Some[symmetric]) *)
simp: invalid_pte_at_def aobjs_of_ako_at_Some[symmetric])
lemma sts_valid_arch_inv:
"set_thread_state t st \<lbrace>valid_arch_inv ai\<rbrace>"
@ -1000,7 +996,7 @@ lemma sts_valid_arch_inv:
apply (rename_tac asid_control_invocation)
apply (case_tac asid_control_invocation)
apply (clarsimp simp: valid_aci_def cte_wp_at_caps_of_state)
sorry (* FIXME AARCH64
sorry (* FIXME AARCH64 VCPU: valid_arch_inv is missing cases (VCPU and InvokeVSpace)
apply (rule hoare_pre, wp hoare_vcg_ex_lift cap_table_at_typ_at)
apply clarsimp
apply (clarsimp simp: valid_apinv_def split: asid_pool_invocation.splits)
@ -1041,16 +1037,9 @@ lemma shiftr_irrelevant:
declare mask_shift [simp]
declare word_less_sub_le [simp del]
declare ptrFormPAddr_addFromPPtr [simp]
lemma le_user_vtop_less_pptr_base[simp]:
"x \<le> user_vtop \<Longrightarrow> x < pptr_base"
using dual_order.strict_trans2 sorry (* FIXME AARCH64 by blast *)
(* FIXME AARCH64
lemmas le_user_vtop_canonical_address = below_user_vtop_canonical[simp]
lemma ptrFromPAddr_addr_from_ppn:
"is_aligned pt_ptr table_size \<Longrightarrow>
ptrFromPAddr (addr_from_ppn (ucast (addrFromPPtr pt_ptr >> pageBits))) = pt_ptr"
@ -1066,8 +1055,8 @@ lemma ptrFromPAddr_addr_from_ppn':
*)
lemma is_aligned_pageBitsForSize_table_size:
"is_aligned p (pageBitsForSize vmpage_size) \<Longrightarrow> is_aligned p (table_size pt_t)"
sorry (* FIXME AARCH64 ^ no obvious mapping from pt_type to vmpage_size
"is_aligned p (pageBitsForSize vmpage_size) \<Longrightarrow> is_aligned p (table_size NormalPT_T)"
sorry (* FIXME AARCH64 ^ no obvious mapping from pt_type to vmpage_size (only true for NormalPT_T)
apply (erule is_aligned_weaken)
apply (simp add: pbfs_atleast_pageBits[unfolded bit_simps] bit_simps)
done *)
@ -1080,8 +1069,7 @@ lemma vmsz_aligned_vref_for_level:
lemma vs_lookup_slot_pte_at:
"\<lbrakk> vs_lookup_slot level asid vref s = Some (level, pt_slot);
vref \<in> user_region; level \<le> max_pt_level; invs s \<rbrakk> \<Longrightarrow>
pte_at pt_t pt_slot s"
sorry (* FIXME AARCH64 check statement
pte_at level pt_slot s"
apply (clarsimp simp: pte_at_eq vs_lookup_slot_table_unfold in_omonad)
apply (drule valid_vspace_objs_strongD[rotated]; clarsimp)
apply (clarsimp simp: ptes_of_def in_omonad)
@ -1090,13 +1078,13 @@ lemma vs_lookup_slot_pte_at:
apply (thin_tac "pt_slot = t" for t)
apply (clarsimp simp: pt_slot_offset_def)
apply (rule is_aligned_add; simp add: is_aligned_shift)
done *)
done
(* FIXME AARCH64
lemma vmpage_size_of_level_pt_bits_left:
"\<lbrakk> pt_bits_left level = pageBitsForSize vmpage_size; level \<le> max_pt_level \<rbrakk> \<Longrightarrow>
vmpage_size_of_level level = vmpage_size"
by (cases vmpage_size; simp add: vmpage_size_of_level_def pt_bits_left_def bit_simps) auto *)
vmsize_of_level level = vmpage_size"
by (cases vmpage_size; simp add: vmsize_of_level_def pt_bits_left_def bit_simps
split: if_split_asm) auto
lemma is_PagePTE_make_user[simp]:
"is_PagePTE (make_user_pte p attr R sz) \<or> make_user_pte p attr R sz = InvalidPTE"

@ -98,8 +98,7 @@ lemma same_object_as_def2:
apply (simp add: same_object_as_def is_cap_simps split: cap.split)
apply (auto simp: cap_master_cap_def bits_of_def is_cap_simps
split: arch_cap.splits)
sorry (* FIXME AARCH64
done *)
done
lemma same_object_as_cap_master [CNodeInv_AI_assms]:
"same_object_as cap cap' \<Longrightarrow> cap_master_cap cap = cap_master_cap cap'"

@ -301,7 +301,7 @@ lemma set_cap_valid_arch_caps:
\<or> (\<forall>oref \<in> obj_refs cap'. \<not> reachable_target vref oref s))
\<and> no_cap_to_obj_with_diff_ref cap {ptr} s
\<and> (is_pt_cap cap \<longrightarrow> cap_asid cap = None
\<longrightarrow> (\<forall>r \<in> obj_refs cap. pts_of s r = Some (empty_pt FIXME)))
\<longrightarrow> (\<forall>r \<in> obj_refs cap. pts_of s r = Some (empty_pt (cap_pt_type cap))))
\<and> (is_pt_cap cap
\<longrightarrow> (\<forall>oldcap. caps_of_state s ptr = Some oldcap \<longrightarrow>
is_pt_cap oldcap
@ -313,9 +313,8 @@ lemma set_cap_valid_arch_caps:
set_cap cap ptr
\<lbrace>\<lambda>rv. valid_arch_caps\<rbrace>"
unfolding valid_arch_caps_def
sorry (*
by (wpsimp wp: set_cap_valid_vs_lookup set_cap_valid_table_caps set_cap_unique_table_caps
simp: cte_wp_at_caps_of_state) *)
simp: cte_wp_at_caps_of_state)
lemma valid_table_capsD:
"\<lbrakk> cte_wp_at ((=) cap) ptr s; valid_table_caps s; is_pt_cap cap; cap_asid cap = None \<rbrakk>

@ -141,7 +141,7 @@ lemma hyp_refs_of: "\<And>obj p. \<lbrakk> ko_at obj p s \<rbrakk> \<Longrightar
lemma arch_valid_obj[detype_invs_proofs]:
"\<And>p ao. \<lbrakk>ko_at (ArchObj ao) p s; arch_valid_obj ao s\<rbrakk>
\<Longrightarrow> arch_valid_obj ao (detype (untyped_range cap) s)"
sorry (* FIXME AARCH64
sorry (* FIXME AARCH64 VCPU
by simp *)
lemma sym_hyp_refs_detype[detype_invs_proofs]:
@ -181,7 +181,7 @@ lemma tcb_arch_detype[detype_invs_proofs]:
"\<lbrakk>ko_at (TCB t) p s; valid_arch_tcb (tcb_arch t) s\<rbrakk>
\<Longrightarrow> valid_arch_tcb (tcb_arch t) (detype (untyped_range cap) s)"
apply (clarsimp simp: valid_arch_tcb_def)
sorry (* FIXME AARCH64
sorry (* FIXME AARCH64 VCPU
done *)
declare arch_state_det[simp]
@ -255,10 +255,9 @@ lemma vs_lookup_slot:
lemma vs_lookup_target:
"(vs_lookup_target level asid vref (detype S s) = Some (level, p)) \<Longrightarrow>
(vs_lookup_target level asid vref s = Some (level, p))"
sorry (* FIXME AARCH64
by (fastforce simp: vs_lookup_target_def in_omonad asid_pools_of_detype
split: if_split_asm
dest!: vs_lookup_slot) *)
dest!: vs_lookup_slot)
lemma vs_lookup_target_preserved:
"\<lbrakk> x \<in> untyped_range cap; vs_lookup_target level asid vref s = Some (level', x);
@ -283,26 +282,26 @@ lemma valid_asid_table:
apply (drule no_obj_refs; simp)
done
(* FIXME AARCH64
lemma vmid_inv_detype:
"vmid_inv (detype (untyped_range cap) s)"
sorry (* FIXME AARCH64 *)
lemma cur_vcpu_detype:
"cur_vcpu (detype (untyped_range cap) s)"
sorry (* FIXME AARCH64 VCPU *)
lemma valid_global_arch_objs:
"valid_global_arch_objs (detype (untyped_range cap) s)"
using valid_arch_state
by (fastforce dest!: riscv_global_pts_global_ref valid_global_refsD[OF globals cap]
using valid_arch_state global_pt_in_global_refs
by (fastforce dest!: valid_global_refsD[OF globals cap]
simp: cap_range_def valid_global_arch_objs_def valid_arch_state_def)
lemma valid_global_tables:
"valid_global_tables (detype (untyped_range cap) s)"
using valid_arch_state
by (fastforce dest: pt_walk_level pt_walk_detype
simp: valid_global_tables_def valid_arch_state_def Let_def)
*)
lemma valid_arch_state_detype[detype_invs_proofs]:
"valid_arch_state (detype (untyped_range cap) s)"
using valid_vs_lookup valid_arch_state ut_mdb valid_global_refsD [OF globals cap] cap
cur_vcpu_detype vmid_inv_detype valid_global_arch_objs
unfolding valid_arch_state_def pred_conj_def
sorry (* FIXME AARCH64
by (simp only: valid_asid_table valid_global_arch_objs valid_global_tables) simp *)
by (simp only: valid_asid_table) simp
lemma vs_lookup_asid_pool_level:
assumes lookup: "vs_lookup_table level asid vref s = Some (level, p)" "vref \<in> user_region"
@ -455,50 +454,13 @@ qed
lemma valid_asid_map_detype[detype_invs_proofs]: "valid_asid_map (detype (untyped_range cap) s)"
by (simp add: valid_asid_map_def)
(* FIXME AARCH64
lemma has_kernel_mappings:
"valid_global_arch_objs s \<Longrightarrow>
has_kernel_mappings pt (detype (untyped_range cap) s) = has_kernel_mappings pt s"
by (auto dest!: riscv_global_pt_in_global_refs valid_global_refsD [OF globals cap]
simp: cap_range_def has_kernel_mappings_def )
lemma equal_kernel_mappings_detype[detype_invs_proofs]:
"equal_kernel_mappings (detype (untyped_range cap) s)"
proof -
have "equal_kernel_mappings s"
using invs by (simp add: invs_def valid_state_def)
moreover
have "valid_global_arch_objs s"
using invs by (simp add: invs_def valid_state_def valid_arch_state_def)
ultimately
show ?thesis
by (clarsimp simp: equal_kernel_mappings_def has_kernel_mappings)
qed
by (simp add: equal_kernel_mappings_def)
lemma valid_global_mappings_detype[detype_invs_proofs]:
"valid_global_vspace_mappings (detype (untyped_range cap) s)"
proof -
have "valid_global_vspace_mappings s"
"valid_global_tables s"
"valid_global_arch_objs s"
"pspace_aligned s"
"valid_uses s"
using invs by (auto simp: invs_def valid_state_def valid_arch_state_def)
then show ?thesis
unfolding valid_global_vspace_mappings_def
apply (clarsimp simp: Let_def)
apply (safe; drule (1) bspec; thin_tac "Ball _ _")
apply (all \<open>drule kernel_regionsI, erule option_Some_value_independent\<close>)
apply (distinct_subgoals)
apply (subst pt_lookup_target_translate_address_upd_eq; assumption?)
apply (rule pt_lookup_target_pt_eqI; clarsimp)
apply (drule (1) valid_global_tablesD, simp add: kernel_regions_in_mappings)
apply (drule riscv_global_pts_global_ref)
apply (drule valid_global_refsD[OF globals cap])
apply (clarsimp simp: cap_range_def opt_map_def detype_def split: option.splits)
done
qed
*)
by (simp add: valid_global_vspace_mappings_def)
lemma pspace_in_kernel_window_detype[detype_invs_proofs]:
"pspace_in_kernel_window (detype (untyped_range cap) s)"
@ -607,8 +569,7 @@ sublocale detype_locale < detype_locale_gen_2
proof goal_cases
interpret detype_locale_arch ..
case 1 show ?case
sorry (* FIXME AARCH64
by (intro_locales; (unfold_locales; fact detype_invs_proofs)?) *)
by (intro_locales; (unfold_locales; fact detype_invs_proofs)?)
qed
context detype_locale begin

@ -72,7 +72,7 @@ lemma arch_decode_ARMASIDControlMakePool_empty_fail:
split: if_split_asm)
apply wpsimp
apply (wpsimp simp: decode_frame_invocation_def)
sorry (* FIXME AARCH64
sorry (* FIXME AARCH64 VCPU and PageFlush invocations
apply (wpsimp simp: decode_page_table_invocation_def)
done *)

@ -22,11 +22,10 @@ declare prepare_thread_delete_caps_of_state [Finalise_AI_asms]
global_naming AARCH64
(* FIXME AARCH64
lemma valid_global_refs_asid_table_udapte [iff]:
"valid_global_refs (s\<lparr>arch_state := arm_asid_table_update f (arch_state s)\<rparr>) =
valid_global_refs s"
by (simp add: valid_global_refs_def global_refs_def) *)
by (simp add: valid_global_refs_def global_refs_def)
lemma nat_to_cref_unat_of_bl':
"\<lbrakk> length xs < 64; n = length xs \<rbrakk> \<Longrightarrow>
@ -44,38 +43,27 @@ lemma nat_to_cref_unat_of_bl':
lemmas nat_to_cref_unat_of_bl = nat_to_cref_unat_of_bl' [OF _ refl]
(* FIXME AARCH64
lemma riscv_global_pt_asid_table_update[simp]:
"riscv_global_pt (arch_state s\<lparr>arm_asid_table := atable\<rparr>) = riscv_global_pt (arch_state s)"
by (simp add: riscv_global_pt_def) *)
lemma global_pt_asid_table_update[simp]:
"arm_us_global_vspace (arch_state s\<lparr>arm_asid_table := atable\<rparr>) = global_pt s"
by simp
lemma equal_kernel_mappings_asid_table_unmap:
"equal_kernel_mappings s
\<Longrightarrow> equal_kernel_mappings (s\<lparr>arch_state := arch_state s
\<lparr>arm_asid_table := (asid_table s)(i := None)\<rparr>\<rparr>)"
unfolding equal_kernel_mappings_def
apply clarsimp
sorry (* FIXME AARCH64
apply (erule_tac x=asid in allE)
apply (erule_tac x=pt_ptr in allE)
apply (clarsimp simp: fun_upd_def)
apply (erule impE)
subgoal by (clarsimp simp: vspace_for_asid_def in_omonad pool_for_asid_def split: if_splits)
apply (clarsimp simp: has_kernel_mappings_def)
done *)
unfolding equal_kernel_mappings_def by simp
lemma invs_arm_asid_table_unmap:
"invs s \<and> is_aligned base asid_low_bits
\<and> tab = arm_asid_table (arch_state s)
\<longrightarrow> invs (s\<lparr>arch_state := arch_state s\<lparr>arm_asid_table := tab(asid_high_bits_of base := None)\<rparr>\<rparr>)"
apply (clarsimp simp: invs_def valid_state_def valid_arch_caps_def)
sorry (* FIXME AARCH64
apply (strengthen valid_asid_map_unmap valid_vspace_objs_unmap_strg
valid_vs_lookup_unmap_strg valid_arch_state_unmap_strg)
apply (simp add: valid_irq_node_def valid_kernel_mappings_def)
apply (simp add: valid_table_caps_def valid_machine_state_def valid_global_objs_def
valid_asid_pool_caps_def equal_kernel_mappings_asid_table_unmap)
done *)
done
lemma delete_asid_pool_invs[wp]:
"delete_asid_pool base pptr \<lbrace>invs\<rbrace>"
@ -803,15 +791,14 @@ lemma delete_asid_no_vs_lookup_target:
done *)
lemma delete_asid_unreachable:
"\<lbrace>\<lambda>s. vspace_for_asid asid s = Some pt \<and> pt_at pt_t pt s \<and> valid_asid_table s \<rbrace>
"\<lbrace>\<lambda>s. vspace_for_asid asid s = Some pt \<and> pt_at VSRootPT_T pt s \<and> valid_asid_table s \<rbrace>
delete_asid asid pt
\<lbrace>\<lambda>_ s. \<not> reachable_target (asid, vref) pt s\<rbrace>"
unfolding reachable_target_def
sorry (* FIXME AARCH64
apply (wpsimp wp: hoare_vcg_all_lift delete_asid_no_vs_lookup_target)
apply (drule (1) pool_for_asid_validD)
apply (clarsimp simp: obj_at_def in_omonad)
done *)
done
lemma arch_finalise_cap_replaceable:
notes strg = tcb_cap_valid_imp_NullCap
@ -1372,7 +1359,7 @@ lemma valid_kernel_mappings [iff]:
"valid_kernel_mappings (s\<lparr>arch_state := arch_state s\<lparr>arm_asid_table := table'\<rparr>\<rparr>) = valid_kernel_mappings s"
by (simp add: valid_kernel_mappings_def)
crunches unmap_page_table, store_pte, delete_asid_pool(* FIXME AARCH64 , copy_global_mappings *)
crunches unmap_page_table, store_pte, delete_asid_pool
for valid_cap[wp]: "valid_cap c"
(wp: mapM_wp_inv mapM_x_wp' simp: crunch_simps)

@ -269,7 +269,7 @@ end
interpretation Interrupt_AI?: Interrupt_AI
proof goal_cases
interpret Arch .
case 1 show ?case sorry (* FIXME AARCH64 by (intro_locales; (unfold_locales, simp_all add: Interrupt_AI_asms)?) *)
case 1 show ?case by (intro_locales; (unfold_locales, simp_all add: Interrupt_AI_asms)?)
qed
end

@ -606,8 +606,12 @@ definition vmid_for_asid :: "asid \<Rightarrow> 'z::state_ext state \<Rightarrow
definition vmid_inv :: "'z::state_ext state \<Rightarrow> bool" where
"vmid_inv s \<equiv> is_inv (arm_vmid_table (arch_state s)) (swp vmid_for_asid s)"
definition valid_global_arch_objs where
"valid_global_arch_objs \<equiv> \<lambda>s. vspace_pt_at (global_pt s) s"
definition valid_arch_state :: "'z::state_ext state \<Rightarrow> bool" where
"valid_arch_state \<equiv> valid_asid_table and valid_uses and vmid_inv and cur_vcpu"
"valid_arch_state \<equiv> valid_asid_table and valid_uses and vmid_inv and cur_vcpu and
valid_global_arch_objs"
(* ---------------------------------------------------------------------------------------------- *)
@ -1658,6 +1662,11 @@ lemma pt_slot_offset_or_def:
apply (drule shiftl_less_t2n'[where n=pte_bits], auto simp: bit_simps)
done
lemma valid_global_arch_objs_pt_at[elim!]:
"valid_global_arch_objs s \<Longrightarrow> vspace_pt_at (global_pt s) s"
unfolding valid_global_arch_objs_def
by simp
lemma pool_for_asid_vs_lookup:
"(vs_lookup_table asid_pool_level asid vref s = Some (level, p)) =
(pool_for_asid asid s = Some p \<and> level = asid_pool_level)"
@ -2394,6 +2403,13 @@ lemma vspace_for_asid_lift:
apply (wpsimp wp: assms entry_for_asid_lift split: option.splits)+
done
lemma valid_global_arch_objs_lift:
assumes "\<And>P. f \<lbrace> \<lambda>s. P (global_pt s) \<rbrace>"
assumes "\<And>p. f \<lbrace> vspace_pt_at p \<rbrace>"
shows "f \<lbrace> valid_global_arch_objs \<rbrace>"
unfolding valid_global_arch_objs_def
by (rule hoare_lift_Pf; rule assms)
lemma valid_arch_state_lift_arch:
assumes atyp[wp]: "\<And>T p. f \<lbrace> typ_at (AArch T) p\<rbrace>"
assumes aobjs[wp]: "\<And>P. f \<lbrace>\<lambda>s. P (asid_pools_of s) \<rbrace>"
@ -2405,7 +2421,7 @@ lemma valid_arch_state_lift_arch:
apply (rule hoare_lift_Pf[where f="arch_state", rotated], rule arch)
apply (wpsimp wp: dom_asid_pools_of_lift)
apply blast
apply (wpsimp wp: vcpus cur_vcpu_typ_lift vmid_inv_ap_lift)+
apply (wpsimp wp: vcpus cur_vcpu_typ_lift vmid_inv_ap_lift valid_global_arch_objs_lift)+
done
lemma aobjs_of_atyp_lift:
@ -2422,7 +2438,7 @@ lemma aobjs_of_vcpu_lift:
to a dependency on arch objects *)
lemma valid_arch_state_lift:
assumes aobjs[wp]: "\<And>P. f \<lbrace>\<lambda>s. P (aobjs_of s)\<rbrace>"
assumes [wp]: "\<And>P. \<lbrace>\<lambda>s. P (arch_state s)\<rbrace> f \<lbrace>\<lambda>_ s. P (arch_state s)\<rbrace>"
assumes [wp]: "\<And>P. f \<lbrace>\<lambda>s. P (arch_state s)\<rbrace>"
shows "f \<lbrace>valid_arch_state\<rbrace>"
by (rule valid_arch_state_lift_arch; fastforce intro: aobjs_of_atyp_lift assms)
@ -2668,6 +2684,9 @@ lemma valid_arch_state_asid_table:
"valid_arch_state s \<Longrightarrow> valid_asid_table s"
by (simp add: valid_arch_state_def)
lemma valid_arch_state_global_arch_objs[elim!]:
"valid_arch_state s \<Longrightarrow> valid_global_arch_objs s"
by (simp add: valid_arch_state_def)
(* VCPU and related symrefs *)
@ -2828,6 +2847,10 @@ lemma vmid_inv_update [iff]:
"vmid_inv (f s) = vmid_inv s"
by (simp add: vmid_inv_def arch is_inv_def swp_def)
lemma valid_global_arch_objs_update [iff]:
"valid_global_arch_objs (f s) = valid_global_arch_objs s"
by (simp add: valid_global_arch_objs_def arch)
end
declare AARCH64.arch_tcb_context_absorbs[simp]

@ -416,8 +416,6 @@ crunch vspace_objs [wp, Ipc_AI_assms]: make_arch_fault_msg "valid
crunch global_objs [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_global_objs"
crunch global_vspace_mapping [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_global_vspace_mappings"
crunch arch_caps [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_arch_caps"
(* FIXME AARCH64: addressTranslateS1
crunch v_ker_map [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_kernel_mappings" *)
crunch eq_ker_map [wp, Ipc_AI_assms]: make_arch_fault_msg "equal_kernel_mappings"
crunch asid_map [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_asid_map"
crunch only_idle [wp, Ipc_AI_assms]: make_arch_fault_msg "only_idle"
@ -428,6 +426,9 @@ crunch valid_ioc [wp, Ipc_AI_assms]: make_arch_fault_msg "valid
crunch pred_tcb [wp, Ipc_AI_assms]: make_arch_fault_msg "pred_tcb_at proj P t"
crunch cap_to [wp, Ipc_AI_assms]: make_arch_fault_msg "ex_nonz_cap_to p"
crunch v_ker_map [wp, Ipc_AI_assms]: make_arch_fault_msg "valid_kernel_mappings"
(simp: valid_kernel_mappings_def)
crunch obj_at[wp, Ipc_AI_assms]: make_arch_fault_msg "\<lambda>s. P (obj_at P' pd s)"
(wp: as_user_inv getRestartPC_inv mapM_wp' simp: getRegister_def)
@ -457,7 +458,7 @@ end
interpretation Ipc_AI?: Ipc_AI
proof goal_cases
interpret Arch .
case 1 show ?case sorry (* FIXME AARCH64 by (unfold_locales; (fact Ipc_AI_assms)?) *)
case 1 show ?case by (unfold_locales; (fact Ipc_AI_assms)?)
qed
context Arch begin global_naming AARCH64

@ -17,8 +17,8 @@ text \<open>
Showing that there is a state that satisfies the abstract invariants.
\<close>
lemmas ptr_defs = idle_thread_ptr_def init_irq_node_ptr_def (* FIXME AARCH64 riscv_global_pt_ptr_def *)
lemmas state_defs = init_A_st_def init_kheap_def init_arch_state_def (* FIXME AARCH64 init_global_pt_def *)
lemmas ptr_defs = idle_thread_ptr_def init_irq_node_ptr_def arm_global_pt_ptr_def
lemmas state_defs = init_A_st_def init_kheap_def init_arch_state_def
init_vspace_uses_def ptr_defs
lemma is_tcb_TCB[simp]: "is_tcb (TCB t)" by (simp add: is_tcb_def)
@ -156,8 +156,6 @@ lemma cte_wp_at_init_A_st_Null:
lemmas cte_wp_at_caps_of_state_eq
= cte_wp_at_caps_of_state[where P="(=) cap" for cap]
declare ptrFormPAddr_addFromPPtr[simp]
lemma pspace_respects_device_region_init[simp]:
"pspace_respects_device_region init_A_st"
apply (clarsimp simp: pspace_respects_device_region_def state_defs init_machine_state_def
@ -175,151 +173,74 @@ lemma cap_refs_respects_device_region_init[simp]:
apply (clarsimp simp: cte_wp_at_caps_of_state cap_range_respects_device_region_def)
done
(* FIXME AARCH64
lemma kernel_mapping_slot: "0x1FF \<in> kernel_mapping_slots"
by (clarsimp simp: kernel_mapping_slots_def pptr_base_def pptrBase_def canonical_bit_def
pt_bits_left_def bit_simps level_defs) *)
lemma pool_for_asid_init_A_st[simp]:
"pool_for_asid asid init_A_st = None"
by (simp add: pool_for_asid_def state_defs)
(* FIXME AARCH64
lemma vspace_for_asid_init_A_st[simp]:
"vspace_for_asid asid init_A_st = None"
by (simp add: vspace_for_asid_def obind_def)
sorry (* FIXME AARCH64
by (simp add: vspace_for_asid_def obind_def) *)
lemma global_pt_init_A_st[simp]:
"global_pt init_A_st = riscv_global_pt_ptr"
by (simp add: state_defs riscv_global_pt_def)
"global_pt init_A_st = arm_global_pt_ptr"
by (simp add: state_defs)
lemma is_aligned_riscv_global_pt_ptr[simp]:
"is_aligned riscv_global_pt_ptr pt_bits"
by (simp add: riscv_global_pt_ptr_def pptr_base_num bit_simps is_aligned_def)
lemma is_aligned_arm_global_pt_ptr[simp]:
"is_aligned arm_global_pt_ptr (pt_bits VSRootPT_T)"
by (simp add: arm_global_pt_ptr_def pptr_base_num bit_simps is_aligned_def)
lemma ptes_of_init_A_st_global:
"ptes_of init_A_st =
(\<lambda>p. if table_base p = riscv_global_pt_ptr \<and> is_aligned p pte_bits then
Some ((un_PageTable (the_arch_obj init_global_pt)) (table_index p)) else None)"
by (auto simp add: state_defs pte_of_def obind_def opt_map_def split: option.splits)
(\<lambda>pt_t p. if pt_t = VSRootPT_T \<and> table_base VSRootPT_T p = arm_global_pt_ptr \<and>
is_aligned p pte_bits then Some InvalidPTE else None)"
sorry (* FIXME AARCH64: might need adjustment
by (auto simp add: state_defs level_pte_of_def obind_def opt_map_def split: option.splits) *)
lemma pt_walk_init_A_st[simp]:
"pt_walk max_pt_level level riscv_global_pt_ptr vref (ptes_of init_A_st) =
Some (max_pt_level, riscv_global_pt_ptr)"
"pt_walk max_pt_level level arm_global_pt_ptr vref (ptes_of init_A_st) =
Some (max_pt_level, arm_global_pt_ptr)"
apply (subst pt_walk.simps)
apply (simp add: in_omonad ptes_of_init_A_st_global init_global_pt_def
apply (simp add: in_omonad ptes_of_init_A_st_global
is_aligned_pt_slot_offset_pte global_pte_def)
done
sorry (* FIXME AARCH64 *)
lemma table_index_riscv_global_pt_ptr:
"table_index (pt_slot_offset max_pt_level riscv_global_pt_ptr vref) =
ucast ((vref >> ptTranslationBits * 2 + pageBits) && mask ptTranslationBits)"
apply (simp add: pt_slot_offset_def pt_index_def pt_bits_left_def bit_simps level_defs
riscv_global_pt_ptr_def pptr_base_def pptrBase_def canonical_bit_def)
(* FIXME AARCH64: statement -- this depends on PA_40 config; potentially not needed
lemma table_index_arm_global_pt_ptr:
"table_index VSRootPT_T (pt_slot_offset max_pt_level arm_global_pt_ptr vref) =
(vref >> (ptTranslationBits NormalPT_T) * 2 + pageBits) && mask (ptTranslationBits NormalPT_T)"
apply (simp add: pt_slot_offset_def pt_index_def pt_bits_left_def level_defs
arm_global_pt_ptr_def pptr_base_def pptrBase_def canonical_bit_def)
apply (subst word_plus_and_or_coroll)
apply word_bitwise
apply simp
apply word_bitwise
apply (clarsimp simp: word_size)
done
done *)
lemma kernel_window_1G:
"\<lbrakk> pptr_base \<le> vref; vref < pptr_base + (1 << 30) \<rbrakk> \<Longrightarrow>
table_index (pt_slot_offset max_pt_level riscv_global_pt_ptr vref) = 0x100"
apply (simp add: table_index_riscv_global_pt_ptr)
table_index VSRootPT_T (pt_slot_offset max_pt_level arm_global_pt_ptr vref) = 0x100"
sorry (* FIXME AARCH64 -- this depends on PA_40 config; potentially not needed
apply (simp add: table_index_arm_global_pt_ptr)
apply (simp add: bit_simps pptr_base_def pptrBase_def neg_mask_le_high_bits word_size flip: NOT_mask)
apply (subst (asm) mask_def)
apply (simp add: canonical_bit_def)
apply word_bitwise
apply (clarsimp simp: word_size)
done
lemma kernel_mapping_slots_0x100[simp]:
"0x100 \<in> kernel_mapping_slots"
by (simp add: kernel_mapping_slots_def pptr_base_def bit_simps pt_bits_left_def level_defs
pptrBase_def canonical_bit_def)
lemma translate_address_kernel_window:
"\<lbrakk> pptr_base \<le> vref; vref < pptr_base + (1 << 30) \<rbrakk>\<Longrightarrow>
translate_address riscv_global_pt_ptr vref (ptes_of init_A_st) = Some (addrFromPPtr vref)"
apply (clarsimp simp: translate_address_def in_omonad pt_lookup_target_def
pt_lookup_slot_from_level_def)
apply (simp add: ptes_of_init_A_st_global[THEN fun_cong] init_global_pt_def global_pte_def pte_ref_def)
apply (simp add: kernel_window_1G is_aligned_pt_slot_offset_pte)
apply (simp add: bit_simps addr_from_ppn_def shiftl_shiftl)
apply (simp add: ptrFromPAddr_def addrFromPPtr_def)
apply (simp add: pptrBaseOffset_def paddrBase_def)
apply (simp add: pt_bits_left_def bit_simps level_defs)
apply (rule conjI)
apply (rule is_aligned_add)
apply (simp add: mask_def)
apply (simp add: pptrBase_def canonical_bit_def is_aligned_def)
apply (simp add: pptr_base_def)
apply (simp add: pptrBase_def neg_mask_le_high_bits flip: NOT_mask)
apply (subst word_plus_and_or_coroll; simp add: canonical_bit_def word_size mask_def)
apply word_bitwise
apply clarsimp
done
lemma elf_window_1M:
"\<lbrakk> kernel_elf_base \<le> vref; vref < kernel_elf_base + (1 << 20) \<rbrakk> \<Longrightarrow>
table_index (pt_slot_offset max_pt_level riscv_global_pt_ptr vref) = 0x1FE"
apply (simp add: table_index_riscv_global_pt_ptr)
apply (simp add: bit_simps kernel_elf_base_def kernelELFBase_def)
apply word_bitwise
apply (clarsimp simp: word_size)
done
lemma kernel_mapping_slots_0x1FE[simp]:
"0x1FE \<in> kernel_mapping_slots"
by (simp add: kernel_mapping_slots_def pptr_base_def bit_simps pt_bits_left_def level_defs
pptrBase_def canonical_bit_def)
lemma translate_address_kernel_elf_window:
"\<lbrakk> kernel_elf_base \<le> vref; vref < kernel_elf_base + (1 << 20) \<rbrakk> \<Longrightarrow>
translate_address riscv_global_pt_ptr vref (ptes_of init_A_st) = Some (addrFromKPPtr vref)"
apply (clarsimp simp: translate_address_def in_omonad pt_lookup_target_def
pt_lookup_slot_from_level_def)
apply (simp add: ptes_of_init_A_st_global[THEN fun_cong] init_global_pt_def global_pte_def pte_ref_def)
apply (simp add: elf_window_1M is_aligned_pt_slot_offset_pte)
apply (simp add: bit_simps addr_from_ppn_def shiftl_shiftl)
apply (simp add: ptrFromPAddr_def addrFromPPtr_def)
apply (simp add: addrFromKPPtr_def kernelELFBaseOffset_def kernelELFPAddrBase_def kernelELFBase_def)
apply (simp add: pt_bits_left_def bit_simps level_defs)
apply (rule conjI)
apply (simp add: pptrBase_def pptrBaseOffset_def paddrBase_def canonical_bit_def is_aligned_def)
apply (simp add: kernel_elf_base_def kernelELFBase_def)
apply (subst word_plus_and_or_coroll)
apply (simp add: canonical_bit_def word_size mask_def)
apply word_bitwise
apply (simp add: canonical_bit_def word_size mask_def)
apply word_bitwise
apply clarsimp
done
done *)
lemma kernel_window_init_st:
"kernel_window init_A_st = { pptr_base ..< pptr_base + (1 << 30) }"
by (auto simp: state_defs kernel_window_def)
lemma kernel_elf_window_init_st:
"kernel_elf_window init_A_st = { kernel_elf_base ..< kernel_elf_base + (1 << 20) }"
apply (clarsimp simp: state_defs kernel_elf_window_def kernel_elf_base_def kernelELFBase_def
pptr_base_def pptrBase_def canonical_bit_def)
apply (rule set_eqI, clarsimp)
apply (rule iffI)
apply auto[1]
apply clarsimp
apply word_bitwise
apply clarsimp
done
lemma valid_global_vspace_mappings_init_A_st[simp]:
"valid_global_vspace_mappings init_A_st"
unfolding valid_global_vspace_mappings_def
by (simp add: translate_address_kernel_window kernel_window_init_st
translate_address_kernel_elf_window kernel_elf_window_init_st)
by simp
lemma valid_uses_init_A_st[simp]: "valid_uses_2 init_vspace_uses"
sorry (* FIXME AARCH64
proof -
note canonical_bit_def[simp]
have [simp]: "pptr_base < pptr_base + 0x40000000"
@ -345,21 +266,17 @@ proof -
using canonical_user_pptr_base pptr_base_kernel_elf_base
unfolding valid_uses_2_def init_vspace_uses_def window_defs
by (auto simp: canonical_user_canonical above_pptr_base_canonical)
qed
qed *)
lemma valid_global_arch_objs_init_A_st[simp]:
"valid_global_arch_objs init_A_st"
by (simp add: valid_global_arch_objs_def state_defs level_defs obj_at_def)
lemma valid_global_tables_init_A_st[simp]:
"valid_global_tables init_A_st"
apply (simp add: valid_global_tables_def Let_def riscv_global_pt_def[symmetric])
apply (clarsimp simp: state_defs pte_rights_of_def vm_kernel_only_def global_pte_def)
done
sorry (* FIXME AARCH64
by (simp add: valid_global_arch_objs_def state_defs level_defs obj_at_def) *)
lemma vspace_for_pool_init_A_st[simp]:
"vspace_for_pool ap asid (asid_pools_of init_A_st) = None"
by (clarsimp simp: vspace_for_pool_def obind_def in_opt_map_eq state_defs split: option.splits)
sorry (* FIXME AARCH64
by (clarsimp simp: vspace_for_pool_def obind_def in_opt_map_eq state_defs split: option.splits) *)
lemma user_region_vs_lookup_target_init_A_st[simp]:
"vref \<in> user_region \<Longrightarrow> vs_lookup_target bot_level asid vref init_A_st = None"
@ -374,16 +291,9 @@ lemma valid_vspace_objs_init_A_st[simp]:
"valid_vspace_objs init_A_st"
by (clarsimp simp: valid_vspace_objs_def in_omonad vs_lookup_table_def)
lemma global_pt_kernel_window_init_arch_state[simp]:
"obj_addrs init_global_pt riscv_global_pt_ptr \<subseteq>
kernel_window_2 (riscv_kernel_vspace init_arch_state)"
apply (clarsimp simp: state_defs pptr_base_num bit_simps kernel_window_def kernel_elf_base_def)
apply (rule conjI; unat_arith)
done
lemma idle_thread_in_kernel_window_init_arch_state[simp]:
"{idle_thread_ptr..0x3FF + idle_thread_ptr} \<subseteq>
kernel_window_2 (riscv_kernel_vspace init_arch_state)"
kernel_window_2 (arm_kernel_vspace init_arch_state)"
apply (clarsimp simp: state_defs pptr_base_num bit_simps kernel_window_def kernel_elf_base_def)
apply (rule conjI; unat_arith)
done
@ -399,12 +309,13 @@ lemma irq_node_pptr_base_kernel_elf_base:
lemma irq_node_in_kernel_window_init_arch_state':
"\<lbrakk> init_irq_node_ptr + m \<le> x; x \<le> init_irq_node_ptr + m + mask cte_level_bits;
m \<le> mask (size (irq::irq)) << cte_level_bits\<rbrakk>
\<Longrightarrow> x \<in> kernel_window_2 (riscv_kernel_vspace init_arch_state)"
\<Longrightarrow> x \<in> kernel_window_2 (arm_kernel_vspace init_arch_state)"
apply (clarsimp simp: kernel_window_def init_vspace_uses_def init_arch_state_def)
apply (rule conjI)
apply (clarsimp simp: state_defs)
apply (rule ccontr, simp add:not_le)
apply (drule(1) le_less_trans)
sorry (* FIXME AARCH64
apply (cut_tac is_aligned_no_wrap'[where ptr=pptr_base
and off="0x3000 + m"
and sz=canonical_bit, simplified])
@ -417,19 +328,19 @@ lemma irq_node_in_kernel_window_init_arch_state':
mask_def init_irq_node_ptr_def pptr_base_num word_size)
apply unat_arith
apply clarsimp
done
done *)
lemma irq_node_in_kernel_window_init_arch_state[simp]:
"\<lbrakk> init_irq_node_ptr + (ucast (irq::irq) << cte_level_bits) \<le> x;
x \<le> init_irq_node_ptr + (ucast irq << cte_level_bits) + 2 ^ cte_level_bits - 1 \<rbrakk>
\<Longrightarrow> x \<in> kernel_window_2 (riscv_kernel_vspace init_arch_state)"
\<Longrightarrow> x \<in> kernel_window_2 (arm_kernel_vspace init_arch_state)"
apply (erule irq_node_in_kernel_window_init_arch_state')
apply (simp add: mask_def add_diff_eq)
apply (simp add: word_size mask_def cte_level_bits_def)
apply (thin_tac P for P)
apply word_bitwise
done
*)
lemma invs_A:
"invs init_A_st" (is "invs ?st")
@ -440,11 +351,11 @@ lemma invs_A:
apply (rule conjI)
prefer 2
apply (simp add: cur_tcb_def state_defs obj_at_def)
sorry (* FIXME AARCH64 rest of proof won't make sense until example state is tweaked from RISCV64
version
apply (simp add: valid_state_def)
apply (rule conjI)
apply (simp add: valid_pspace_def)
sorry (* FIXME AARCH64 rest of proof won't make sense until example state is tweaked from RISCV64
version
apply (rule conjI)
apply (clarsimp simp: valid_objs_def state_defs wellformed_pte_def global_pte_def
valid_obj_def valid_vm_rights_def vm_kernel_only_def

@ -73,20 +73,18 @@ lemma valid_vspace_objs_pte:
apply simp
done
(* FIXME AARCH64
lemma get_pte_valid[wp]:
"\<lbrace>valid_vspace_objs and \<exists>\<rhd> (level, table_base p) and
K (level = max_pt_level \<longrightarrow> table_index p \<notin> kernel_mapping_slots)\<rbrace>
get_pte p
"\<lbrace>valid_vspace_objs and \<exists>\<rhd> (level, table_base pt_t p) and
K (level = max_pt_level \<longrightarrow> ucast (table_index pt_t p) \<notin> invalid_mapping_slots)\<rbrace>
get_pte pt_t p
\<lbrace>valid_pte level\<rbrace>"
by wpsimp (fastforce dest: valid_vspace_objs_pte) *)
by wpsimp (fastforce dest: valid_vspace_objs_pte)
lemma get_pte_wellformed[wp]:
"\<lbrace>valid_objs\<rbrace> get_pte pt_t p \<lbrace>\<lambda>rv _. wellformed_pte rv\<rbrace>"
sorry (* FIXME AARCH64
apply wpsimp
apply (fastforce simp: valid_objs_def dom_def valid_obj_def ptes_of_def in_opt_map_eq)
done *)
done
crunch valid_objs[wp]: init_arch_objects "valid_objs"
(ignore: clearMemory wp: crunch_wps)
@ -106,56 +104,6 @@ crunch cap_refs_respects_device_region[wp]: reserve_region cap_refs_respects_dev
crunch invs [wp]: reserve_region "invs"
(* FIXME AARCH64 no copy_global_mappings
crunch iflive[wp]: copy_global_mappings "if_live_then_nonz_cap"
(wp: crunch_wps)
crunch zombies[wp]: copy_global_mappings "zombies_final"
(wp: crunch_wps)
crunch state_refs_of[wp]: copy_global_mappings "\<lambda>s. P (state_refs_of s)"
(wp: crunch_wps)
crunch valid_idle[wp]: copy_global_mappings "valid_idle"
(wp: crunch_wps)
crunch only_idle[wp]: copy_global_mappings "only_idle"
(wp: crunch_wps)
crunch ifunsafe[wp]: copy_global_mappings "if_unsafe_then_cap"
(wp: crunch_wps)
crunch reply_caps[wp]: copy_global_mappings "valid_reply_caps"
(wp: crunch_wps)
crunch reply_masters[wp]: copy_global_mappings "valid_reply_masters"
(wp: crunch_wps)
crunch valid_global[wp]: copy_global_mappings "valid_global_refs"
(wp: crunch_wps)
crunch irq_node[wp]: copy_global_mappings "\<lambda>s. P (interrupt_irq_node s)"
(wp: crunch_wps)
crunch irq_states[wp]: copy_global_mappings "\<lambda>s. P (interrupt_states s)"
(wp: crunch_wps)
crunch caps_of_state[wp]: copy_global_mappings "\<lambda>s. P (caps_of_state s)"
(wp: crunch_wps)
crunch pspace_in_kernel_window[wp]: copy_global_mappings "pspace_in_kernel_window"
(wp: crunch_wps)
crunch cap_refs_in_kernel_window[wp]: copy_global_mappings "cap_refs_in_kernel_window"
(wp: crunch_wps)
crunch pspace_respects_device_region[wp]: copy_global_mappings "pspace_respects_device_region"
(wp: crunch_wps)
crunch cap_refs_respects_device_region[wp]: copy_global_mappings "cap_refs_respects_device_region"
(wp: crunch_wps)
*)
lemma dmo_eq_kernel_restricted [wp, Retype_AI_assms]:
"\<lbrace>\<lambda>s. equal_kernel_mappings (kheap_update (f (kheap s)) s)\<rbrace>
do_machine_op m
@ -369,7 +317,7 @@ lemma pts_of:
lemma pts_of':
"pts_of s' p = Some pt \<Longrightarrow>
pts_of s p = Some pt \<or> pt = (empty_pt pt_t) \<and> p \<in> set (retype_addrs ptr ty n us)"
pts_of s p = Some pt \<or> pt = (empty_pt (pt_type pt_t)) \<and> p \<in> set (retype_addrs ptr ty n us)"
sorry (* FIXME AARCH64
apply (clarsimp simp: in_opt_map_eq s'_def ps_def split: if_split_asm)
apply (simp add: default_object_def default_arch_object_def tyunt
@ -380,10 +328,9 @@ lemma valid_asid_table:
"valid_asid_table s \<Longrightarrow> valid_asid_table s'"
unfolding valid_asid_table_def by (auto simp: asid_pools)
(* FIXME AARCH64
lemma valid_global_arch_objs:
"valid_global_arch_objs s \<Longrightarrow> valid_global_arch_objs s'"
by (fastforce simp: valid_global_arch_objs_def pt_at_eq pts_of) *)
by (fastforce simp: valid_global_arch_objs_def pt_at_eq pts_of)
lemma ptes_of:
"ptes_of s pt_t p = Some pte \<Longrightarrow> ptes_of s' pt_t p = Some pte"
@ -391,7 +338,7 @@ lemma ptes_of:
by (simp add: pte_of_def obind_def pts_of split: option.splits) *)
lemma default_empty:
"default_object ty dev us = ArchObj (PageTable pt) \<Longrightarrow> pt = (empty_pt pt_t)"
"default_object ty dev us = ArchObj (PageTable pt) \<Longrightarrow> pt = (empty_pt (pt_type pt))"
sorry (* FIXME AARCH64 ^exists pt_t?
by (simp add: default_object_def default_arch_object_def tyunt
split: apiobject_type.splits aobject_type.splits) *)
@ -448,32 +395,25 @@ lemma global_no_retype:
using dev retype_addrs_subset_ptr_bits[OF cover]
by (fastforce simp: valid_global_refs_def valid_refs_def cte_wp_at_caps_of_state)
(* FIXME AARCH64
lemma global_pts_no_retype:
"\<lbrakk> pt_ptr \<in> riscv_global_pts (arch_state s) level; valid_global_refs s \<rbrakk> \<Longrightarrow>
pt_ptr \<notin> set (retype_addrs ptr ty n us)"
by (drule riscv_global_pts_global_ref, erule global_no_retype)
"valid_global_refs s \<Longrightarrow> global_pt s \<notin> set (retype_addrs ptr ty n us)"
sorry (* FIXME AARCH64
by (drule riscv_global_pts_global_ref, erule global_no_retype) *)
lemma valid_global_tables:
"valid_global_tables s \<Longrightarrow> valid_global_tables s'"
apply (clarsimp simp: valid_global_tables_def Let_def)
apply (fold riscv_global_pt_def)
apply (intro conjI; clarsimp)
apply (drule pt_walk_level)
apply fastforce
apply (drule pts_of', fastforce)
apply (drule pts_of', fastforce)
apply (drule pts_of', fastforce simp: vm_kernel_only_def pte_rights_of_def)
done
*)
lemma vcpu_hyp_live_of':
"vcpu_hyp_live_of s' = vcpu_hyp_live_of s"
sorry (* FIXME AARCH64 VCPU *)
lemma vmid_inv':
"vmid_inv s \<Longrightarrow> vmid_inv s'"
sorry (* FIXME AARCH64 *)
lemma valid_arch_state:
"valid_arch_state s \<Longrightarrow> valid_arch_state s'"
sorry (* FIXME AARCH64
apply (simp add: valid_arch_state_def valid_asid_table valid_global_arch_objs valid_global_tables
apply (simp add: valid_arch_state_def valid_asid_table vcpu_hyp_live_of' vmid_inv'
del: arch_state)
apply simp
done *)
sorry (* FIXME AARCH64 *)
lemma vspace_for_pool1:
"(vspace_for_pool asid p (asid_pools_of s) = Some pt) \<Longrightarrow>
@ -515,12 +455,12 @@ lemma vs_lookup_target':
done
lemma wellformed_default_obj[Retype_AI_assms]:
"\<lbrakk> ptra \<notin> set (retype_addrs ptr ty n us);
kheap s ptra = Some (ArchObj ao); arch_valid_obj ao s\<rbrakk> \<Longrightarrow>
arch_valid_obj ao s'"
sorry (* FIXME AARCH64
apply (clarsimp elim!:obj_at_pres
split: arch_kernel_obj.splits option.splits)
"\<lbrakk> ptr' \<notin> set (retype_addrs ptr ty n us);
kheap s ptr' = Some (ArchObj ao); arch_valid_obj ao s\<rbrakk> \<Longrightarrow>
arch_valid_obj ao s'"
sorry (* FIXME AARCH64 VCPU: valid_vcpu s'
apply (cases ao; clarsimp elim!: obj_at_pres
split: arch_kernel_obj.splits option.splits)+
done *)
end
@ -531,7 +471,7 @@ context retype_region_proofs_arch begin
lemma hyp_refs_eq:
"state_hyp_refs_of s' = state_hyp_refs_of s"
unfolding s'_def ps_def
sorry (* FIXME AARCH64
sorry (* FIXME AARCH64 VCPU
by (rule ext) (clarsimp simp: state_hyp_refs_of_def split: option.splits) *)
@ -542,47 +482,9 @@ lemma obj_at_valid_pte:
apply (clarsimp | elim disjE)+
done
(* FIXME AARCH64
lemma pt_lookup_slot_from_level:
"\<lbrakk> vref \<in> kernel_mappings; valid_global_tables s; valid_global_arch_objs s; pspace_aligned s;
valid_global_refs s \<rbrakk> \<Longrightarrow>
(pt_lookup_slot_from_level max_pt_level 0 (riscv_global_pt (arch_state s)) vref (ptes_of s')
= Some (level, p)) =
(pt_lookup_slot_from_level max_pt_level 0 (riscv_global_pt (arch_state s)) vref (ptes_of s)
= Some (level, p))"
apply (simp add: pt_lookup_slot_from_level_def in_omonad)
apply (subst pt_walk_eqI; simp)
apply clarsimp
apply (drule (1) valid_global_tablesD, simp)
apply (frule (1) global_pts_no_retype)
apply (simp add: opt_map_def s'_def ps_def split: option.splits)
done
lemma translate_address:
"\<lbrakk> vref \<in> kernel_mappings; valid_global_tables s; valid_global_arch_objs s; pspace_aligned s;
valid_global_refs s \<rbrakk> \<Longrightarrow>
(translate_address (riscv_global_pt (arch_state s)) vref (ptes_of s') = Some p) =
(translate_address (riscv_global_pt (arch_state s)) vref (ptes_of s) = Some p)"
apply (simp add: translate_address_def in_omonad)
apply (simp add: pt_lookup_target_def in_omonad)
apply (auto simp: pt_lookup_slot_from_level dest: ptes_of' ptes_of)
done
lemma valid_global_vspace_mappings:
"\<lbrakk> valid_global_vspace_mappings s; valid_global_tables s; valid_global_arch_objs s;
pspace_aligned s; valid_global_refs s; valid_uses s \<rbrakk>
\<Longrightarrow> valid_global_vspace_mappings s'"
unfolding valid_global_vspace_mappings_def Let_def
apply simp
apply (rule conjI; clarsimp)
apply (subst translate_address; assumption?)
apply (fastforce simp: kernel_regions_def translate_address intro!: kernel_regions_in_mappings)
apply simp
apply (subst translate_address; assumption?)
apply (fastforce simp: kernel_regions_def intro!: kernel_regions_in_mappings)
apply simp
done
*)
"valid_global_vspace_mappings s'"
unfolding valid_global_vspace_mappings_def by simp
end
@ -624,11 +526,10 @@ qed
sublocale retype_region_proofs_gen?: retype_region_proofs_gen
sorry (* FIXME AARCH64
by (unfold_locales,
auto simp: hyp_refs_eq[simplified s'_def ps_def]
wellformed_default_obj[simplified s'_def ps_def]
valid_default_arch_tcb) *)
valid_default_arch_tcb)
end
@ -791,32 +692,9 @@ lemma vspace_for_asid:
sorry (* FIXME AARCH64
by (clarsimp simp: vspace_for_asid_def in_omonad pool_for_asid_def) *)
(* FIXME AARCH64
lemma has_kernel_mappings:
"\<lbrakk> has_kernel_mappings pt s; valid_global_arch_objs s; valid_global_refs s \<rbrakk> \<Longrightarrow> has_kernel_mappings pt s'"
unfolding has_kernel_mappings_def
apply clarsimp
apply (drule pts_of')
apply (erule disjE; clarsimp)
apply (drule riscv_global_pt_in_global_refs)
apply (drule (1) global_no_retype)
apply simp
done
lemma equal_kernel_mappings:
"\<lbrakk> equal_kernel_mappings s; valid_vspace_objs s; valid_asid_table s; valid_global_arch_objs s;
valid_global_refs s \<rbrakk> \<Longrightarrow> equal_kernel_mappings s'"
apply (simp add: equal_kernel_mappings_def)
apply clarsimp
apply (drule vspace_for_asid)
apply (rule has_kernel_mappings; assumption?)
apply (frule (2) vspace_for_asid_valid_pt)
apply clarsimp
apply (elim allE, erule (1) impE)+
apply (drule pts_of)
apply simp
done
*)
"equal_kernel_mappings s'"
by (simp add: equal_kernel_mappings_def)
lemma pspace_in_kernel_window:
"\<lbrakk> pspace_in_kernel_window (s :: 'state_ext state);
@ -907,7 +785,6 @@ interpretation retype_region_proofs_arch ..
lemma post_retype_invs:
"\<lbrakk> invs s; region_in_kernel_window {ptr .. (ptr && ~~ mask sz) + 2 ^ sz - 1} s \<rbrakk>
\<Longrightarrow> post_retype_invs ty (retype_addrs ptr ty n us) s'"
sorry (* FIXME AARCH64
using equal_kernel_mappings valid_global_vspace_mappings
apply (clarsimp simp: invs_def post_retype_invs_def valid_state_def
unsafe_rep2 null_filter valid_idle
@ -925,8 +802,7 @@ lemma post_retype_invs:
cap_refs_respects_device_region
cap_refs_in_kernel_window valid_irq_states
split: if_split_asm)
apply (simp add: valid_arch_state_def valid_pspace_def)
done *)
done
(* ML \<open>val pre_ctxt_1 = @{context}\<close> *)

@ -158,10 +158,10 @@ global_interpretation Tcb_AI_1?: Tcb_AI_1
and is_cnode_or_valid_arch = is_cnode_or_valid_arch
proof goal_cases
interpret Arch .
case 1 show ?case sorry (* FIXME AARCH64 by (unfold_locales; (fact Tcb_AI_asms)?) *)
case 1 show ?case by (unfold_locales; (fact Tcb_AI_asms)?)
qed
context Arch begin global_naming RISVB64
context Arch begin global_naming AARCH64
lemma use_no_cap_to_obj_asid_strg: (* arch specific *)
"(cte_at p s \<and> no_cap_to_obj_dr_emp cap s \<and> valid_cap cap s \<and> invs s)

@ -259,15 +259,10 @@ lemma delete_objects_rewrite[Untyped_AI_assms]:
apply simp
done
(* FIXME AARCH64
declare store_pde_pred_tcb_at [wp] *)
declare store_pte_pred_tcb_at [wp]
(* nonempty_table *)
definition
nonempty_table :: "machine_word set \<Rightarrow> Structures_A.kernel_object \<Rightarrow> bool"
where
(* FIXME AARCH64 check *)
"nonempty_table S ko \<equiv> \<forall>pt_t. a_type ko = AArch (APageTable pt_t) \<and> ko \<noteq> ArchObj (PageTable (empty_pt pt_t))"
definition nonempty_table :: "obj_ref set \<Rightarrow> kernel_object \<Rightarrow> bool" where
"nonempty_table S ko \<equiv> \<exists>pt_t. a_type ko = AArch (APageTable pt_t) \<and> ko \<noteq> ArchObj (PageTable (empty_pt pt_t))"
lemma reachable_target_trans[simp]:
"reachable_target ref p (trans_state f s) = reachable_target ref p s"
@@ -373,8 +368,7 @@ global_interpretation Untyped_AI? : Untyped_AI
proof goal_cases
interpret Arch .
case 1 show ?case
sorry (* FIXME AARCH64
by (unfold_locales; (fact Untyped_AI_assms)?) *)
by (unfold_locales; (fact Untyped_AI_assms)?)
qed
end


@@ -11,27 +11,19 @@ begin
context Arch begin global_naming AARCH64
(* FIXME AARCH64 this is inspired by X64 because we need some kind of definition to continue, but
it doesn't feel particularly generic.
Also, the names are awkward so as not to shadow the existing pt_range constant.
Also, how is this called "range" on other arches, when it returns the subset of the *domain* with
non-invalid entries?! *)
primrec pt_valid_idxs :: "pte \<Rightarrow> pt_index \<Rightarrow> pt_index set" where
(* Since we're not doing anything with the index apart from returning it, this definition works
for both NormalPTs and VSRootPTs *)
primrec pt_valid_idxs :: "pte \<Rightarrow> 'a word \<Rightarrow> ('a word) set" where
"pt_valid_idxs (InvalidPTE) p = {}"
| "pt_valid_idxs (PagePTE ptr sm attr rights) p = {p}"
| "pt_valid_idxs (PageTablePTE ptr) p = {p}"
primrec vs_valid_idxs :: "pte \<Rightarrow> vs_index \<Rightarrow> vs_index set" where
"vs_valid_idxs (InvalidPTE) p = {}"
| "vs_valid_idxs (PagePTE ptr sm attr rights) p = {p}"
| "vs_valid_idxs (PageTablePTE ptr) p = {p}"
abbreviation "valid_pt_entries \<equiv> \<lambda>pt. valid_entries pt_valid_idxs pt"
abbreviation "valid_vs_entries \<equiv> \<lambda>pt. valid_entries vs_valid_idxs pt"
abbreviation "valid_pt_entries \<equiv> valid_entries pt_valid_idxs"
definition obj_valid_vspace :: "kernel_object \<Rightarrow> bool" where
"obj_valid_vspace obj \<equiv> case obj of
ArchObj (PageTable (VSRootPT pt)) \<Rightarrow> valid_entries vs_valid_idxs pt
| ArchObj (PageTable (NormalPT pt)) \<Rightarrow> valid_entries pt_valid_idxs pt
ArchObj (PageTable (VSRootPT pt)) \<Rightarrow> valid_pt_entries pt
| ArchObj (PageTable (NormalPT pt)) \<Rightarrow> valid_pt_entries pt
| _ \<Rightarrow> True"
lemmas obj_valid_vspace_simps[simp]
@@ -55,45 +47,20 @@ lemma set_object_valid_vspace_objs'[wp]:
crunch valid_vspace_objs'[wp]: cap_insert, cap_swap_for_delete,empty_slot "valid_vspace_objs'"
(wp: crunch_wps simp: crunch_simps ignore:set_object)
(* FIXME AARCH64
lemma mapM_x_store_pte_updates:
"\<forall>x \<in> set xs. f x && ~~ mask (pt_bits pt_t) = p \<Longrightarrow>
\<lbrace>\<lambda>s. (\<not> pt_at pt_t p s \<longrightarrow> Q s) \<and>
(\<forall>pt. ko_at (ArchObj (PageTable pt)) p s
\<longrightarrow> Q (s \<lparr> kheap := (kheap s) (p := Some (ArchObj (PageTable (\<lambda>y. if y \<in> (\<lambda>x.
ucast (f x && mask (pt_bits pt_t) >> pte_bits)) ` set xs then pte else pt y)))) \<rparr>))\<rbrace>
mapM_x (\<lambda>x. store_pte (f x) pte) xs
\<lbrace>\<lambda>_. Q\<rbrace>"
apply (induct xs)
apply (simp add: mapM_x_Nil)
apply wp
apply (clarsimp simp: obj_at_def fun_upd_idem)
apply (simp add: mapM_x_Cons)
apply (rule hoare_seq_ext, assumption)
apply (thin_tac "valid P f Q" for P f Q)
apply (simp add: store_pte_def set_pt_def set_object_def word_size_bits_def)
apply (wp get_pt_wp get_object_wp)
apply (clarsimp simp: obj_at_def a_type_simps)
apply (erule rsubst[where P=Q])
apply (rule abstract_state.fold_congs[OF refl refl])
apply (rule ext, clarsimp)
apply (rule ext, clarsimp)
done
*)
lemma valid_pt_entries_invalid[simp]:
"valid_pt_entries (\<lambda>x. InvalidPTE)"
by (simp add:valid_entries_def)
lemma valid_vspace_objs'_ptD:
"\<lbrakk>valid_vspace_objs' s;
kheap s ptr = Some (ArchObj (arch_kernel_obj.PageTable (NormalPT pt)))\<rbrakk>
kheap s ptr = Some (ArchObj (PageTable (NormalPT pt)))\<rbrakk>
\<Longrightarrow> valid_pt_entries pt"
by (fastforce simp:ran_def)
lemma valid_vspace_objs'_vsD:
"\<lbrakk>valid_vspace_objs' s;
kheap s ptr = Some (ArchObj (arch_kernel_obj.PageTable (VSRootPT pt)))\<rbrakk>
\<Longrightarrow> valid_vs_entries pt"
kheap s ptr = Some (ArchObj (PageTable (VSRootPT pt)))\<rbrakk>
\<Longrightarrow> valid_pt_entries pt"
by (fastforce simp:ran_def)
lemma store_pte_valid_vspace_objs'[wp]:
@@ -145,22 +112,6 @@ lemmas rec_del_preservation_valid_vspace_objs = rec_del_preservation[OF _ _ _ _,
(* FIXME AARCH64
crunch valid_vspace_objs'[wp]: cap_delete, cap_revoke "valid_vspace_objs'"
(rule: cap_revoke_preservation_valid_vspace_objs)
lemma copy_global_mappings_valid_vspace_objs'[wp]:
"\<lbrace>valid_vspace_objs' and valid_arch_state and pspace_aligned
and K (is_aligned p pt_bits)\<rbrace>
copy_global_mappings p \<lbrace>\<lambda>rv. valid_vspace_objs'\<rbrace>"
unfolding copy_global_mappings_def
by (wpsimp wp: mapM_x_wp')
lemma in_pte_rangeD:
"x \<in> pte_range v y \<Longrightarrow> x = y"
by (case_tac v,simp_all split:if_splits)
lemma non_invalid_in_pte_range:
"pte \<noteq> InvalidPTE
\<Longrightarrow> x \<in> pte_range pte x"
by (case_tac pte,simp_all)
*)
crunch valid_vspace_objs'[wp]: cancel_badged_sends "valid_vspace_objs'"
@@ -205,8 +156,7 @@ lemma retype_region_valid_vspace_objs'[wp]:
apply (clarsimp simp: retype_addrs_fold foldr_upd_app_if ranI
elim!: ranE split: if_split_asm simp del:fun_upd_apply)
apply (simp add: default_object_def default_arch_object_def
split: Structures_A.kernel_object.splits
Structures_A.apiobject_type.split aobject_type.split)+
split: kernel_object.splits apiobject_type.split aobject_type.split)+
sorry (* FIXME AARCH64
done *)
@@ -241,8 +191,7 @@ lemma invoke_untyped_valid_vspace_objs'[wp]:
apply (wp init_arch_objects_valid_vspace | simp)+
apply (auto simp: post_retype_invs_def split: if_split_asm)[1]
apply (wp | simp)+
sorry (* FIXME AARCH64
done *)
done
crunches store_asid_pool_entry
for valid_vspace_objs'[wp]: "valid_vspace_objs'"
@@ -261,24 +210,12 @@ lemma perform_asid_pool_invocation_valid_vspace_objs'[wp]:
apply (erule (1) is_aligned_pt)
done *)
(* FIXME AARCH64
crunch valid_vspace_objs'[wp]: perform_asid_pool_invocation,
perform_asid_control_invocation "valid_vspace_objs'"
(ignore: delete_objects set_object
wp: static_imp_wp select_wp crunch_wps
simp: crunch_simps unless_def)
lemma pte_range_interD:
"pte_range pte p \<inter> pte_range pte' p' \<noteq> {}
\<Longrightarrow> pte \<noteq> InvalidPTE \<and> pte' \<noteq> InvalidPTE
\<and> p = p'"
apply (drule int_not_emptyD)
apply (case_tac pte,simp_all split:if_splits)
apply (case_tac pte',simp_all split:if_splits)
apply (case_tac pte',simp_all split:if_splits)
done
*)
lemma perform_page_valid_vspace_objs'[wp]:
"\<lbrace>valid_vspace_objs' and valid_page_inv pinv\<rbrace>
perform_page_invocation pinv \<lbrace>\<lambda>rv. valid_vspace_objs'\<rbrace>"


@@ -543,7 +543,7 @@ lemma arch_update_cap_invs_unmap_page_table:
and real_cte_at p
and invs and valid_cap cap
and (\<lambda>s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s)
and (\<lambda>s. pts_of s (obj_ref_of cap) = Some (empty_pt FIXME_level))
and (\<lambda>s. pts_of s (obj_ref_of cap) = Some (empty_pt (cap_pt_type cap)))
and (\<lambda>s. cte_wp_at (\<lambda>c. \<forall>asid vref level. vs_cap_ref c = Some (asid, vref)
\<longrightarrow> vs_lookup_target level asid vref s \<noteq> Some (level, obj_ref_of cap)) p s)
and K (is_pt_cap cap \<and> vs_cap_ref cap = None)\<rbrace>
@@ -581,9 +581,8 @@ lemma arch_update_cap_invs_unmap_page_table:
cte_wp_at_caps_of_state)
apply fastforce
apply (clarsimp simp: obj_at_def empty_table_def in_omonad)
sorry (* FIXME AARCH64
apply fastforce
done *)
done
lemma global_refs_arch_update_eq:
"arm_us_global_vspace (f (arch_state s)) = arm_us_global_vspace (arch_state s) \<Longrightarrow>
@@ -1061,6 +1060,7 @@ lemma perform_pt_inv_unmap_invs[wp]:
mapM_x_store_pte_unreachable hoare_vcg_ball_lift
unmap_page_table_not_target real_cte_at_typ_valid
simp: cte_wp_at_caps_of_state)
sorry (* FIXME AARCH64
apply (clarsimp simp: valid_pti_def cte_wp_at_caps_of_state)
apply (clarsimp simp: is_arch_update_def is_cap_simps is_PageTableCap_def
update_map_data_def valid_cap_def valid_arch_cap_def cap_aligned_def)
@@ -1072,7 +1072,6 @@ lemma perform_pt_inv_unmap_invs[wp]:
apply (clarsimp simp: wellformed_mapdata_def valid_cap_def cap_aligned_def cap_master_cap_simps)
apply (rule conjI)
apply clarsimp
sorry (* FIXME AARCH64
apply (prop_tac "is_aligned p pt_bits", simp add: bit_simps)
apply (subst (asm) upto_enum_step_shift_red; simp?)
apply (simp add: bit_simps)