(* *********************************************************************)
(* *)
(* The Compcert verified compiler *)
(* *)
(* Xavier Leroy, INRIA Paris-Rocquencourt *)
(* Sandrine Blazy, ENSIIE and INRIA Paris-Rocquencourt *)
(* with contributions from Andrew Appel, Rob Dockins, *)
(* and Gordon Stewart (Princeton University) *)
(* *)
(* Copyright Institut National de Recherche en Informatique et en *)
(* Automatique. All rights reserved. This file is distributed *)
(* under the terms of the GNU General Public License as published by *)
(* the Free Software Foundation, either version 2 of the License, or *)
(* (at your option) any later version. This file is also distributed *)
(* under the terms of the INRIA Non-Commercial License Agreement. *)
(* *)
(* *********************************************************************)
(** This file develops the memory model that is used in the dynamic
semantics of all the languages used in the compiler.
It defines a type [mem] of memory states, the following 4 basic
operations over memory states, and their properties:
- [load]: read a memory chunk at a given address;
- [store]: store a memory chunk at a given address;
- [alloc]: allocate a fresh memory block;
- [free]: invalidate a memory block.
*)
Require Import Axioms.
Require Import Coqlib.
Require Import AST.
Require Import Integers.
Require Import Floats.
Require Import Values.
Require Export Memdata.
Require Export Memtype.
Definition update (A: Type) (x: Z) (v: A) (f: Z -> A) : Z -> A :=
fun y => if zeq y x then v else f y.
Implicit Arguments update [A].
Lemma update_s:
forall (A: Type) (x: Z) (v: A) (f: Z -> A),
update x v f x = v.
Proof.
intros; unfold update. apply zeq_true.
Qed.
Lemma update_o:
forall (A: Type) (x: Z) (v: A) (f: Z -> A) (y: Z),
x <> y -> update x v f y = f y.
Proof.
intros; unfold update. apply zeq_false; auto.
Qed.
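(* Added usage sketch (not part of the original file): combining [update_s]
   and [update_o], reading back the written index yields the new value and
   any other index is unchanged. *)
Remark update_example:
  forall (x y: Z) (v: bool) (f: Z -> bool),
  x <> y ->
  update x v f x = v /\ update x v f y = f y.
Proof.
  intros; split. apply update_s. apply update_o; auto.
Qed.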
Module Mem <: MEM.
Definition perm_order' (po: option permission) (p: permission) :=
match po with
| Some p' => perm_order p' p
| None => False
end.
Record mem_ : Type := mkmem {
mem_contents: block -> Z -> memval;
mem_access: block -> Z -> option permission;
bounds: block -> Z * Z;
nextblock: block;
nextblock_pos: nextblock > 0;
nextblock_noaccess: forall b, 0 < b < nextblock \/ bounds b = (0,0) ;
bounds_noaccess: forall b ofs, ofs < fst(bounds b) \/ ofs >= snd(bounds b) -> mem_access b ofs = None;
noread_undef: forall b ofs, perm_order' (mem_access b ofs) Readable \/ mem_contents b ofs = Undef
}.
Definition mem := mem_.
Lemma mkmem_ext:
forall cont1 cont2 acc1 acc2 bound1 bound2 next1 next2
a1 a2 b1 b2 c1 c2 d1 d2,
cont1=cont2 -> acc1=acc2 -> bound1=bound2 -> next1=next2 ->
mkmem cont1 acc1 bound1 next1 a1 b1 c1 d1 =
mkmem cont2 acc2 bound2 next2 a2 b2 c2 d2.
Proof.
intros. subst. f_equal; apply proof_irr.
Qed.
(** * Validity of blocks and accesses *)
(** A block address is valid if it was previously allocated. It remains valid
even after being freed. *)
Definition valid_block (m: mem) (b: block) :=
b < nextblock m.
Theorem valid_not_valid_diff:
forall m b b', valid_block m b -> ~(valid_block m b') -> b <> b'.
Proof.
intros; red; intros. subst b'. contradiction.
Qed.
Hint Local Resolve valid_not_valid_diff: mem.
(** Permissions *)
Definition perm (m: mem) (b: block) (ofs: Z) (p: permission) : Prop :=
perm_order' (mem_access m b ofs) p.
Theorem perm_implies:
forall m b ofs p1 p2, perm m b ofs p1 -> perm_order p1 p2 -> perm m b ofs p2.
Proof.
unfold perm, perm_order'; intros.
destruct (mem_access m b ofs); auto.
eapply perm_order_trans; eauto.
Qed.
Hint Local Resolve perm_implies: mem.
Theorem perm_valid_block:
forall m b ofs p, perm m b ofs p -> valid_block m b.
Proof.
unfold perm; intros.
destruct (zlt b m.(nextblock)).
auto.
assert (mem_access m b ofs = None).
destruct (nextblock_noaccess m b).
elimtype False; omega.
apply bounds_noaccess. rewrite H0; simpl; omega.
rewrite H0 in H.
contradiction.
Qed.
Hint Local Resolve perm_valid_block: mem.
Remark perm_order_dec:
forall p1 p2, {perm_order p1 p2} + {~perm_order p1 p2}.
Proof.
intros. destruct p1; destruct p2; (left; constructor) || (right; intro PO; inversion PO).
Qed.
Remark perm_order'_dec:
forall op p, {perm_order' op p} + {~perm_order' op p}.
Proof.
intros. destruct op; unfold perm_order'.
apply perm_order_dec.
right; tauto.
Qed.
Theorem perm_dec:
forall m b ofs p, {perm m b ofs p} + {~ perm m b ofs p}.
Proof.
unfold perm; intros.
apply perm_order'_dec.
Qed.
Definition range_perm (m: mem) (b: block) (lo hi: Z) (p: permission) : Prop :=
forall ofs, lo <= ofs < hi -> perm m b ofs p.
Theorem range_perm_implies:
forall m b lo hi p1 p2,
range_perm m b lo hi p1 -> perm_order p1 p2 -> range_perm m b lo hi p2.
Proof.
unfold range_perm; intros; eauto with mem.
Qed.
Hint Local Resolve range_perm_implies: mem.
Lemma range_perm_dec:
forall m b lo hi p, {range_perm m b lo hi p} + {~ range_perm m b lo hi p}.
Proof.
intros.
assert (forall n, 0 <= n ->
{range_perm m b lo (lo + n) p} + {~ range_perm m b lo (lo + n) p}).
apply natlike_rec2.
left. red; intros. omegaContradiction.
intros. destruct H0.
destruct (perm_dec m b (lo + z) p).
left. red; intros. destruct (zeq ofs (lo + z)). congruence. apply r. omega.
right; red; intro. elim n. apply H0. omega.
right; red; intro. elim n. red; intros. apply H0. omega.
destruct (zlt lo hi).
replace hi with (lo + (hi - lo)) by omega. apply H. omega.
left; red; intros. omegaContradiction.
Qed.
(** [valid_access m chunk b ofs p] holds if a memory access
of the given chunk is possible in [m] at address [b, ofs]
with permission [p].
This means:
- The range of bytes accessed all have permission [p].
- The offset [ofs] is aligned.
*)
Definition valid_access (m: mem) (chunk: memory_chunk) (b: block) (ofs: Z) (p: permission): Prop :=
range_perm m b ofs (ofs + size_chunk chunk) p
/\ (align_chunk chunk | ofs).
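(* Added sketch (not part of the original file): instantiating the definition
   for the 4-byte chunk [Mint32] (whose size and alignment are both 4 in
   [Memdata]) makes the two conditions above concrete: permission on the four
   accessed bytes, and 4-byte alignment of [ofs]. *)
Remark valid_access_int32:
  forall m b ofs p,
  valid_access m Mint32 b ofs p <->
  (range_perm m b ofs (ofs + 4) p /\ (4 | ofs)).
Proof.
  intros; split; auto.
Qed.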
Theorem valid_access_implies:
forall m chunk b ofs p1 p2,
valid_access m chunk b ofs p1 -> perm_order p1 p2 ->
valid_access m chunk b ofs p2.
Proof.
intros. inv H. constructor; eauto with mem.
Qed.
Theorem valid_access_freeable_any:
forall m chunk b ofs p,
valid_access m chunk b ofs Freeable ->
valid_access m chunk b ofs p.
Proof.
intros.
eapply valid_access_implies; eauto. constructor.
Qed.
Hint Local Resolve valid_access_implies: mem.
Theorem valid_access_valid_block:
forall m chunk b ofs,
valid_access m chunk b ofs Nonempty ->
valid_block m b.
Proof.
intros. destruct H.
assert (perm m b ofs Nonempty).
apply H. generalize (size_chunk_pos chunk). omega.
eauto with mem.
Qed.
Hint Local Resolve valid_access_valid_block: mem.
Lemma valid_access_perm:
forall m chunk b ofs p,
valid_access m chunk b ofs p ->
perm m b ofs p.
Proof.
intros. destruct H. apply H. generalize (size_chunk_pos chunk). omega.
Qed.
Lemma valid_access_compat:
forall m chunk1 chunk2 b ofs p,
size_chunk chunk1 = size_chunk chunk2 ->
valid_access m chunk1 b ofs p->
valid_access m chunk2 b ofs p.
Proof.
intros. inv H0. rewrite H in H1. constructor; auto.
rewrite <- (align_chunk_compat _ _ H). auto.
Qed.
Lemma valid_access_dec:
forall m chunk b ofs p,
{valid_access m chunk b ofs p} + {~ valid_access m chunk b ofs p}.
Proof.
intros.
destruct (range_perm_dec m b ofs (ofs + size_chunk chunk) p).
destruct (Zdivide_dec (align_chunk chunk) ofs (align_chunk_pos chunk)).
left; constructor; auto.
right; red; intro V; inv V; contradiction.
right; red; intro V; inv V; contradiction.
Qed.
(** [valid_pointer] is a boolean-valued function that says whether
the byte at the given location is nonempty. *)
Definition valid_pointer (m: mem) (b: block) (ofs: Z): bool :=
perm_dec m b ofs Nonempty.
Theorem valid_pointer_nonempty_perm:
forall m b ofs,
valid_pointer m b ofs = true <-> perm m b ofs Nonempty.
Proof.
intros. unfold valid_pointer.
destruct (perm_dec m b ofs Nonempty); simpl;
intuition congruence.
Qed.
Theorem valid_pointer_valid_access:
forall m b ofs,
valid_pointer m b ofs = true <-> valid_access m Mint8unsigned b ofs Nonempty.
Proof.
intros. rewrite valid_pointer_nonempty_perm.
split; intros.
split. simpl; red; intros. replace ofs0 with ofs by omega. auto.
simpl. apply Zone_divide.
destruct H. apply H. simpl. omega.
Qed.
(** Bounds *)
(** Each block has a low bound and a high bound, determined at allocation time
and invariant afterward. The crucial property of bounds is
that any offset below the low bound or above the high bound is
empty. *)
Notation low_bound m b := (fst(bounds m b)).
Notation high_bound m b := (snd(bounds m b)).
Theorem perm_in_bounds:
forall m b ofs p, perm m b ofs p -> low_bound m b <= ofs < high_bound m b.
Proof.
unfold perm. intros.
destruct (zlt ofs (fst (bounds m b))).
exploit bounds_noaccess. left; eauto.
intros.
rewrite H0 in H. contradiction.
destruct (zlt ofs (snd (bounds m b))).
omega.
exploit bounds_noaccess. right; eauto.
intro; rewrite H0 in H. contradiction.
Qed.
Theorem range_perm_in_bounds:
forall m b lo hi p,
range_perm m b lo hi p -> lo < hi -> low_bound m b <= lo /\ hi <= high_bound m b.
Proof.
intros. split.
exploit (perm_in_bounds m b lo p). apply H. omega. omega.
exploit (perm_in_bounds m b (hi-1) p). apply H. omega. omega.
Qed.
Theorem valid_access_in_bounds:
forall m chunk b ofs p,
valid_access m chunk b ofs p ->
low_bound m b <= ofs /\ ofs + size_chunk chunk <= high_bound m b.
Proof.
intros. inv H. apply range_perm_in_bounds with p; auto.
generalize (size_chunk_pos chunk). omega.
Qed.
Hint Local Resolve perm_in_bounds range_perm_in_bounds valid_access_in_bounds.
(** * Store operations *)
(** The initial store *)
Program Definition empty: mem :=
mkmem (fun b ofs => Undef)
(fun b ofs => None)
(fun b => (0,0))
1 _ _ _ _.
Next Obligation.
omega.
Qed.
Definition nullptr: block := 0.
(** Allocation of a fresh block with the given bounds. Return an updated
memory state and the address of the fresh block, which initially contains
undefined cells. Note that allocation never fails: we model an
infinite memory. *)
Program Definition alloc (m: mem) (lo hi: Z) :=
(mkmem (update m.(nextblock)
(fun ofs => Undef)
m.(mem_contents))
(update m.(nextblock)
(fun ofs => if zle lo ofs && zlt ofs hi then Some Freeable else None)
m.(mem_access))
(update m.(nextblock) (lo, hi) m.(bounds))
(Zsucc m.(nextblock))
_ _ _ _,
m.(nextblock)).
Next Obligation.
generalize (nextblock_pos m). omega.
Qed.
Next Obligation.
assert (0 < b < Zsucc (nextblock m) \/ b <= 0 \/ b > nextblock m) by omega.
destruct H as [?|[?|?]]. left; omega.
right.
rewrite update_o.
destruct (nextblock_noaccess m b); auto. elimtype False; omega.
generalize (nextblock_pos m); omega.
(* generalize (bounds_noaccess m b 0).*)
destruct (nextblock_noaccess m b); auto. left; omega.
rewrite update_o. right; auto. omega.
Qed.
Next Obligation.
unfold update in *. destruct (zeq b (nextblock m)).
simpl in H. destruct H.
unfold proj_sumbool. rewrite zle_false. auto. omega.
unfold proj_sumbool. rewrite zlt_false; auto. rewrite andb_false_r. auto.
apply bounds_noaccess. auto.
Qed.
Next Obligation.
unfold update.
destruct (zeq b (nextblock m)); auto.
apply noread_undef.
Qed.
(** Freeing a block between the given bounds.
Return the updated memory state where the given range of the given block
has been invalidated: future reads and writes to this
range will fail. Requires [Freeable] permission on the given range. *)
Definition clearN (m: block -> Z -> memval) (b: block) (lo hi: Z) :
block -> Z -> memval :=
fun b' ofs => if zeq b' b
then if zle lo ofs && zlt ofs hi then Undef else m b' ofs
else m b' ofs.
Lemma clearN_in:
forall m b lo hi ofs, lo <= ofs < hi -> clearN m b lo hi b ofs = Undef.
Proof.
intros. unfold clearN. rewrite zeq_true.
destruct H; unfold andb, proj_sumbool.
rewrite zle_true; auto. rewrite zlt_true; auto.
Qed.
Lemma clearN_out:
forall m b lo hi b' ofs, (b<>b' \/ ofs < lo \/ hi <= ofs) -> clearN m b lo hi b' ofs = m b' ofs.
Proof.
intros. unfold clearN. destruct (zeq b' b); auto.
subst b'.
destruct H. contradiction H; auto.
destruct (zle lo ofs); auto.
destruct (zlt ofs hi); auto.
elimtype False; omega.
Qed.
Program Definition unchecked_free (m: mem) (b: block) (lo hi: Z): mem :=
mkmem (clearN m.(mem_contents) b lo hi)
(update b
(fun ofs => if zle lo ofs && zlt ofs hi then None else m.(mem_access) b ofs)
m.(mem_access))
m.(bounds)
m.(nextblock) _ _ _ _.
Next Obligation.
apply nextblock_pos.
Qed.
Next Obligation.
apply (nextblock_noaccess m b0).
Qed.
Next Obligation.
unfold update. destruct (zeq b0 b). subst b0.
destruct (zle lo ofs); simpl; auto.
destruct (zlt ofs hi); simpl; auto.
apply bounds_noaccess; auto.
apply bounds_noaccess; auto.
apply bounds_noaccess; auto.
Qed.
Next Obligation.
unfold clearN, update.
(* destruct (noread_undef m b0 ofs); auto.*)
destruct (zeq b0 b). subst b0.
destruct (zle lo ofs); simpl; auto.
destruct (zlt ofs hi); simpl; auto.
apply noread_undef.
apply noread_undef.
apply noread_undef.
Qed.
Definition free (m: mem) (b: block) (lo hi: Z): option mem :=
if range_perm_dec m b lo hi Freeable
then Some(unchecked_free m b lo hi)
else None.
Fixpoint free_list (m: mem) (l: list (block * Z * Z)) {struct l}: option mem :=
match l with
| nil => Some m
| (b, lo, hi) :: l' =>
match free m b lo hi with
| None => None
| Some m' => free_list m' l'
end
end.
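(* Added sketch (not part of the original file): the two defining equations
   of [free_list], spelled out as computation facts. *)
Remark free_list_nil:
  forall m, free_list m nil = Some m.
Proof.
  intros; reflexivity.
Qed.
Remark free_list_cons:
  forall (m: mem) (b: block) (lo hi: Z) (l: list (block * Z * Z)),
  free_list m ((b, lo, hi) :: l) =
  match free m b lo hi with
  | Some m' => free_list m' l
  | None => None
  end.
Proof.
  intros; reflexivity.
Qed.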
(** Memory reads. *)
(** Reading N adjacent bytes in a block content. *)
Fixpoint getN (n: nat) (p: Z) (c: Z -> memval) {struct n}: list memval :=
match n with
| O => nil
| S n' => c p :: getN n' (p + 1) c
end.
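(* Added computation sketch (not part of the original file): [getN] returns
   the contents of [n] consecutive offsets starting at [p]. *)
Remark getN_2:
  forall (p: Z) (c: Z -> memval),
  getN 2%nat p c = c p :: c (p + 1) :: nil.
Proof.
  intros; reflexivity.
Qed.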
(** [load chunk m b ofs] performs a read in memory state [m], at address
[b] and offset [ofs]. It returns the value of the memory chunk
at that address. [None] is returned if the accessed bytes
are not readable. *)
Definition load (chunk: memory_chunk) (m: mem) (b: block) (ofs: Z): option val :=
if valid_access_dec m chunk b ofs Readable
then Some(decode_val chunk (getN (size_chunk_nat chunk) ofs (m.(mem_contents) b)))
else None.
(** [loadv chunk m addr] is similar, but the address and offset are given
as a single value [addr], which must be a pointer value. *)
Definition loadv (chunk: memory_chunk) (m: mem) (addr: val) : option val :=
match addr with
| Vptr b ofs => load chunk m b (Int.signed ofs)
| _ => None
end.
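(* Added unfolding sketch (not part of the original file): on a pointer
   value, [loadv] defers to [load] at the pointer's block and signed offset;
   on any other value it returns [None]. *)
Remark loadv_vptr:
  forall chunk m b ofs,
  loadv chunk m (Vptr b ofs) = load chunk m b (Int.signed ofs).
Proof.
  intros; reflexivity.
Qed.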
(** [loadbytes m b ofs n] reads [n] consecutive bytes starting at
location [(b, ofs)]. Returns [None] if the accessed locations are
not readable. *)
Definition loadbytes (m: mem) (b: block) (ofs n: Z): option (list memval) :=
if range_perm_dec m b ofs (ofs + n) Readable
then Some (getN (nat_of_Z n) ofs (m.(mem_contents) b))
else None.
(** Memory stores. *)
(** Writing N adjacent bytes in a block content. *)
Fixpoint setN (vl: list memval) (p: Z) (c: Z -> memval) {struct vl}: Z -> memval :=
match vl with
| nil => c
| v :: vl' => setN vl' (p + 1) (update p v c)
end.
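(* Added sanity check (not part of the original file): writing a single byte
   at [p] with [setN] behaves like a single [update] at [p]. *)
Remark setN_single:
  forall (mv: memval) (p: Z) (c: Z -> memval),
  setN (mv :: nil) p c p = mv.
Proof.
  intros.
  change (setN (mv :: nil) p c p) with (update p mv c p).
  apply update_s.
Qed.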
Remark setN_other:
forall vl c p q,
(forall r, p <= r < p + Z_of_nat (length vl) -> r <> q) ->
setN vl p c q = c q.
Proof.
induction vl; intros; simpl.
auto.
simpl length in H. rewrite inj_S in H.
transitivity (update p a c q).
apply IHvl. intros. apply H. omega.
apply update_o. apply H. omega.
Qed.
Remark setN_outside:
forall vl c p q,
q < p \/ q >= p + Z_of_nat (length vl) ->
setN vl p c q = c q.
Proof.
intros. apply setN_other.
intros. omega.
Qed.
Remark getN_setN_same:
forall vl p c,
getN (length vl) p (setN vl p c) = vl.
Proof.
induction vl; intros; simpl.
auto.
decEq.
rewrite setN_outside. apply update_s. omega.
apply IHvl.
Qed.
Remark getN_setN_outside:
forall vl q c n p,
p + Z_of_nat n <= q \/ q + Z_of_nat (length vl) <= p ->
getN n p (setN vl q c) = getN n p c.
Proof.
induction n; intros; simpl.
auto.
rewrite inj_S in H. decEq.
apply setN_outside. omega.
apply IHn. omega.
Qed.
Lemma store_noread_undef:
forall m ch b ofs (VA: valid_access m ch b ofs Writable) v,
forall b' ofs',
perm m b' ofs' Readable \/
update b (setN (encode_val ch v) ofs (mem_contents m b)) (mem_contents m) b' ofs' = Undef.
Proof.
intros.
destruct VA as [? _].
(*specialize (H ofs').*)
unfold update.
destruct (zeq b' b).
subst b'.
assert (ofs <= ofs' < ofs + size_chunk ch \/ (ofs' < ofs \/ ofs' >= ofs + size_chunk ch)) by omega.
destruct H0.
exploit (H ofs'); auto; intro.
eauto with mem.
rewrite setN_outside.
destruct (noread_undef m b ofs'); auto.
rewrite encode_val_length. rewrite <- size_chunk_conv; auto.
destruct (noread_undef m b' ofs'); auto.
Qed.
(** [store chunk m b ofs v] performs a write in memory state [m].
Value [v] is stored at address [b] and offset [ofs].
Return the updated memory store, or [None] if the accessed bytes
are not writable. *)
Definition store (chunk: memory_chunk) (m: mem) (b: block) (ofs: Z) (v: val): option mem :=
match valid_access_dec m chunk b ofs Writable with
| left VA => Some (mkmem (update b
(setN (encode_val chunk v) ofs (m.(mem_contents) b))
m.(mem_contents))
m.(mem_access)
m.(bounds)
m.(nextblock)
m.(nextblock_pos)
m.(nextblock_noaccess)
m.(bounds_noaccess)
(store_noread_undef m chunk b ofs VA v))
| right _ => None
end.
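(* Added sanity sketch (not part of the original file): [store] checks the
   access and returns [None] whenever writing is not allowed. *)
Remark store_invalid_access_none:
  forall chunk m b ofs v,
  ~ valid_access m chunk b ofs Writable ->
  store chunk m b ofs v = None.
Proof.
  intros. unfold store.
  destruct (valid_access_dec m chunk b ofs Writable).
  contradiction.
  auto.
Qed.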
(** [storev chunk m addr v] is similar, but the address and offset are given
as a single value [addr], which must be a pointer value. *)
Definition storev (chunk: memory_chunk) (m: mem) (addr v: val) : option mem :=
match addr with
| Vptr b ofs => store chunk m b (Int.signed ofs) v
| _ => None
end.
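(* Added unfolding sketch (not part of the original file): on a pointer
   value, [storev] defers to [store]; on any other value it returns [None]. *)
Remark storev_vptr:
  forall chunk m b ofs v,
  storev chunk m (Vptr b ofs) v = store chunk m b (Int.signed ofs) v.
Proof.
  intros; reflexivity.
Qed.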
(** * Properties of the memory operations *)
(** Properties of the empty store. *)
Theorem nextblock_empty: nextblock empty = 1.
Proof. reflexivity. Qed.
Theorem perm_empty: forall b ofs p, ~perm empty b ofs p.
Proof.
intros. unfold perm, empty; simpl. congruence.
Qed.
Theorem valid_access_empty: forall chunk b ofs p, ~valid_access empty chunk b ofs p.
Proof.
intros. red; intros. elim (perm_empty b ofs p). apply H.
generalize (size_chunk_pos chunk); omega.
Qed.
(** ** Properties related to [load] *)
Theorem valid_access_load:
forall m chunk b ofs,
valid_access m chunk b ofs Readable ->
exists v, load chunk m b ofs = Some v.
Proof.
intros. econstructor. unfold load. rewrite pred_dec_true; eauto.
Qed.
Theorem load_valid_access:
forall m chunk b ofs v,
load chunk m b ofs = Some v ->
valid_access m chunk b ofs Readable.
Proof.
intros until v. unfold load.
destruct (valid_access_dec m chunk b ofs Readable); intros.
auto.
congruence.
Qed.
Lemma load_result:
forall chunk m b ofs v,
load chunk m b ofs = Some v ->
v = decode_val chunk (getN (size_chunk_nat chunk) ofs (m.(mem_contents) b)).
Proof.
intros until v. unfold load.
destruct (valid_access_dec m chunk b ofs Readable); intros.
congruence.
congruence.
Qed.
Hint Local Resolve load_valid_access valid_access_load: mem.
Theorem load_type:
forall m chunk b ofs v,
load chunk m b ofs = Some v ->
Val.has_type v (type_of_chunk chunk).
Proof.
intros. exploit load_result; eauto; intros. rewrite H0.
apply decode_val_type.
Qed.
Theorem load_cast:
forall m chunk b ofs v,
load chunk m b ofs = Some v ->
match chunk with
| Mint8signed => v = Val.sign_ext 8 v
| Mint8unsigned => v = Val.zero_ext 8 v
| Mint16signed => v = Val.sign_ext 16 v
| Mint16unsigned => v = Val.zero_ext 16 v
| Mfloat32 => v = Val.singleoffloat v
| _ => True
end.
Proof.
intros. exploit load_result; eauto.
set (l := getN (size_chunk_nat chunk) ofs (mem_contents m b)).
intros. subst v. apply decode_val_cast.
Qed.
Theorem load_int8_signed_unsigned:
forall m b ofs,
load Mint8signed m b ofs = option_map (Val.sign_ext 8) (load Mint8unsigned m b ofs).
Proof.
intros. unfold load.
change (size_chunk_nat Mint8signed) with (size_chunk_nat Mint8unsigned).
set (cl := getN (size_chunk_nat Mint8unsigned) ofs (mem_contents m b)).
destruct (valid_access_dec m Mint8signed b ofs Readable).
rewrite pred_dec_true; auto. unfold decode_val.
destruct (proj_bytes cl); auto. rewrite decode_int8_signed_unsigned. auto.
rewrite pred_dec_false; auto.
Qed.
Theorem load_int16_signed_unsigned:
forall m b ofs,
load Mint16signed m b ofs = option_map (Val.sign_ext 16) (load Mint16unsigned m b ofs).
Proof.
intros. unfold load.
change (size_chunk_nat Mint16signed) with (size_chunk_nat Mint16unsigned).
set (cl := getN (size_chunk_nat Mint16unsigned) ofs (mem_contents m b)).
destruct (valid_access_dec m Mint16signed b ofs Readable).
rewrite pred_dec_true; auto. unfold decode_val.
destruct (proj_bytes cl); auto. rewrite decode_int16_signed_unsigned. auto.
rewrite pred_dec_false; auto.
Qed.
Theorem loadbytes_load:
forall chunk m b ofs bytes,
loadbytes m b ofs (size_chunk chunk) = Some bytes ->
(align_chunk chunk | ofs) ->
load chunk m b ofs = Some(decode_val chunk bytes).
Proof.
unfold loadbytes, load; intros.
destruct (range_perm_dec m b ofs (ofs + size_chunk chunk) Readable);
try congruence.
inv H. rewrite pred_dec_true. auto.
split; auto.
Qed.
Theorem load_loadbytes:
forall chunk m b ofs v,
load chunk m b ofs = Some v ->
exists bytes, loadbytes m b ofs (size_chunk chunk) = Some bytes
/\ v = decode_val chunk bytes.
Proof.
intros. exploit load_valid_access; eauto. intros [A B].
exploit load_result; eauto. intros.
exists (getN (size_chunk_nat chunk) ofs (mem_contents m b)); split.
unfold loadbytes. rewrite pred_dec_true; auto.
auto.
Qed.
Lemma getN_length:
forall c n p, length (getN n p c) = n.
Proof.
induction n; simpl; intros. auto. decEq; auto.
Qed.
Theorem loadbytes_length:
forall m b ofs n bytes,
loadbytes m b ofs n = Some bytes ->
length bytes = nat_of_Z n.
Proof.
unfold loadbytes; intros.
destruct (range_perm_dec m b ofs (ofs + n) Readable); try congruence.
inv H. apply getN_length.
Qed.
Lemma getN_concat:
forall c n1 n2 p,
getN (n1 + n2)%nat p c = getN n1 p c ++ getN n2 (p + Z_of_nat n1) c.
Proof.
induction n1; intros.
simpl. decEq. omega.
rewrite inj_S. simpl. decEq.
replace (p + Zsucc (Z_of_nat n1)) with ((p + 1) + Z_of_nat n1) by omega.
auto.
Qed.
Theorem loadbytes_concat:
forall m b ofs n1 n2 bytes1 bytes2,
loadbytes m b ofs n1 = Some bytes1 ->
loadbytes m b (ofs + n1) n2 = Some bytes2 ->
n1 >= 0 -> n2 >= 0 ->
loadbytes m b ofs (n1 + n2) = Some(bytes1 ++ bytes2).
Proof.
unfold loadbytes; intros.
destruct (range_perm_dec m b ofs (ofs + n1) Readable); try congruence.
destruct (range_perm_dec m b (ofs + n1) (ofs + n1 + n2) Readable); try congruence.
rewrite pred_dec_true. rewrite nat_of_Z_plus; auto.
rewrite getN_concat. rewrite nat_of_Z_eq; auto.
congruence.
red; intros.
assert (ofs0 < ofs + n1 \/ ofs0 >= ofs + n1) by omega.
destruct H4. apply r; omega. apply r0; omega.
Qed.
Theorem loadbytes_split:
forall m b ofs n1 n2 bytes,
loadbytes m b ofs (n1 + n2) = Some bytes ->
n1 >= 0 -> n2 >= 0 ->
exists bytes1, exists bytes2,
loadbytes m b ofs n1 = Some bytes1
/\ loadbytes m b (ofs + n1) n2 = Some bytes2
/\ bytes = bytes1 ++ bytes2.
Proof.
unfold loadbytes; intros.
destruct (range_perm_dec m b ofs (ofs + (n1 + n2)) Readable);
try congruence.
rewrite nat_of_Z_plus in H; auto. rewrite getN_concat in H.
rewrite nat_of_Z_eq in H; auto.
repeat rewrite pred_dec_true.
econstructor; econstructor.
split. reflexivity. split. reflexivity. congruence.
red; intros; apply r; omega.
red; intros; apply r; omega.
Qed.
Theorem load_rep:
forall ch m1 m2 b ofs v1 v2,
(forall z, 0 <= z < size_chunk ch -> mem_contents m1 b (ofs+z) = mem_contents m2 b (ofs+z)) ->
load ch m1 b ofs = Some v1 ->
load ch m2 b ofs = Some v2 ->
v1 = v2.
Proof.
intros.
apply load_result in H0.
apply load_result in H1.
subst.
f_equal.
rewrite size_chunk_conv in H.
remember (size_chunk_nat ch) as n; clear Heqn.
revert ofs H; induction n; intros; simpl; auto.
f_equal.
rewrite inj_S in H.
replace ofs with (ofs+0) by omega.
apply H; omega.
apply IHn.
intros.
rewrite <- Zplus_assoc.
apply H.
rewrite inj_S. omega.
Qed.
(** ** Properties related to [store] *)
Theorem valid_access_store:
forall m1 chunk b ofs v,
valid_access m1 chunk b ofs Writable ->
{ m2: mem | store chunk m1 b ofs v = Some m2 }.
Proof.
intros.
unfold store.
destruct (valid_access_dec m1 chunk b ofs Writable).
eauto.
contradiction.
Qed.
Hint Local Resolve valid_access_store: mem.
Section STORE.
Variable chunk: memory_chunk.
Variable m1: mem.
Variable b: block.
Variable ofs: Z.
Variable v: val.
Variable m2: mem.
Hypothesis STORE: store chunk m1 b ofs v = Some m2.
(*
Lemma store_result:
m2 = unchecked_store chunk m1 b ofs v.
Proof.
unfold store in STORE.
destruct (valid_access_dec m1 chunk b ofs Writable).
congruence.
congruence.
Qed.
*)
Lemma store_access: mem_access m2 = mem_access m1.
Proof.
unfold store in STORE. destruct ( valid_access_dec m1 chunk b ofs Writable); inv STORE.
auto.
Qed.
Lemma store_mem_contents: mem_contents m2 =
update b (setN (encode_val chunk v) ofs (m1.(mem_contents) b)) m1.(mem_contents).
Proof.
unfold store in STORE. destruct ( valid_access_dec m1 chunk b ofs Writable); inv STORE.
auto.
Qed.
Theorem perm_store_1:
forall b' ofs' p, perm m1 b' ofs' p -> perm m2 b' ofs' p.
Proof.
intros.
unfold perm in *. rewrite store_access; auto.
Qed.
Theorem perm_store_2:
forall b' ofs' p, perm m2 b' ofs' p -> perm m1 b' ofs' p.
Proof.
intros. unfold perm in *. rewrite store_access in H; auto.
Qed.
Hint Local Resolve perm_store_1 perm_store_2: mem.
Theorem nextblock_store:
nextblock m2 = nextblock m1.
Proof.
intros.
unfold store in STORE. destruct ( valid_access_dec m1 chunk b ofs Writable); inv STORE.
auto.
Qed.
Theorem store_valid_block_1:
forall b', valid_block m1 b' -> valid_block m2 b'.
Proof.
unfold valid_block; intros. rewrite nextblock_store; auto.
Qed.
Theorem store_valid_block_2:
forall b', valid_block m2 b' -> valid_block m1 b'.
Proof.
unfold valid_block; intros. rewrite nextblock_store in H; auto.
Qed.
Hint Local Resolve store_valid_block_1 store_valid_block_2: mem.
Theorem store_valid_access_1:
forall chunk' b' ofs' p,
valid_access m1 chunk' b' ofs' p -> valid_access m2 chunk' b' ofs' p.
Proof.
intros. inv H. constructor; try red; auto with mem.
Qed.
Theorem store_valid_access_2:
forall chunk' b' ofs' p,
valid_access m2 chunk' b' ofs' p -> valid_access m1 chunk' b' ofs' p.
Proof.
intros. inv H. constructor; try red; auto with mem.
Qed.
Theorem store_valid_access_3:
valid_access m1 chunk b ofs Writable.
Proof.
unfold store in STORE. destruct (valid_access_dec m1 chunk b ofs Writable).
auto.
congruence.
Qed.
Hint Local Resolve store_valid_access_1 store_valid_access_2
store_valid_access_3: mem.
Theorem bounds_store:
forall b', bounds m2 b' = bounds m1 b'.
Proof.
intros.
unfold store in STORE.
destruct ( valid_access_dec m1 chunk b ofs Writable); inv STORE. simpl. auto.
Qed.
Theorem load_store_similar:
forall chunk',
size_chunk chunk' = size_chunk chunk ->
exists v', load chunk' m2 b ofs = Some v' /\ decode_encode_val v chunk chunk' v'.
Proof.
intros.
exploit (valid_access_load m2 chunk').
eapply valid_access_compat. symmetry; eauto. eauto with mem.
intros [v' LOAD].
exists v'; split; auto.
exploit load_result; eauto. intros B.
rewrite B. rewrite store_mem_contents; simpl.
rewrite update_s.
replace (size_chunk_nat chunk') with (length (encode_val chunk v)).
rewrite getN_setN_same. apply decode_encode_val_general.
rewrite encode_val_length. repeat rewrite size_chunk_conv in H.
apply inj_eq_rev; auto.
Qed.
Theorem load_store_same:
Val.has_type v (type_of_chunk chunk) ->
load chunk m2 b ofs = Some (Val.load_result chunk v).
Proof.
intros.
destruct (load_store_similar chunk) as [v' [A B]]. auto.
rewrite A. decEq. eapply decode_encode_val_similar; eauto.
Qed.
Theorem load_store_other:
forall chunk' b' ofs',
b' <> b
\/ ofs' + size_chunk chunk' <= ofs
\/ ofs + size_chunk chunk <= ofs' ->
load chunk' m2 b' ofs' = load chunk' m1 b' ofs'.
Proof.
intros. unfold load.
destruct (valid_access_dec m1 chunk' b' ofs' Readable).
rewrite pred_dec_true.
decEq. decEq. rewrite store_mem_contents; simpl.
unfold update. destruct (zeq b' b). subst b'.
apply getN_setN_outside. rewrite encode_val_length. repeat rewrite <- size_chunk_conv.
intuition.
auto.
eauto with mem.
rewrite pred_dec_false. auto.
eauto with mem.
Qed.
Theorem loadbytes_store_same: