@@ -13,18 +13,33 @@ use rustc_data_structures::sorted_map::SortedMap;
 use rustc_target::abi::HasDataLayout;
 use std::borrow::Cow;
 
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
+// NOTE: When adding new fields, make sure to adjust the Snapshot impl in
+// `src/librustc_mir/interpret/snapshot.rs`.
+#[derive(
+    Clone,
+    Debug,
+    Eq,
+    PartialEq,
+    PartialOrd,
+    Ord,
+    Hash,
+    RustcEncodable,
+    RustcDecodable,
+    HashStable,
+)]
 pub struct Allocation<Tag = (), Extra = ()> {
     /// The actual bytes of the allocation.
-    /// Note that the bytes of a pointer represent the offset of the pointer
-    pub bytes: Vec<u8>,
+    /// Note that the bytes of a pointer represent the offset of the pointer.
+    bytes: Vec<u8>,
     /// Maps from byte addresses to extra data for each pointer.
     /// Only the first byte of a pointer is inserted into the map; i.e.,
     /// every entry in this map applies to `pointer_size` consecutive bytes starting
     /// at the given offset.
-    pub relocations: Relocations<Tag>,
-    /// Denotes undefined memory. Reading from undefined memory is forbidden in miri
-    pub undef_mask: UndefMask,
+    relocations: Relocations<Tag>,
+    /// Denotes which part of this allocation is initialized.
+    undef_mask: UndefMask,
+    /// The size of the allocation. Currently, must always equal `bytes.len()`.
+    pub size: Size,
     /// The alignment of the allocation to detect unaligned reads.
     pub align: Align,
     /// Whether the allocation is mutable.
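In this hunk, `bytes`, `relocations`, and `undef_mask` lose their `pub` in the same change that adds `size`, whose doc comment requires it to always equal `bytes.len()`; field privacy is what lets the type enforce that invariant at its constructors. A minimal standalone sketch of the pattern (hypothetical `Buffer` type, not rustc code):

```rust
pub struct Buffer {
    // Private: outside code can no longer grow `bytes` and silently
    // desynchronize it from `size`.
    bytes: Vec<u8>,
    pub size: usize,
}

impl Buffer {
    pub fn new(bytes: Vec<u8>) -> Self {
        // The invariant `size == bytes.len()` is established once, here...
        let size = bytes.len();
        Buffer { bytes, size }
    }

    // ...and every other method may rely on it.
    pub fn len(&self) -> usize {
        self.size
    }
}
```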
@@ -85,11 +100,12 @@ impl<Tag> Allocation<Tag> {
     /// Creates a read-only allocation initialized by the given bytes
     pub fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, align: Align) -> Self {
         let bytes = slice.into().into_owned();
-        let undef_mask = UndefMask::new(Size::from_bytes(bytes.len() as u64), true);
+        let size = Size::from_bytes(bytes.len() as u64);
         Self {
             bytes,
             relocations: Relocations::new(),
-            undef_mask,
+            undef_mask: UndefMask::new(size, true),
+            size,
             align,
             mutability: Mutability::Immutable,
             extra: (),
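A typical call site of this constructor might look as follows (a sketch assuming the rustc-internal `Align::from_bytes` API of this era; it only compiles inside the compiler tree):

```rust
use rustc::mir::interpret::Allocation;
use rustc_target::abi::Align;

fn example() {
    // A read-only allocation holding a 5-byte constant. `size` is now computed
    // once from `bytes.len()` and shared by the field and the undef mask.
    let alloc = Allocation::from_bytes(&b"hello"[..], Align::from_bytes(1).unwrap());
    assert_eq!(alloc.len(), 5);
}
```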
@@ -106,13 +122,39 @@ impl<Tag> Allocation<Tag> {
             bytes: vec![0; size.bytes() as usize],
             relocations: Relocations::new(),
             undef_mask: UndefMask::new(size, false),
+            size,
             align,
             mutability: Mutability::Mutable,
             extra: (),
         }
     }
 }
 
+/// Raw accessors. Provide access to otherwise private bytes.
+impl<Tag, Extra> Allocation<Tag, Extra> {
+    pub fn len(&self) -> usize {
+        self.size.bytes() as usize
+    }
+
+    /// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
+    /// from `get_bytes_with_undef_and_ptr` in that it does no relocation checks (even on the
+    /// edges) at all. It further ignores `AllocationExtra` callbacks.
+    /// This must not be used for reads affecting the interpreter execution.
+    pub fn inspect_with_undef_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
+        &self.bytes[range]
+    }
+
+    /// Returns the undef mask.
+    pub fn undef_mask(&self) -> &UndefMask {
+        &self.undef_mask
+    }
+
+    /// Returns the relocation list.
+    pub fn relocations(&self) -> &Relocations<Tag> {
+        &self.relocations
+    }
+}
+
 impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx Allocation {}
 
 /// Byte accessors
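Call sites that previously reached into `alloc.bytes` migrate to these accessors. A sketch of the before/after (assuming an `alloc: Allocation` value in scope and `Size` from `rustc_target::abi`; rustc-internal, not standalone):

```rust
// Before: direct field access (no longer compiles; the field is private).
// let byte = alloc.bytes[0];

// After: the accessor's name spells out what the access does *not* check.
let byte = alloc.inspect_with_undef_and_ptr_outside_interpreter(0..1)[0];

// The other accessors hand out shared references only, so callers
// can inspect but never mutate the mask or the relocation list.
let initialized = alloc.undef_mask().get(Size::ZERO);
let relocations = alloc.relocations();
```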
@@ -132,9 +174,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         );
         let end = end.bytes() as usize;
         assert!(
-            end <= self.bytes.len(),
+            end <= self.len(),
             "Out-of-bounds access at offset {}, size {} in allocation of size {}",
-            offset.bytes(), size.bytes(), self.bytes.len()
+            offset.bytes(), size.bytes(), self.len()
         );
         (offset.bytes() as usize)..end
     }
@@ -422,7 +464,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
 /// Relocations
 impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
     /// Returns all relocations overlapping with the given ptr-offset pair.
-    pub fn relocations(
+    pub fn get_relocations(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
@@ -443,7 +485,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         ptr: Pointer<Tag>,
         size: Size,
     ) -> InterpResult<'tcx> {
-        if self.relocations(cx, ptr, size).is_empty() {
+        if self.get_relocations(cx, ptr, size).is_empty() {
             Ok(())
         } else {
             throw_unsup!(ReadPointerAsBytes)
@@ -465,7 +507,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         // Find the start and end of the given range and its outermost relocations.
         let (first, last) = {
             // Find all relocations overlapping the given range.
-            let relocations = self.relocations(cx, ptr, size);
+            let relocations = self.get_relocations(cx, ptr, size);
             if relocations.is_empty() {
                 return Ok(());
             }
@@ -536,6 +578,94 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
     }
 }
 
+/// Run-length encoding of the undef mask.
+/// Used to copy parts of a mask multiple times to another allocation.
+pub struct AllocationDefinedness {
+    /// The definedness of the first range.
+    initial: bool,
+    /// The lengths of the run-length encoded ranges.
+    /// The definedness of the ranges alternates, starting with `initial`.
+    ranges: smallvec::SmallVec<[u64; 1]>,
+}
+
+/// Transferring the definedness mask to other allocations.
+impl<Tag, Extra> Allocation<Tag, Extra> {
+    /// Creates a run-length encoding of the undef mask.
+    pub fn compress_undef_range(
+        &self,
+        src: Pointer<Tag>,
+        size: Size,
+    ) -> AllocationDefinedness {
+        // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
+        // a naive undef mask copying algorithm would repeatedly have to read the undef mask from
+        // the source and write it to the destination. Even if we optimized the memory accesses,
+        // we'd be doing all of this `repeat` times.
+        // Therefore, we precompute a compressed version of the undef mask of the source value and
+        // then write it back `repeat` times without computing any more information from the source.
+
+        // A precomputed cache for ranges of defined/undefined bits:
+        // 0000010010001110 will become
+        // [5, 1, 2, 1, 3, 3, 1],
+        // where each element toggles the state.
+
+        let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
+        let initial = self.undef_mask.get(src.offset);
+        let mut cur_len = 1;
+        let mut cur = initial;
+
+        for i in 1..size.bytes() {
+            // FIXME: optimize to bitshift the current undef block's bits and read the top bit
+            if self.undef_mask.get(src.offset + Size::from_bytes(i)) == cur {
+                cur_len += 1;
+            } else {
+                ranges.push(cur_len);
+                cur_len = 1;
+                cur = !cur;
+            }
+        }
+
+        ranges.push(cur_len);
+
+        AllocationDefinedness { ranges, initial }
+    }
+
+    /// Applies multiple instances of the run-length encoding to the undef mask.
+    pub fn mark_compressed_undef_range(
+        &mut self,
+        defined: &AllocationDefinedness,
+        dest: Pointer<Tag>,
+        size: Size,
+        repeat: u64,
+    ) {
+        // An optimization where we can just overwrite an entire range of definedness bits,
+        // if they are going to be uniformly `1` or `0`.
+        if defined.ranges.len() <= 1 {
+            self.undef_mask.set_range_inbounds(
+                dest.offset,
+                dest.offset + size * repeat,
+                defined.initial,
+            );
+            return;
+        }
+
+        for mut j in 0..repeat {
+            j *= size.bytes();
+            j += dest.offset.bytes();
+            let mut cur = defined.initial;
+            for range in &defined.ranges {
+                let old_j = j;
+                j += range;
+                self.undef_mask.set_range_inbounds(
+                    Size::from_bytes(old_j),
+                    Size::from_bytes(j),
+                    cur,
+                );
+                cur = !cur;
+            }
+        }
+    }
+}
+
@@ -566,6 +696,59 @@ impl<Tag> DerefMut for Relocations<Tag> {
     }
 }
 
+/// A partial, owned list of relocations to transfer into another allocation.
+pub struct AllocationRelocations<Tag> {
+    relative_relocations: Vec<(Size, (Tag, AllocId))>,
+}
+
+impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
+    pub fn prepare_relocation_copy(
+        &self,
+        cx: &impl HasDataLayout,
+        src: Pointer<Tag>,
+        size: Size,
+        dest: Pointer<Tag>,
+        length: u64,
+    ) -> AllocationRelocations<Tag> {
+        let relocations = self.get_relocations(cx, src, size);
+        if relocations.is_empty() {
+            return AllocationRelocations { relative_relocations: Vec::new() };
+        }
+
+        let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
+
+        for i in 0..length {
+            new_relocations.extend(
+                relocations
+                    .iter()
+                    .map(|&(offset, reloc)| {
+                        // Compute the offset of the current repetition.
+                        let dest_offset = dest.offset + (i * size);
+                        (
+                            // Shift offsets from the source allocation to the destination allocation.
+                            offset + dest_offset - src.offset,
+                            reloc,
+                        )
+                    })
+            );
+        }
+
+        AllocationRelocations {
+            relative_relocations: new_relocations,
+        }
+    }
+
+    /// Applies a relocation copy.
+    /// The affected range, as defined in the parameters to `prepare_relocation_copy`, is expected
+    /// to be clear of relocations.
+    pub fn mark_relocation_range(
+        &mut self,
+        relocations: AllocationRelocations<Tag>,
+    ) {
+        self.relocations.insert_presorted(relocations.relative_relocations);
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Undefined byte tracking
 ////////////////////////////////////////////////////////////////////////////////
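The offset arithmetic in `prepare_relocation_copy` is the subtle part: each relocation's offset is rebased from the source range onto every repetition of the destination range. A standalone sketch with plain `u64` offsets standing in for `Size`/`Pointer` (hypothetical helper, illustration only):

```rust
/// Rebase relocation offsets from a source range onto `length` back-to-back
/// copies of it at `dest`, mirroring the loop in `prepare_relocation_copy`.
fn shift_relocations(
    relocations: &[(u64, &'static str)], // (offset, tag); &str stands in for the tag
    src: u64,
    size: u64,
    dest: u64,
    length: u64,
) -> Vec<(u64, &'static str)> {
    let mut out = Vec::with_capacity(relocations.len() * length as usize);
    for i in 0..length {
        // Offset of the current repetition within the destination.
        let dest_offset = dest + i * size;
        out.extend(relocations.iter().map(|&(offset, tag)| (offset + dest_offset - src, tag)));
    }
    out
}

fn main() {
    // One relocation 3 bytes into an 8-byte source range at offset 10,
    // copied twice to offset 100.
    let out = shift_relocations(&[(13, "ptr")], 10, 8, 100, 2);
    // It lands 3 bytes into each copy, and the result is already sorted,
    // which is what makes `insert_presorted` applicable.
    assert_eq!(out, [(103, "ptr"), (111, "ptr")]);
}
```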