@@ -1,6 +1,6 @@
 use std::ops::Range;
 
-use rustc_abi::{Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
+use rustc_abi::{Align, Endian, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
 use rustc_codegen_ssa::common;
 use rustc_codegen_ssa::traits::*;
 use rustc_hir::LangItem;
@@ -28,6 +28,7 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
     cx: &CodegenCx<'ll, '_>,
     alloc: &Allocation,
     is_static: bool,
+    vtable_base: Option<&'ll Value>,
 ) -> &'ll Value {
     // We expect that callers of const_alloc_to_llvm will instead directly codegen a pointer or
     // integer for any &ZST where the ZST is a constant (i.e. not a static). We should never be
@@ -43,6 +44,8 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
     let dl = cx.data_layout();
     let pointer_size = dl.pointer_size();
     let pointer_size_bytes = pointer_size.bytes() as usize;
+    let use_relative_layout = cx.sess().opts.unstable_opts.experimental_relative_rust_abi_vtables
+        && vtable_base.is_some();
 
     // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
     // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
@@ -51,7 +54,11 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
         cx: &'a CodegenCx<'ll, 'b>,
         alloc: &'a Allocation,
         range: Range<usize>,
+        use_relative_layout: bool,
     ) {
+        let dl = cx.data_layout();
+        let pointer_size = dl.pointer_size();
+        let pointer_size_bytes = pointer_size.bytes() as usize;
         let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());
 
         let chunk_to_llval = move |chunk| match chunk {
@@ -74,7 +81,43 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
         let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max;
 
         if allow_uninit_chunks {
-            llvals.extend(chunks.map(chunk_to_llval));
+            if use_relative_layout {
+                // Rather than being stored as a struct of pointers and byte arrays, a relative
+                // vtable is a plain i32 array, so its components must be emitted as i32s. Here
+                // we explicitly regroup every run of plain bytes into i32s.
+                //
+                // This only works if each pointer-sized constant actually fits into 4 bytes.
+                for chunk in chunks {
+                    match chunk {
+                        InitChunk::Init(range) => {
+                            let range =
+                                (range.start.bytes() as usize)..(range.end.bytes() as usize);
+                            let bytes =
+                                alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
+                            for bytes in bytes.chunks_exact(pointer_size_bytes) {
+                                assert!(
+                                    bytes[4..pointer_size_bytes].iter().all(|&x| x == 0),
+                                    "cannot fit constant into 4 bytes: {:?}",
+                                    bytes
+                                );
+                                let bytes: [u8; 4] = bytes[0..4].try_into().unwrap();
+                                let val: u32 = match dl.endian {
+                                    Endian::Big => u32::from_be_bytes(bytes),
+                                    Endian::Little => u32::from_le_bytes(bytes),
+                                };
+                                llvals.push(cx.const_u32(val));
+                            }
+                        }
+                        InitChunk::Uninit(range) => {
+                            let len = range.end.bytes() - range.start.bytes();
+                            let val = cx.const_undef(cx.type_array(cx.type_i8(), len / 2));
+                            llvals.push(val);
+                        }
+                    }
+                }
+            } else {
+                llvals.extend(chunks.map(chunk_to_llval));
+            }
         } else {
             // If this allocation contains any uninit bytes, codegen as if it was initialized
             // (using some arbitrary value for uninit bytes).
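For illustration, the regrouping in the hunk above can be reproduced outside the compiler. This standalone sketch assumes a 64-bit little-endian target; `pack_vtable_words` and the sample bytes are made up for the example and are not part of this change:

```rust
fn pack_vtable_words(bytes: &[u8]) -> Vec<u32> {
    // Each pointer-sized (here: 8-byte) slot must fit into an i32 for the
    // relative layout, so its upper half has to be zero.
    bytes
        .chunks_exact(8)
        .map(|slot| {
            assert!(slot[4..].iter().all(|&b| b == 0), "cannot fit constant into 4 bytes");
            u32::from_le_bytes(slot[0..4].try_into().unwrap())
        })
        .collect()
}

fn main() {
    // Two metadata slots as they might appear in a vtable allocation: size 24, align 8.
    let alloc = [24u8, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0];
    assert_eq!(pack_vtable_words(&alloc), vec![24, 8]);
}
```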
@@ -92,7 +135,13 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
             // This `inspect` is okay since we have checked that there is no provenance, it
             // is within the bounds of the allocation, and it doesn't affect interpreter execution
             // (we inspect the result after interpreter execution).
-            append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
+            append_chunks_of_init_and_uninit_bytes(
+                &mut llvals,
+                cx,
+                alloc,
+                next_offset..offset,
+                use_relative_layout,
+            );
         }
         let ptr_offset = read_target_uint(
             dl.endian,
@@ -108,38 +157,64 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
 
         let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);
 
-        llvals.push(cx.scalar_to_backend(
-            InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
-            Scalar::Initialized {
-                value: Primitive::Pointer(address_space),
-                valid_range: WrappingRange::full(pointer_size),
-            },
-            cx.type_ptr_ext(address_space),
-        ));
+        let s = {
+            let scalar = cx.scalar_to_backend(
+                InterpScalar::from_pointer(
+                    Pointer::new(prov, Size::from_bytes(ptr_offset)),
+                    &cx.tcx,
+                ),
+                Scalar::Initialized {
+                    value: Primitive::Pointer(address_space),
+                    valid_range: WrappingRange::full(pointer_size),
+                },
+                cx.type_ptr_ext(address_space),
+            );
+
+            if use_relative_layout {
+                unsafe {
+                    let fptr = llvm::LLVMDSOLocalEquivalent(scalar);
+                    let sub = llvm::LLVMConstSub(
+                        llvm::LLVMConstPtrToInt(fptr, cx.type_i64()),
+                        llvm::LLVMConstPtrToInt(vtable_base.unwrap(), cx.type_i64()),
+                    );
+                    llvm::LLVMConstTrunc(sub, cx.type_i32())
+                }
+            } else {
+                scalar
+            }
+        };
+
+        llvals.push(s);
         next_offset = offset + pointer_size_bytes;
     }
     if alloc.len() >= next_offset {
         let range = next_offset..alloc.len();
         // This `inspect` is okay since we have check that it is after all provenance, it is
         // within the bounds of the allocation, and it doesn't affect interpreter execution (we
         // inspect the result after interpreter execution).
-        append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
+        append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range, use_relative_layout);
     }
 
     // Avoid wrapping in a struct if there is only a single value. This ensures
     // that LLVM is able to perform the string merging optimization if the constant
     // is a valid C string. LLVM only considers bare arrays for this optimization,
     // not arrays wrapped in a struct. LLVM handles this at:
     // https://github.com/rust-lang/llvm-project/blob/acaea3d2bb8f351b740db7ebce7d7a40b9e21488/llvm/lib/Target/TargetLoweringObjectFile.cpp#L249-L280
-    if let &[data] = &*llvals { data } else { cx.const_struct(&llvals, true) }
+    if let &[data] = &*llvals {
+        data
+    } else if use_relative_layout {
+        cx.const_array(cx.type_i32(), &llvals)
+    } else {
+        cx.const_struct(&llvals, true)
+    }
 }
 
 fn codegen_static_initializer<'ll, 'tcx>(
     cx: &CodegenCx<'ll, 'tcx>,
     def_id: DefId,
 ) -> Result<(&'ll Value, ConstAllocation<'tcx>), ErrorHandled> {
     let alloc = cx.tcx.eval_static_initializer(def_id)?;
-    Ok((const_alloc_to_llvm(cx, alloc.inner(), /*static*/ true), alloc))
+    Ok((const_alloc_to_llvm(cx, alloc.inner(), /*static*/ true, /*vtable_base*/ None), alloc))
 }
 
 fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
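To make the intent of the `LLVMDSOLocalEquivalent` / `LLVMConstSub` / `LLVMConstTrunc` chain above concrete: each provenance entry of a relative vtable stores a function's address as a signed 32-bit offset from the vtable global (`vtable_base`), and whoever reads the entry adds the sign-extended offset back. The sketch below only models that arithmetic with made-up addresses; it makes no claim about how the loading side is actually implemented in this series:

```rust
fn encode_entry(fn_addr: u64, vtable_base: u64) -> i32 {
    // Mirrors trunc(sub(ptrtoint fn, ptrtoint vtable_base)): the distance must
    // fit into 32 bits for the truncation to be lossless.
    fn_addr.wrapping_sub(vtable_base) as i32
}

fn decode_entry(entry: i32, vtable_base: u64) -> u64 {
    // Sign-extend the stored offset and add it back to the vtable's address.
    vtable_base.wrapping_add(entry as i64 as u64)
}

fn main() {
    let (vtable_base, fn_addr) = (0x1000u64, 0x0c40u64);
    let entry = encode_entry(fn_addr, vtable_base);
    assert_eq!(entry, -0x3c0);
    assert_eq!(decode_entry(entry, vtable_base), fn_addr);
}
```

Roughly speaking, wrapping the function pointer in `dso_local_equivalent` is what allows the subtraction to be a link-time constant even when the function itself might otherwise be preemptible.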
@@ -232,19 +307,29 @@ impl<'ll> CodegenCx<'ll, '_> {
         cv: &'ll Value,
         align: Align,
         kind: Option<&str>,
+    ) -> &'ll Value {
+        let gv = self.static_addr_of_mut_from_type(self.val_ty(cv), align, kind);
+        llvm::set_initializer(gv, cv);
+        gv
+    }
+
+    pub(crate) fn static_addr_of_mut_from_type(
+        &self,
+        ty: &'ll Type,
+        align: Align,
+        kind: Option<&str>,
     ) -> &'ll Value {
         let gv = match kind {
             Some(kind) if !self.tcx.sess.fewer_names() => {
                 let name = self.generate_local_symbol_name(kind);
-                let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
+                let gv = self.define_global(&name, ty).unwrap_or_else(|| {
                     bug!("symbol `{}` is already defined", name);
                 });
                 llvm::set_linkage(gv, llvm::Linkage::PrivateLinkage);
                 gv
             }
-            _ => self.define_private_global(self.val_ty(cv)),
+            _ => self.define_private_global(ty),
         };
-        llvm::set_initializer(gv, cv);
         set_global_alignment(self, gv, align);
         llvm::set_unnamed_address(gv, llvm::UnnamedAddr::Global);
         gv
@@ -277,6 +362,15 @@ impl<'ll> CodegenCx<'ll, '_> {
         gv
     }
 
+    pub(crate) fn static_addr_of_impl_for_gv(&self, cv: &'ll Value, gv: &'ll Value) -> &'ll Value {
+        assert!(!self.const_globals.borrow().contains_key(&cv));
+        let mut binding = self.const_globals.borrow_mut();
+        binding.insert(cv, gv);
+        llvm::set_initializer(gv, cv);
+        llvm::set_global_constant(gv, true);
+        gv
+    }
+
     #[instrument(level = "debug", skip(self))]
     pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
         let instance = Instance::mono(self.tcx, def_id);
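The `static_addr_of_mut` split and the new `static_addr_of_impl_for_gv` exist because a relative vtable's initializer refers to the vtable global's own address: the global has to be created from its type alone before `const_alloc_to_llvm` can be called with `Some(gv)` as `vtable_base`, and only afterwards can the initializer be attached. The actual vtable emission site is not part of this diff; the toy model below (all names and values hypothetical) only illustrates that ordering and the `const_globals` caching:

```rust
use std::collections::HashMap;

// Toy stand-ins for LLVM values; everything here is illustrative only.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Value(u32);

#[derive(Default)]
struct Cx {
    const_globals: HashMap<Value, Value>, // initializer -> global, like `const_globals`
    next_id: u32,
}

impl Cx {
    // Like `static_addr_of_mut_from_type`: a global is created from a type alone,
    // so it exists (and can be referenced) before any initializer is computed.
    fn static_addr_of_mut_from_type(&mut self) -> Value {
        self.next_id += 1;
        Value(self.next_id)
    }

    // Like `static_addr_of_impl_for_gv`: attach the initializer that was computed
    // relative to the pre-created global, and cache the pair.
    fn static_addr_of_impl_for_gv(&mut self, cv: Value, gv: Value) -> Value {
        assert!(!self.const_globals.contains_key(&cv));
        self.const_globals.insert(cv, gv);
        gv
    }
}

fn main() {
    let mut cx = Cx::default();
    let vtable_gv = cx.static_addr_of_mut_from_type(); // 1. reserve the global
    let init = Value(42); // 2. stands in for const_alloc_to_llvm(.., Some(vtable_gv))
    assert_eq!(cx.static_addr_of_impl_for_gv(init, vtable_gv), vtable_gv); // 3. attach
}
```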