@@ -6,6 +6,7 @@ use rustc_codegen_ssa::traits::{
 };
 use rustc_middle::mir::Mutability;
 use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, PointerArithmetic, Scalar};
+use rustc_middle::ty::Instance;
 use rustc_middle::ty::layout::LayoutOf;
 
 use crate::context::CodegenCx;
@@ -109,7 +110,7 @@ pub fn type_is_pointer(typ: Type<'_>) -> bool {
     typ.get_pointee().is_some()
 }
 
-impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
+impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
     fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> {
         if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
     }
@@ -220,6 +221,63 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
         None
     }
 
+    fn alloc_to_backend(
+        &self,
+        global_alloc: GlobalAlloc<'tcx>,
+    ) -> Result<(RValue<'gcc>, Option<Instance<'tcx>>), u64> {
+        let alloc = match global_alloc {
+            GlobalAlloc::Function { instance, .. } => {
+                return Ok((self.get_fn_addr(instance), Some(instance)));
+            }
+            GlobalAlloc::Static(def_id) => {
+                assert!(self.tcx.is_static(def_id));
+                return Ok((
+                    self.get_static(def_id).get_address(None),
+                    Some(Instance::mono(self.tcx, def_id)),
+                ));
+            }
+            GlobalAlloc::TypeId { .. } => {
+                // Drop the provenance; the offset contains the bytes of the hash, so
+                // just return 0 as the base address.
+                return Err(0);
+            }
+
+            GlobalAlloc::Memory(alloc) => {
+                if alloc.inner().len() == 0 {
+                    // For ZSTs, directly codegen an aligned pointer.
+                    // This avoids generating a zero-sized constant value and actually needing a
+                    // real address at runtime.
+                    return Err(alloc.inner().align.bytes());
+                }
+
+                alloc
+            }
+
+            GlobalAlloc::VTable(ty, dyn_ty) => {
+                self.tcx
+                    .global_alloc(self.tcx.vtable_allocation((
+                        ty,
+                        dyn_ty.principal().map(|principal| {
+                            self.tcx.instantiate_bound_regions_with_erased(principal)
+                        }),
+                    )))
+                    .unwrap_memory()
+            }
+        };
+
+        let init = self.const_data_from_alloc(alloc);
+        let alloc = alloc.inner();
+        let value = match alloc.mutability {
+            Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
+            _ => self.static_addr_of(init, alloc.align, None),
+        };
+        if !self.sess().fewer_names() {
+            // TODO(antoyo): set value name.
+        }
+
+        Ok((value, None))
+    }
+
     fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> {
         let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
         match cv {
@@ -241,56 +299,16 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
             Scalar::Ptr(ptr, _size) => {
                 let (prov, offset) = ptr.prov_and_relative_offset();
                 let alloc_id = prov.alloc_id();
-                let base_addr = match self.tcx.global_alloc(alloc_id) {
-                    GlobalAlloc::Memory(alloc) => {
-                        // For ZSTs directly codegen an aligned pointer.
-                        // This avoids generating a zero-sized constant value and actually needing a
-                        // real address at runtime.
-                        if alloc.inner().len() == 0 {
-                            let val = alloc.inner().align.bytes().wrapping_add(offset.bytes());
-                            let val = self.const_usize(self.tcx.truncate_to_target_usize(val));
-                            return if matches!(layout.primitive(), Pointer(_)) {
-                                self.context.new_cast(None, val, ty)
-                            } else {
-                                self.const_bitcast(val, ty)
-                            };
-                        }
-
-                        let init = self.const_data_from_alloc(alloc);
-                        let alloc = alloc.inner();
-                        let value = match alloc.mutability {
-                            Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
-                            _ => self.static_addr_of(init, alloc.align, None),
+                let base_addr = match self.alloc_to_backend(self.tcx.global_alloc(alloc_id)) {
+                    Ok((base_addr, _)) => base_addr,
+                    Err(base_addr) => {
+                        let val = base_addr.wrapping_add(offset.bytes());
+                        let val = self.const_usize(self.tcx.truncate_to_target_usize(val));
+                        return if matches!(layout.primitive(), Pointer(_)) {
+                            self.context.new_cast(None, val, ty)
+                        } else {
+                            self.const_bitcast(val, ty)
                         };
-                        if !self.sess().fewer_names() {
-                            // TODO(antoyo): set value name.
-                        }
-                        value
-                    }
-                    GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance),
-                    GlobalAlloc::VTable(ty, dyn_ty) => {
-                        let alloc = self
-                            .tcx
-                            .global_alloc(self.tcx.vtable_allocation((
-                                ty,
-                                dyn_ty.principal().map(|principal| {
-                                    self.tcx.instantiate_bound_regions_with_erased(principal)
-                                }),
-                            )))
-                            .unwrap_memory();
-                        let init = self.const_data_from_alloc(alloc);
-                        self.static_addr_of(init, alloc.inner().align, None)
-                    }
-                    GlobalAlloc::TypeId { .. } => {
-                        let val = self.const_usize(offset.bytes());
-                        // This is still a variable of pointer type, even though we only use the provenance
-                        // of that pointer in CTFE and Miri. But to make LLVM's type system happy,
-                        // we need an int-to-ptr cast here (it doesn't matter at all which provenance that picks).
-                        return self.context.new_cast(None, val, ty);
-                    }
-                    GlobalAlloc::Static(def_id) => {
-                        assert!(self.tcx.is_static(def_id));
-                        self.get_static(def_id).get_address(None)
                     }
                 };
                 let ptr_type = base_addr.get_type();
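
Note: a rough sketch of the contract the new helper hands to callers, mirroring the rewritten match in scalar_to_backend above (illustrative only, not part of the patch; it assumes `cx: &CodegenCx` plus `alloc_id`, `offset`, and `ty` in scope). `Ok((value, instance))` carries a real backend address, plus the `Instance` when the allocation is a function or a static; `Err(base)` means no allocation was materialized and `base` is a plain integer base address (the alignment for zero-sized allocations, 0 for `TypeId` hashes) that the caller must combine with the pointer's offset itself:

    // Hypothetical caller of the new helper.
    let ptr = match cx.alloc_to_backend(cx.tcx.global_alloc(alloc_id)) {
        // A real allocation: `base_addr` is already an address-valued RValue.
        Ok((base_addr, _instance)) => base_addr,
        // No allocation exists: fold the offset into the integer base,
        // truncate to the target's usize, and int-to-ptr cast the result.
        Err(base) => {
            let val = base.wrapping_add(offset.bytes());
            let val = cx.const_usize(cx.tcx.truncate_to_target_usize(val));
            cx.context.new_cast(None, val, ty)
        }
    };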