1 use std::fmt::Write;
2
3 use gccjit::{Struct, Type};
4 use crate::rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, LayoutTypeMethods};
5 use rustc_middle::bug;
6 use rustc_middle::ty::{self, Ty, TypeFoldable};
7 use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
8 use rustc_middle::ty::print::with_no_trimmed_paths;
9 use rustc_target::abi::{self, Abi, F32, F64, FieldsShape, Int, Integer, Pointer, PointeeInfo, Size, TyAbiInterface, Variants};
10 use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
11
12 use crate::abi::{FnAbiGccExt, GccType};
13 use crate::context::CodegenCx;
14 use crate::type_::struct_fields;
15
16 impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
type_from_unsigned_integer(&self, i: Integer) -> Type<'gcc>17 fn type_from_unsigned_integer(&self, i: Integer) -> Type<'gcc> {
18 use Integer::*;
19 match i {
20 I8 => self.type_u8(),
21 I16 => self.type_u16(),
22 I32 => self.type_u32(),
23 I64 => self.type_u64(),
24 I128 => self.type_u128(),
25 }
26 }
27 }
28
uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout<'tcx>, defer: &mut Option<(Struct<'gcc>, TyAndLayout<'tcx>)>) -> Type<'gcc>29 pub fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout<'tcx>, defer: &mut Option<(Struct<'gcc>, TyAndLayout<'tcx>)>) -> Type<'gcc> {
30 match layout.abi {
31 Abi::Scalar(_) => bug!("handled elsewhere"),
32 Abi::Vector { ref element, count } => {
33 let element = layout.scalar_gcc_type_at(cx, element, Size::ZERO);
34 return cx.context.new_vector_type(element, count);
35 },
36 Abi::ScalarPair(..) => {
37 return cx.type_struct(
38 &[
39 layout.scalar_pair_element_gcc_type(cx, 0, false),
40 layout.scalar_pair_element_gcc_type(cx, 1, false),
41 ],
42 false,
43 );
44 }
45 Abi::Uninhabited | Abi::Aggregate { .. } => {}
46 }
47
48 let name = match layout.ty.kind() {
49 // FIXME(eddyb) producing readable type names for trait objects can result
50 // in problematically distinct types due to HRTB and subtyping (see #47638).
51 // ty::Dynamic(..) |
52 ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str
53 if !cx.sess().fewer_names() =>
54 {
55 let mut name = with_no_trimmed_paths(|| layout.ty.to_string());
56 if let (&ty::Adt(def, _), &Variants::Single { index }) =
57 (layout.ty.kind(), &layout.variants)
58 {
59 if def.is_enum() && !def.variants.is_empty() {
60 write!(&mut name, "::{}", def.variants[index].ident).unwrap();
61 }
62 }
63 if let (&ty::Generator(_, _, _), &Variants::Single { index }) =
64 (layout.ty.kind(), &layout.variants)
65 {
66 write!(&mut name, "::{}", ty::GeneratorSubsts::variant_name(index)).unwrap();
67 }
68 Some(name)
69 }
70 ty::Adt(..) => {
71 // If `Some` is returned then a named struct is created in LLVM. Name collisions are
72 // avoided by LLVM (with increasing suffixes). If rustc doesn't generate names then that
73 // can improve perf.
74 // FIXME(antoyo): I don't think that's true for libgccjit.
75 Some(String::new())
76 }
77 _ => None,
78 };
79
80 match layout.fields {
81 FieldsShape::Primitive | FieldsShape::Union(_) => {
82 let fill = cx.type_padding_filler(layout.size, layout.align.abi);
83 let packed = false;
84 match name {
85 None => cx.type_struct(&[fill], packed),
86 Some(ref name) => {
87 let gcc_type = cx.type_named_struct(name);
88 cx.set_struct_body(gcc_type, &[fill], packed);
89 gcc_type.as_type()
90 },
91 }
92 }
93 FieldsShape::Array { count, .. } => cx.type_array(layout.field(cx, 0).gcc_type(cx, true), count),
94 FieldsShape::Arbitrary { .. } =>
95 match name {
96 None => {
97 let (gcc_fields, packed) = struct_fields(cx, layout);
98 cx.type_struct(&gcc_fields, packed)
99 },
100 Some(ref name) => {
101 let gcc_type = cx.type_named_struct(name);
102 *defer = Some((gcc_type, layout));
103 gcc_type.as_type()
104 },
105 },
106 }
107 }
108
/// Extension trait adding GCC-type lowering helpers to `TyAndLayout`
/// (mirrors the LLVM backend's `LayoutLlvmExt`).
pub trait LayoutGccExt<'tcx> {
    /// Whether values of this layout are passed as a single immediate value.
    fn is_gcc_immediate(&self) -> bool;
    /// Whether values of this layout are passed as a pair of immediates.
    fn is_gcc_scalar_pair(&self) -> bool;
    /// The in-memory GCC type for this layout (see impl for details).
    fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, set_fields: bool) -> Type<'gcc>;
    /// The GCC type used when the value is an immediate (bool becomes i1).
    fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
    /// The GCC type for a single scalar located at `offset` within this layout.
    fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc>;
    /// The GCC type for element `index` (0 or 1) of a scalar pair.
    fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc>;
    /// Maps a layout field index to the corresponding GCC struct field index.
    fn gcc_field_index(&self, index: usize) -> u64;
    /// Pointee layout info (size/alignment) at `offset`, if this is a pointer.
    fn pointee_info_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, offset: Size) -> Option<PointeeInfo>;
}
119
impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
    /// A value is an immediate when it fits in a single SSA value:
    /// scalars and vectors always do; aggregates only when zero-sized.
    fn is_gcc_immediate(&self) -> bool {
        match self.abi {
            Abi::Scalar(_) | Abi::Vector { .. } => true,
            Abi::ScalarPair(..) => false,
            Abi::Uninhabited | Abi::Aggregate { .. } => self.is_zst(),
        }
    }

    /// True exactly for `Abi::ScalarPair` layouts (passed as two immediates).
    fn is_gcc_scalar_pair(&self) -> bool {
        match self.abi {
            Abi::ScalarPair(..) => true,
            Abi::Uninhabited | Abi::Scalar(_) | Abi::Vector { .. } | Abi::Aggregate { .. } => false,
        }
    }

    /// Gets the GCC type corresponding to a Rust type, i.e., `rustc_middle::ty::Ty`.
    /// The pointee type of the pointer in `PlaceRef` is always this type.
    /// For sized types, it is also the right LLVM type for an `alloca`
    /// containing a value of that type, and most immediates (except `bool`).
    /// Unsized types, however, are represented by a "minimal unit", e.g.
    /// `[T]` becomes `T`, while `str` and `Trait` turn into `i8` - this
    /// is useful for indexing slices, as `&[T]`'s data pointer is `T*`.
    /// If the type is an unsized struct, the regular layout is generated,
    /// with the inner-most trailing unsized field using the "minimal unit"
    /// of that field's type - this is useful for taking the address of
    /// that field and ensuring the struct has the right alignment.
    //TODO(antoyo): do we still need the set_fields parameter?
    fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, set_fields: bool) -> Type<'gcc> {
        if let Abi::Scalar(ref scalar) = self.abi {
            // Use a different cache for scalars because pointers to DSTs
            // can be either fat or thin (data pointers of fat pointers).
            if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) {
                return ty;
            }
            let ty =
                match *self.ty.kind() {
                    // Pointers/references keep their pointee's real type
                    // rather than the generic scalar lowering below.
                    ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
                        cx.type_ptr_to(cx.layout_of(ty).gcc_type(cx, set_fields))
                    }
                    ty::Adt(def, _) if def.is_box() => {
                        cx.type_ptr_to(cx.layout_of(self.ty.boxed_ty()).gcc_type(cx, true))
                    }
                    ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())),
                    _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
                };
            cx.scalar_types.borrow_mut().insert(self.ty, ty);
            return ty;
        }

        // Check the cache.
        let variant_index =
            match self.variants {
                Variants::Single { index } => Some(index),
                _ => None,
            };
        let cached_type = cx.types.borrow().get(&(self.ty, variant_index)).cloned();
        if let Some(ty) = cached_type {
            let type_to_set_fields = cx.types_with_fields_to_set.borrow_mut().remove(&ty);
            if let Some((struct_type, layout)) = type_to_set_fields {
                // Since we might be trying to generate a type containing another type which is not
                // completely generated yet, we deferred setting the fields until now.
                let (fields, packed) = struct_fields(cx, layout);
                cx.set_struct_body(struct_type, &fields, packed);
            }
            return ty;
        }

        assert!(!self.ty.has_escaping_bound_vars(), "{:?} has escaping bound vars", self.ty);

        // Make sure lifetimes are erased, to avoid generating distinct LLVM
        // types for Rust types that only differ in the choice of lifetimes.
        let normal_ty = cx.tcx.erase_regions(self.ty);

        let mut defer = None;
        let ty =
            if self.ty != normal_ty {
                // Region-bearing type: lower the erased type (re-applying the
                // variant selection) so it shares the erased type's GCC type.
                let mut layout = cx.layout_of(normal_ty);
                if let Some(v) = variant_index {
                    layout = layout.for_variant(cx, v);
                }
                layout.gcc_type(cx, true)
            }
            else {
                uncached_gcc_type(cx, *self, &mut defer)
            };

        // Cache before filling deferred bodies, so recursive fields can
        // look the type up.
        cx.types.borrow_mut().insert((self.ty, variant_index), ty);

        if let Some((ty, layout)) = defer {
            let (fields, packed) = struct_fields(cx, layout);
            cx.set_struct_body(ty, &fields, packed);
        }

        ty
    }

    /// Like `gcc_type`, but booleans become `i1` since they are held as
    /// immediates, not stored to memory.
    fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
        if let Abi::Scalar(ref scalar) = self.abi {
            if scalar.is_bool() {
                return cx.type_i1();
            }
        }
        self.gcc_type(cx, true)
    }

    /// Lowers one scalar primitive. `offset` is the scalar's position within
    /// `self`, used only to look up pointee info for pointer scalars.
    fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc> {
        match scalar.value {
            Int(i, true) => cx.type_from_integer(i),
            Int(i, false) => cx.type_from_unsigned_integer(i),
            F32 => cx.type_f32(),
            F64 => cx.type_f64(),
            Pointer => {
                // If we know the alignment, pick something better than i8.
                let pointee =
                    if let Some(pointee) = self.pointee_info_at(cx, offset) {
                        cx.type_pointee_for_align(pointee.align)
                    }
                    else {
                        cx.type_i8()
                    };
                cx.type_ptr_to(pointee)
            }
        }
    }

    fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
        // TODO(antoyo): remove llvm hack:
        // HACK(eddyb) special-case fat pointers until LLVM removes
        // pointee types, to avoid bitcasting every `OperandRef::deref`.
        match self.ty.kind() {
            ty::Ref(..) | ty::RawPtr(_) => {
                return self.field(cx, index).gcc_type(cx, true);
            }
            ty::Adt(def, _) if def.is_box() => {
                // Treat Box<T> as *mut T so it shares the fat-pointer special case.
                let ptr_ty = cx.tcx.mk_mut_ptr(self.ty.boxed_ty());
                return cx.layout_of(ptr_ty).scalar_pair_element_gcc_type(cx, index, immediate);
            }
            _ => {}
        }

        let (a, b) = match self.abi {
            Abi::ScalarPair(ref a, ref b) => (a, b),
            _ => bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self),
        };
        let scalar = [a, b][index];

        // Make sure to return the same type `immediate_gcc_type` would when
        // dealing with an immediate pair. This means that `(bool, bool)` is
        // effectively represented as `{i8, i8}` in memory and two `i1`s as an
        // immediate, just like `bool` is typically `i8` in memory and only `i1`
        // when immediate. We need to load/store `bool` as `i8` to avoid
        // crippling LLVM optimizations or triggering other LLVM bugs with `i1`.
        // TODO(antoyo): these bugs certainly don't happen in this case since the bool type is used instead of i1.
        if scalar.is_bool() {
            return cx.type_i1();
        }

        // The second element starts after the first, aligned for the second's type.
        let offset =
            if index == 0 {
                Size::ZERO
            }
            else {
                a.value.size(cx).align_to(b.value.align(cx).abi)
            };
        self.scalar_gcc_type_at(cx, scalar, offset)
    }

    /// Maps a layout field index to the GCC struct field index. For arbitrary
    /// field layouts, `struct_fields` interleaves padding fields, so real
    /// field i lives at GCC index `1 + 2*memory_index(i)`.
    fn gcc_field_index(&self, index: usize) -> u64 {
        match self.abi {
            Abi::Scalar(_) | Abi::ScalarPair(..) => {
                bug!("TyAndLayout::gcc_field_index({:?}): not applicable", self)
            }
            _ => {}
        }
        match self.fields {
            FieldsShape::Primitive | FieldsShape::Union(_) => {
                bug!("TyAndLayout::gcc_field_index({:?}): not applicable", self)
            }

            FieldsShape::Array { .. } => index as u64,

            FieldsShape::Arbitrary { .. } => 1 + (self.fields.memory_index(index) as u64) * 2,
        }
    }

    /// Cached wrapper around `Ty::ty_and_layout_pointee_info_at`.
    fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo> {
        if let Some(&pointee) = cx.pointee_infos.borrow().get(&(self.ty, offset)) {
            return pointee;
        }

        let result = Ty::ty_and_layout_pointee_info_at(*self, cx, offset);

        // Cache `None` results too, so misses are not recomputed.
        cx.pointee_infos.borrow_mut().insert((self.ty, offset), result);
        result
    }
}
317
// Bridges the backend-agnostic `LayoutTypeMethods` trait to the
// `LayoutGccExt` helpers above; each method is a thin delegation.
impl<'gcc, 'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
    /// In-memory GCC type for `layout`, with struct fields fully set.
    fn backend_type(&self, layout: TyAndLayout<'tcx>) -> Type<'gcc> {
        layout.gcc_type(self, true)
    }

    /// GCC type used when `layout` is held as an immediate (bool => i1).
    fn immediate_backend_type(&self, layout: TyAndLayout<'tcx>) -> Type<'gcc> {
        layout.immediate_gcc_type(self)
    }

    fn is_backend_immediate(&self, layout: TyAndLayout<'tcx>) -> bool {
        layout.is_gcc_immediate()
    }

    fn is_backend_scalar_pair(&self, layout: TyAndLayout<'tcx>) -> bool {
        layout.is_gcc_scalar_pair()
    }

    fn backend_field_index(&self, layout: TyAndLayout<'tcx>, index: usize) -> u64 {
        layout.gcc_field_index(index)
    }

    fn scalar_pair_element_backend_type(&self, layout: TyAndLayout<'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
        layout.scalar_pair_element_gcc_type(self, index, immediate)
    }

    /// GCC type for an ABI cast target (used when passing aggregates by value).
    fn cast_backend_type(&self, ty: &CastTarget) -> Type<'gcc> {
        ty.gcc_type(self)
    }

    fn fn_ptr_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> {
        fn_abi.ptr_to_gcc_type(self)
    }

    // Not yet supported by this backend.
    fn reg_backend_type(&self, _ty: &Reg) -> Type<'gcc> {
        unimplemented!();
    }

    fn fn_decl_backend_type(&self, _fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> {
        // FIXME(antoyo): return correct type.
        self.type_void()
    }
}
360