1use std::iter;
2
3use rustc_abi::Primitive::Pointer;
4use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
5use rustc_hir as hir;
6use rustc_hir::lang_items::LangItem;
7use rustc_middle::bug;
8use rustc_middle::query::Providers;
9use rustc_middle::ty::layout::{
10 FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
11};
12use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
13use rustc_session::config::OptLevel;
14use rustc_span::DUMMY_SP;
15use rustc_span::def_id::DefId;
16use rustc_target::callconv::{
17 AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
18};
19use tracing::debug;
20
21pub(crate) fn provide(providers: &mut Providers) {
22 *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
23}
24
/// Computes the `FnSig` to use for the call ABI of `instance`.
///
/// For plain `FnDef`s this is the declared signature (with bound regions
/// erased). For closures, coroutine-closures, coroutines, and the various
/// shims, the signature of the *generated* function is synthesized here:
/// the environment/receiver argument is prepended, and coroutine resume and
/// return types are wrapped in the appropriate lang items (`Pin`, `Poll`,
/// `Option`, `CoroutineState`).
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    // A thread-local shim is a nullary accessor returning a pointer to the
    // thread-local value of `instance.def_id()`.
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Rust,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // A vtable shim is invoked through the vtable with a `*mut Self`
            // receiver, so rewrite the first input accordingly.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            // The environment argument (`self`, `&self`, or `&mut self`,
            // depending on the closure kind) is prepended to the inputs.
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    // The coroutine-constructing shims always consume the
                    // closure as `FnOnce`, regardless of its inferred kind.
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    // The shim either borrows the closure (so the coroutine
                    // can reborrow the captures) or takes it by value.
                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                // Inputs: environment, then the (still-tupled) arguments.
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                // Calling a coroutine-closure returns the coroutine it
                // constructs, instantiated for the chosen calling kind.
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            // `gen` bodies take a plain `&mut self`; all other coroutine
            // flavors are resumed through `Pin<&mut Self>`.
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // The resume argument (if any) and the return type depend on the
            // coroutine flavor.
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // `async` bodies yield `()`, resume with a task context,
                    // and return `Poll<return_ty>`.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // Sanity check: the declared resume type must be the
                    // `ResumeTy` lang item that lowering substitutes for the
                    // task context.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `gen` bodies take no resume argument and return
                    // `Option<yield_ty>` per resumption.
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    // The yield type of an `async gen` body already encodes
                    // the combined poll/completion state, so it is used
                    // directly as the return type. NOTE(review): presumably a
                    // `Poll<Option<_>>`-shaped lang item — confirm against
                    // the async-gen lowering.
                    let ret_ty = sig.yield_ty;

                    // Same `ResumeTy` sanity check as the `async` case.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // Plain coroutines resume with the declared resume type
                    // and return `CoroutineState<yield_ty, return_ty>`.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `gen` coroutines have no resume argument.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}
243
244fn fn_abi_of_fn_ptr<'tcx>(
245 tcx: TyCtxt<'tcx>,
246 query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
247) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
248 let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
249 fn_abi_new_uncached(
250 &LayoutCx::new(tcx, typing_env),
251 tcx.instantiate_bound_regions_with_erased(sig),
252 extra_args,
253 None,
254 )
255}
256
257fn fn_abi_of_instance<'tcx>(
258 tcx: TyCtxt<'tcx>,
259 query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
260) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
261 let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
262 fn_abi_new_uncached(
263 &LayoutCx::new(tcx, typing_env),
264 fn_sig_for_fn_abi(tcx, instance, typing_env),
265 extra_args,
266 Some(instance),
267 )
268}
269
/// Computes the argument attributes (`zext`, `noundef`, `nonnull`, `noalias`,
/// `readonly`, alignment/size info) for one scalar component of a Rust-ABI
/// argument or return value.
///
/// `offset` is the scalar's offset within `layout` (non-zero for the second
/// half of a scalar pair). `drop_target_pointee` is `Some(T)` when this
/// scalar is the `*mut T` argument of `drop_in_place`, whose pointer is known
/// to be non-null and exclusive even though its type is a raw pointer.
fn arg_attrs_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) -> ArgAttributes {
    let mut attrs = ArgAttributes::new();

    // Booleans are always 0 or 1: zero-extend and mark fully initialized.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return attrs;
    }

    // Any scalar that may not be left uninitialized can carry `noundef`.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Everything below applies only to (initialized) pointer scalars.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };

    // Pointers whose valid range excludes 0 — and the `drop_in_place`
    // target, which is never null — are `nonnull`.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        // Safe pointers (references, boxes) carry a known `PointerKind`.
        // The `drop_in_place` raw pointer is treated like a mutable
        // reference to its pointee, with the pointee's `Unpin`-ness.
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            // Clamp the advertised alignment to what the target can
            // reliably guarantee.
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

            // A zero `pointee_size` withholds the size claim for pointers
            // that may not stay dereferenceable for their whole size:
            // boxes, non-frozen shared refs (interior mutability), and
            // non-`Unpin` mutable refs. NOTE(review): how the backend turns
            // this into `dereferenceable` is decided downstream — confirm
            // there.
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // `noalias` emission is gated by the `-Z box-noalias` and
            // `-Z mutable-noalias` unstable options.
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // `noalias`: frozen shared refs, `Unpin` mutable refs (if
            // enabled), and global `Unpin` boxes (if enabled).
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // Aliasing attributes are never applied to return values.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            // A frozen shared reference is never written through.
            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
            }
        }
    }

    attrs
}
365
366fn fn_abi_sanity_check<'tcx>(
368 cx: &LayoutCx<'tcx>,
369 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
370 spec_abi: ExternAbi,
371) {
372 fn fn_arg_sanity_check<'tcx>(
373 cx: &LayoutCx<'tcx>,
374 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
375 spec_abi: ExternAbi,
376 arg: &ArgAbi<'tcx, Ty<'tcx>>,
377 ) {
378 let tcx = cx.tcx();
379
380 if spec_abi.is_rustic_abi() {
381 if arg.layout.is_zst() {
382 assert!(arg.is_ignore());
385 }
386 if let PassMode::Indirect { on_stack, .. } = arg.mode {
387 assert!(!on_stack, "rust abi shouldn't use on_stack");
388 }
389 }
390
391 match &arg.mode {
392 PassMode::Ignore => {
393 assert!(arg.layout.is_zst());
394 }
395 PassMode::Direct(_) => {
396 match arg.layout.backend_repr {
401 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
402 BackendRepr::ScalarPair(..) => {
403 panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
404 }
405 BackendRepr::Memory { sized } => {
406 assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);
409
410 assert!(
416 matches!(spec_abi, ExternAbi::Unadjusted),
417 "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
418 Problematic type: {:#?}",
419 arg.layout,
420 );
421 }
422 }
423 }
424 PassMode::Pair(_, _) => {
425 assert!(
428 matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
429 "PassMode::Pair for type {}",
430 arg.layout.ty
431 );
432 }
433 PassMode::Cast { .. } => {
434 assert!(arg.layout.is_sized());
436 }
437 PassMode::Indirect { meta_attrs: None, .. } => {
438 assert!(arg.layout.is_sized());
443 }
444 PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
445 assert!(arg.layout.is_unsized() && !on_stack);
447 let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
449 if matches!(tail.kind(), ty::Foreign(..)) {
450 panic!("unsized arguments must not be `extern` types");
455 }
456 }
457 }
458 }
459
460 for arg in fn_abi.args.iter() {
461 fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
462 }
463 fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
464}
465
/// Builds the `FnAbi` for a function with signature `sig`.
///
/// `extra_args` are argument types beyond the declared inputs (from a
/// C-variadic call site, or empty). `instance` is `Some` when the callee is a
/// known instance, enabling instance-specific adjustments: the hidden
/// caller-location argument, virtual-call receiver thinning, and deduced
/// parameter attributes.
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    // For virtual calls and TLS-shim calls the instance's `DefId` does not
    // describe the code that actually runs, so it is withheld from
    // def-id-based decisions (`fn_can_unwind`, deduced attributes) below.
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
        (
            // `#[track_caller]` callees receive a hidden trailing
            // caller-location argument.
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    // Map the declared ABI onto the target's canonical calling convention.
    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        // "rust-call" functions receive their arguments as one trailing
        // tuple; untuple it here and treat the elements as extra arguments.
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    // `drop_in_place` / `async_drop_in_place` get special pointer-attribute
    // treatment for their first argument (see `arg_attrs_for_rust_scalar`).
    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    // Computes the `ArgAbi` for one input (`arg_idx == Some(i)`) or for the
    // return place (`arg_idx == None`).
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        // The drop target must be a raw pointer; record its pointee so the
        // scalar attributes can treat it like a unique, non-null reference.
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        // Layout errors are arena-allocated so they can be returned by
        // reference from this query.
        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            // Virtual calls pass the receiver as a thin pointer; the
            // metadata is supplied by vtable dispatch instead.
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
        });

        // ZSTs occupy no space and are not passed at all.
        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        // Argument order: declared inputs, then extra (variadic/untupled)
        // arguments, then the hidden caller-location argument (if any).
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        // Count of the leading "fixed" arguments; everything past this
        // (extra/variadic, caller location) is non-fixed.
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(
            tcx,
            determined_fn_def_id,
            sig.abi,
        ),
    };
    // Apply per-ABI adjustments, then verify the result's invariants.
    fn_abi_adjust_for_abi(
        cx,
        &mut fn_abi,
        sig.abi,
        determined_fn_def_id,
    );
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}
582
583#[tracing::instrument(level = "trace", skip(cx))]
584fn fn_abi_adjust_for_abi<'tcx>(
585 cx: &LayoutCx<'tcx>,
586 fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
587 abi: ExternAbi,
588 fn_def_id: Option<DefId>,
589) {
590 if abi == ExternAbi::Unadjusted {
591 fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
594 if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
597 assert!(
598 arg.layout.backend_repr.is_sized(),
599 "'unadjusted' ABI does not support unsized arguments"
600 );
601 }
602 arg.make_direct_deprecated();
603 }
604
605 unadjust(&mut fn_abi.ret);
606 for arg in fn_abi.args.iter_mut() {
607 unadjust(arg);
608 }
609 return;
610 }
611
612 let tcx = cx.tcx();
613
614 if abi.is_rustic_abi() {
615 fn_abi.adjust_for_rust_abi(cx);
616
617 let deduced_param_attrs =
621 if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
622 fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
623 } else {
624 &[]
625 };
626
627 for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
628 if arg.is_ignore() {
629 continue;
630 }
631
632 if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
638 if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx)
642 && deduced_param_attrs.read_only
643 {
644 attrs.regular.insert(ArgAttribute::ReadOnly);
645 debug!("added deduced read-only attribute");
646 }
647 }
648 }
649 } else {
650 fn_abi.adjust_for_foreign_abi(cx, abi);
651 }
652}
653
654#[tracing::instrument(level = "debug", skip(cx))]
655fn make_thin_self_ptr<'tcx>(
656 cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
657 layout: TyAndLayout<'tcx>,
658) -> TyAndLayout<'tcx> {
659 let tcx = cx.tcx();
660 let wide_pointer_ty = if layout.is_unsized() {
661 Ty::new_mut_ptr(tcx, layout.ty)
664 } else {
665 match layout.backend_repr {
666 BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
667 _ => bug!("receiver type has unsupported layout: {:?}", layout),
668 }
669
670 let mut wide_pointer_layout = layout;
676 while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
677 wide_pointer_layout = wide_pointer_layout
678 .non_1zst_field(cx)
679 .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
680 .1
681 }
682
683 wide_pointer_layout.ty
684 };
685
686 let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);
690
691 TyAndLayout {
692 ty: wide_pointer_ty,
693
694 ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
697 }
698}