|
1 | 1 | #![cfg_attr(USE_RUSTC_FEATURES, feature(lint_reasons))] |
| 2 | +#![cfg_attr(USE_RUSTC_FEATURES, feature(raw_ref_op))] |
| 3 | +#![cfg_attr(feature = "alloc", feature(allocator_api))] |
2 | 4 |
|
3 | | -use core::ptr; |
4 | | - |
| 5 | +use core::{ |
| 6 | + pin::Pin, |
| 7 | + sync::atomic::{AtomicUsize, Ordering}, |
| 8 | +}; |
5 | 9 | use pin_init::*; |
6 | 10 |
|
| 11 | +#[allow(unused_attributes)] |
| 12 | +#[path = "../examples/mutex.rs"] |
| 13 | +mod mutex; |
| 14 | +use mutex::*; |
| 15 | + |
7 | 16 | #[pin_data] |
8 | | -struct TupleStruct(#[pin] i32, i32); |
| 17 | +struct TupleStruct<T>(#[pin] CMutex<T>, i32); |
9 | 18 |
|
10 | | -fn init_i32(value: i32) -> impl PinInit<i32> { |
11 | | - // SAFETY: The closure always initializes `slot` with a valid `i32` value. |
12 | | - unsafe { |
13 | | - pin_init_from_closure(move |slot| { |
14 | | - // SAFETY: `slot` is provided by the initialization framework and valid for write. |
15 | | - ptr::write(slot, value); |
16 | | - Ok(()) |
17 | | - }) |
| 19 | +fn assert_pinned_mutex<T>(_: &Pin<&mut CMutex<T>>) {} |
| 20 | + |
#[test]
fn tuple_struct_values() {
    // Baseline: index-based struct-initializer syntax (`0 <- …`, `1: …`).
    stack_pin_init!(let tuple = pin_init!(TupleStruct::<usize> { 0 <- CMutex::new(42), 1: 24 }));
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), 42);
    assert_eq!(inner.1, 24);
}
| 28 | + |
#[test]
fn tuple_struct_init_arrow_and_projection() {
    // Checks projection types and that `<-` correctly initializes the pinned tuple field.
    stack_pin_init!(let tuple = pin_init!(TupleStruct::<usize> { 0 <- CMutex::new(7), 1: 13 }));

    // A single projection suffices: `assert_pinned_mutex` only takes a shared
    // borrow of the projected field, so the same projection can be reused for
    // the value checks (the original re-projected redundantly).
    let projected = tuple.as_mut().project();
    assert_pinned_mutex(&projected._0);
    assert_eq!(*projected._0.as_ref().get_ref().lock(), 7);
    assert_eq!(*projected._1, 13);
}
| 40 | + |
#[test]
fn tuple_struct_constructor_form() {
    // Same semantics as `tuple_struct_values`, written with tuple-constructor syntax.
    stack_pin_init!(let tuple = pin_init!(TupleStruct::<usize>(<- CMutex::new(11), 29)));
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), 11);
    assert_eq!(inner.1, 29);
}
| 48 | + |
| 49 | +#[pin_data] |
| 50 | +struct DualPinned<T>(#[pin] CMutex<T>, #[pin] CMutex<T>, usize); |
| 51 | + |
#[test]
fn tuple_struct_multi_pinned_fields_projection() {
    // Both pinned tuple fields should project to `Pin<&mut CMutex<T>>` and stay usable.
    stack_pin_init!(let tuple = pin_init!(DualPinned::<usize>(<- CMutex::new(1), <- CMutex::new(2), 3)));
    let proj = tuple.as_mut().project();
    assert_pinned_mutex(&proj._0);
    assert_pinned_mutex(&proj._1);

    // Mutate every field through its projected reference…
    *proj._0.as_ref().get_ref().lock() = 10;
    *proj._1.as_ref().get_ref().lock() = 20;
    *proj._2 = 30;

    // …and observe the writes through a fresh shared borrow.
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), 10);
    assert_eq!(*inner.1.lock(), 20);
    assert_eq!(inner.2, 30);
}
| 68 | + |
#[test]
fn tuple_struct_generic_type_param_behavior() {
    // Focused on explicit generic-argument syntax (`::<u16>`) with struct-style init.
    stack_pin_init!(let tuple = pin_init!(TupleStruct::<u16> { 0 <- CMutex::new(123u16), 1: 7 }));
    let proj = tuple.as_mut().project();
    assert_pinned_mutex(&proj._0);
    // Read both fields back through the projection.
    assert_eq!(*proj._0.as_ref().get_ref().lock(), 123u16);
    assert_eq!(*proj._1, 7);
}
| 78 | + |
| 79 | +#[pin_data] |
| 80 | +struct RefTuple<'a>(#[pin] CMutex<&'a usize>, usize); |
| 81 | + |
#[test]
fn tuple_struct_lifetime_reference_behavior() {
    // Tuple init/projection with borrowed (`'a`) data flowing through the pinned field.
    let first = 111usize;
    let first_ref = &first;
    // Struct-initializer form.
    stack_pin_init!(let tuple = pin_init!(RefTuple { 0 <- CMutex::new(first_ref), 1: 3 }));
    let inner = tuple.as_ref().get_ref();
    assert_eq!(**inner.0.lock(), 111usize);
    assert_eq!(inner.1, 3);

    let second = 222usize;
    let second_ref = &second;
    // Constructor form, checked through the projection this time.
    stack_pin_init!(let tuple = pin_init!(RefTuple(<- CMutex::new(second_ref), 4)));
    let proj = tuple.as_mut().project();
    assert_pinned_mutex(&proj._0);
    assert_eq!(**proj._0.as_ref().get_ref().lock(), 222usize);
    assert_eq!(*proj._1, 4);
}
| 99 | + |
#[test]
fn tuple_struct_projection_mutation_behavior() {
    // Both projected fields must be writable through their projected references.
    stack_pin_init!(let tuple = pin_init!(TupleStruct::<usize>(<- CMutex::new(1usize), 2)));

    // Write through the projection in an explicit scope…
    {
        let proj = tuple.as_mut().project();
        *proj._0.as_ref().get_ref().lock() = 10usize;
        *proj._1 = 20;
    }

    // …then read the updated values back via a shared borrow.
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), 10usize);
    assert_eq!(inner.1, 20);
}
| 112 | + |
/// Marker type whose drops are counted by the fallible-init test below.
struct DropCounter;

/// Number of `DropCounter` values dropped so far (reset by the test that uses it).
static FALLIBLE_TUPLE_DROPS: AtomicUsize = AtomicUsize::new(0);

impl Drop for DropCounter {
    fn drop(&mut self) {
        // Record every drop so the test can verify rollback behavior.
        FALLIBLE_TUPLE_DROPS.fetch_add(1, Ordering::SeqCst);
    }
}
20 | 122 |
|
21 | | -fn init_i32_unpinned(value: i32) -> impl Init<i32> { |
22 | | - // SAFETY: The closure always initializes `slot` with a valid `i32` value. |
| 123 | +fn tuple_failing_init() -> impl PinInit<TupleStruct<DropCounter>, ()> { |
| 124 | + // SAFETY: We emulate "initialized first field, then fail" and ensure rollback leaves no |
| 125 | + // partially initialized value in `slot`. |
23 | 126 | unsafe { |
24 | | - init_from_closure(move |slot| { |
25 | | - // SAFETY: `slot` is provided by the initialization framework and valid for write. |
26 | | - ptr::write(slot, value); |
27 | | - Ok(()) |
| 127 | + pin_init_from_closure(|slot: *mut TupleStruct<DropCounter>| { |
| 128 | + // Manually initialize only field 0 to model a mid-initialization failure. |
| 129 | + let field0 = core::ptr::addr_of_mut!((*slot).0); |
| 130 | + let init0 = CMutex::new(DropCounter); |
| 131 | + // SAFETY: `field0` points into `slot`, which is valid uninitialized memory. |
| 132 | + match init0.__pinned_init(field0) { |
| 133 | + Ok(()) => {} |
| 134 | + Err(infallible) => match infallible {}, |
| 135 | + } |
| 136 | + // Explicit rollback is required before returning `Err` to avoid leaking initialized state. |
| 137 | + core::ptr::drop_in_place(field0); |
| 138 | + Err(()) |
28 | 139 | }) |
29 | 140 | } |
30 | 141 | } |
31 | 142 |
|
#[test]
fn tuple_struct_fallible_init_drops_initialized_fields() {
    // A failure after partial initialization must still drop the already-initialized field.
    FALLIBLE_TUPLE_DROPS.store(0, Ordering::SeqCst);
    stack_try_pin_init!(let outcome: TupleStruct<DropCounter> = tuple_failing_init());
    // The initializer fails by construction…
    assert!(matches!(outcome, Err(())));
    // …and the one initialized `DropCounter` must have been dropped exactly once.
    assert_eq!(FALLIBLE_TUPLE_DROPS.load(Ordering::SeqCst), 1);
}
38 | 151 |
|
| 152 | +#[pin_data] |
| 153 | +struct TupleConst<T, const N: usize>(#[pin] CMutex<[T; N]>, usize); |
| 154 | + |
#[test]
fn tuple_struct_const_generic_behavior() {
    // Tuple-field init/projection when the pinned field holds a const-generic array.
    stack_pin_init!(let tuple = pin_init!(TupleConst::<u8, 3> { 0 <- CMutex::new([1, 2, 3]), 1: 9 }));
    let proj = tuple.as_mut().project();
    assert_pinned_mutex(&proj._0);
    assert_eq!(*proj._0.as_ref().get_ref().lock(), [1, 2, 3]);
    assert_eq!(*proj._1, 9);

    // Constructor form with a different array length.
    stack_pin_init!(let tuple = pin_init!(TupleConst::<u8, 2>(<- CMutex::new([7, 8]), 5)));
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), [7, 8]);
    assert_eq!(inner.1, 5);
}
48 | 168 |
|
#[test]
fn tuple_struct_generic_inference_constructor_form() {
    // Tuple constructor form must infer `T` from the pinned-field initializer alone.
    stack_pin_init!(let tuple = pin_init!(TupleStruct(<- CMutex::new(9u32), 6)));
    let inner = tuple.as_ref().get_ref();
    assert_eq!(*inner.0.lock(), 9u32);
    assert_eq!(inner.1, 6);
}
55 | 176 |
|
56 | 177 | #[pin_data] |
57 | | -struct Triple(i32, i32, i32); |
| 178 | +struct MixedTuple<'a, T, const N: usize>(#[pin] CMutex<MixedPayload<'a, T, N>>, usize); |
| 179 | + |
| 180 | +type MixedPayload<'a, T, const N: usize> = (&'a T, [u8; N]); |
58 | 181 |
|
#[test]
fn tuple_struct_mixed_lifetime_type_const_generics() {
    // Stress case: lifetime + type + const generics in one pinned tuple field.
    let value = 77u16;
    let pair = (&value, [1, 2, 3, 4]);
    stack_pin_init!(let tuple = pin_init!(MixedTuple(<- CMutex::new(pair), 12)));

    let proj = tuple.as_mut().project();
    assert_pinned_mutex(&proj._0);
    // Hold the lock guard only as long as the payload checks need it.
    {
        let guard = proj._0.as_ref().get_ref().lock();
        assert_eq!(*guard.0, 77u16);
        assert_eq!(guard.1, [1, 2, 3, 4]);
    }
    assert_eq!(*proj._1, 12);
}
| 196 | + |
| 197 | +static PINNED_DROP_TUPLE_DROPS: AtomicUsize = AtomicUsize::new(0); |
| 198 | + |
| 199 | +#[pin_data(PinnedDrop)] |
| 200 | +struct DropTuple(#[pin] CMutex<usize>, usize); |
| 201 | + |
| 202 | +#[pinned_drop] |
| 203 | +impl PinnedDrop for DropTuple { |
| 204 | + fn drop(self: Pin<&mut Self>) { |
| 205 | + let _ = self; |
| 206 | + PINNED_DROP_TUPLE_DROPS.fetch_add(1, Ordering::SeqCst); |
| 207 | + } |
| 208 | +} |
| 209 | + |
#[test]
fn tuple_struct_pinned_drop_delegates_from_drop() {
    // `#[pin_data(PinnedDrop)]` must invoke our `PinnedDrop::drop` exactly once.
    PINNED_DROP_TUPLE_DROPS.store(0, Ordering::SeqCst);
    // Scope the value so it is dropped before the assertion runs.
    {
        stack_pin_init!(let _guard = pin_init!(DropTuple(<- CMutex::new(5usize), 1)));
    }
    assert_eq!(PINNED_DROP_TUPLE_DROPS.load(Ordering::SeqCst), 1);
}
0 commit comments