1use std::{
2 alloc::{AllocError, Layout},
3 mem::MaybeUninit,
4 ops::RangeBounds,
5};
6
7use twizzler_abi::object::{MAX_SIZE, NULLPAGE_SIZE};
8use twizzler_rt_abi::error::{ArgumentError, ResourceError};
9
10use crate::{
11 alloc::{Allocator, SingleObjectAllocator},
12 marker::{Invariant, StoreCopy},
13 ptr::{GlobalPtr, InvPtr, Ref, RefMut, RefSlice, RefSliceMut, TxRef, TxRefSlice},
14 util::same_object,
15 Result,
16};
17
18mod vec_object;
19pub use vec_object::{VecIter, VecObject};
20
21#[cfg(test)]
22mod tests;
23
/// Persistent header state for an invariant-aware vector: logical length,
/// allocated capacity (in elements), and an invariant pointer to the start of
/// the element buffer.
pub struct VecInner<T: Invariant> {
    // Number of initialized elements.
    len: usize,
    // Number of element slots the current allocation can hold.
    cap: usize,
    // Invariant pointer to the first element slot; null until the first
    // allocation (see `Vec::new_in`).
    start: InvPtr<T>,
}
29
impl<T: Invariant> VecInner<T> {
    /// Resolve `start` to a shared reference to the first element slot.
    ///
    /// The unsafe `resolve` assumes `start` points at valid storage; callers
    /// bound all access by `len`/`cap`.
    fn resolve_start(&self) -> Ref<'_, T> {
        tracing::trace!("found start as {:x}", self.start.raw());
        unsafe { self.start.resolve() }
    }

    /// Resolve `start` as a transactional reference for mutation.
    ///
    /// If the buffer lives in the same object as this header, the reference is
    /// marked nosync — presumably because syncing the header object already
    /// covers it; TODO confirm.
    fn resolve_start_tx(&self) -> Result<TxRef<T>> {
        tracing::trace!("found start as {:x}", self.start.raw());
        let mut tx = unsafe { self.start.resolve().into_tx() }?;
        if same_object(tx.raw(), self) {
            tx.nosync();
        }
        Ok(tx)
    }

    /// Grow the backing buffer to hold `newcap` elements and record `newlen`.
    ///
    /// No-op when `newcap` does not exceed the current capacity (never
    /// shrinks; note `newlen` is also discarded in that case). On success the
    /// allocator has moved the element bytes and `start`/`cap`/`len` describe
    /// the new allocation.
    fn do_realloc<Alloc: Allocator>(
        &mut self,
        newcap: usize,
        newlen: usize,
        alloc: &Alloc,
    ) -> Result<()> {
        // Reference to this header, used as the "place" the new invariant
        // pointer is constructed relative to.
        let place = unsafe { Ref::from_ptr(self) };
        if newcap <= self.cap {
            return Ok(());
        }

        let new_layout = Layout::array::<T>(newcap).map_err(|_| AllocError)?;
        let old_layout = Layout::array::<T>(self.cap).map_err(|_| AllocError)?;

        let old_global = self.start.global().cast();
        let new_alloc = unsafe { alloc.realloc(old_global, old_layout, new_layout.size()) }?;
        let new_start = InvPtr::new(place, new_alloc.cast())?;
        self.start = new_start;
        self.cap = newcap;
        self.len = newlen;
        tracing::trace!(
            "set start: {:x} len {}, cap {}",
            self.start.raw(),
            self.len,
            self.cap
        );

        Ok(())
    }

    /// Remove the element at `idx` by shifting the tail left one slot
    /// (byte-wise) and decrementing `len`.
    ///
    /// The caller must already have dropped the element at `idx`; this routine
    /// only moves raw bytes. When removing the last element the copy range is
    /// empty (`byte_idx_start == byte_end`) and the vacated slot is
    /// zero-filled instead. NOTE(review): for non-last removals the old last
    /// slot keeps a stale byte copy of the final element — apparently harmless
    /// since `len` shrinks, but confirm nothing scans up to `cap`.
    fn do_remove(&mut self, idx: usize) -> Result<()> {
        // View the full capacity as a transactional byte slice so the shift is
        // a single copy_within.
        let mut rslice = unsafe {
            TxRefSlice::from_ref(
                self.start.resolve().into_tx()?.cast::<u8>(),
                self.cap * size_of::<T>(),
            )
        };
        let slice = rslice.as_slice_mut();
        let byte_idx_start = (idx + 1) * size_of::<T>();
        let byte_idx = idx * size_of::<T>();
        let byte_end = self.len * size_of::<T>();
        tracing::trace!(
            "slice byte copy: {} {} {}",
            byte_idx,
            byte_idx_start,
            byte_end
        );
        slice.copy_within(byte_idx_start..byte_end, byte_idx);
        if byte_idx_start == byte_end {
            slice[byte_idx..byte_idx_start].fill(0);
        }
        self.len -= 1;
        Ok(())
    }

    /// Borrow the `len` initialized elements as a resolved slice.
    pub fn as_slice(&self) -> RefSlice<'_, T> {
        let r = self.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.len) };
        slice
    }

    /// Run `f` over the initialized elements as a plain shared slice.
    fn with_slice<R>(&self, f: impl FnOnce(&[T]) -> R) -> R {
        let r = self.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.len) };
        f(slice.as_slice())
    }

    /// Run `f` over a sub-range of the initialized elements as a mutable
    /// slice, through a transactional reference.
    fn with_mut_slice<R>(
        &mut self,
        range: impl RangeBounds<usize>,
        f: impl FnOnce(&mut [T]) -> Result<R>,
    ) -> Result<R> {
        let r = self.resolve_start_tx()?;
        let slice = unsafe { TxRefSlice::from_ref(r, self.len) };
        f(slice.slice(range).as_slice_mut())
    }

    /// Run `f` with a mutable reference to the element at `idx`.
    ///
    /// Panics (via `unwrap`) if `idx >= len`; callers bounds-check first.
    fn with_mut<R>(&mut self, idx: usize, f: impl FnOnce(&mut T) -> Result<R>) -> Result<R> {
        let r = self.resolve_start_tx()?;
        let mut slice = unsafe { TxRefSlice::from_ref(r, self.len) };
        let mut item = slice.get_mut(idx).unwrap();
        f(&mut *item)
    }
}
130
/// A growable vector of invariant values whose element storage is obtained
/// from `Alloc`.
///
/// Derives `BaseType`, so it can presumably serve as the base of a Twizzler
/// object (see `VecObject`) — confirm against the derive's contract.
#[derive(twizzler_derive::BaseType)]
pub struct Vec<T: Invariant, Alloc: Allocator> {
    // Length/capacity/start-pointer state.
    inner: VecInner<T>,
    // Supplies (re)allocation of the element buffer.
    alloc: Alloc,
}
136
/// Zero-sized allocator that places the element buffer at a fixed offset
/// inside the very object that contains the `Vec` header (see the `Allocator`
/// impl below).
#[derive(Clone)]
pub struct VecObjectAlloc;
139
impl Allocator for VecObjectAlloc {
    /// "Allocate" by returning the fixed data offset (`NULLPAGE_SIZE * 2`)
    /// within the object that contains this allocator value.
    ///
    /// There is no free list — the same location is returned every time, so
    /// only a single live allocation is supported. Requests larger than the
    /// usable object space are rejected. NOTE(review): `layout.align()` is
    /// ignored; presumably the fixed offset is sufficiently aligned for any
    /// `T` — confirm.
    fn alloc(
        &self,
        layout: Layout,
    ) -> std::result::Result<crate::ptr::GlobalPtr<u8>, std::alloc::AllocError> {
        if layout.size() > MAX_SIZE - NULLPAGE_SIZE * 4 {
            return Err(std::alloc::AllocError);
        }
        // Locate the object we live in via our own address.
        let obj = twizzler_rt_abi::object::twz_rt_get_object_handle((self as *const Self).cast())
            .unwrap();
        Ok(GlobalPtr::new(obj.id(), (NULLPAGE_SIZE * 2) as u64))
    }

    /// Nothing to free: storage is a fixed region of the containing object.
    unsafe fn dealloc(&self, _ptr: crate::ptr::GlobalPtr<u8>, _layout: Layout) {}

    /// Grow in place: the buffer always occupies the fixed region starting at
    /// `NULLPAGE_SIZE * 2`, so reallocation only re-checks the size bound and
    /// returns the same location (no bytes are copied).
    unsafe fn realloc(
        &self,
        _ptr: GlobalPtr<u8>,
        _layout: Layout,
        newsize: usize,
    ) -> std::result::Result<GlobalPtr<u8>, AllocError> {
        if newsize > MAX_SIZE - NULLPAGE_SIZE * 4 {
            return Err(std::alloc::AllocError);
        }
        let obj = twizzler_rt_abi::object::twz_rt_get_object_handle((self as *const Self).cast())
            .unwrap();
        Ok(GlobalPtr::new(obj.id(), (NULLPAGE_SIZE * 2) as u64))
    }
}
171
172impl SingleObjectAllocator for VecObjectAlloc {}
173
impl<T: Invariant, Alloc: Allocator> Vec<T, Alloc> {
    /// Reinterpret the whole buffer (`cap` slots) as possibly-uninitialized
    /// elements so a fresh slot can be written before `len` covers it.
    fn maybe_uninit_slice<'a>(r: TxRef<T>, cap: usize) -> TxRefSlice<MaybeUninit<T>> {
        unsafe { TxRefSlice::from_ref(r.cast(), cap) }
    }

    /// Shared reference to the element at `idx`, or `None` if out of bounds.
    #[inline]
    pub fn get_ref(&self, idx: usize) -> Option<Ref<'_, T>> {
        let slice = self.as_slice();
        slice.get_ref(idx)
    }

    /// Mutable reference to the element at `idx`, or `None` if out of bounds.
    ///
    /// # Safety
    /// Goes through [`Self::as_mut_slice`], which resolves the start pointer
    /// mutably without the transactional path; the caller is responsible for
    /// the consequences of untracked mutation.
    #[inline]
    pub unsafe fn get_mut(&mut self, idx: usize) -> Option<RefMut<'_, T>> {
        let mut slice = self.as_mut_slice();
        slice.get_mut(idx)
    }

    /// Transactional reference to the element at `idx`, or `Ok(None)` if out
    /// of bounds.
    ///
    /// # Safety
    /// Takes `&self` but yields a handle that permits mutation; the caller
    /// must ensure there is no conflicting access.
    #[inline]
    pub unsafe fn get_tx(&self, idx: usize) -> Result<Option<TxRef<T>>> {
        let slice = self.as_slice();
        slice.get_ref(idx).map(|f| f.owned().into_tx()).transpose()
    }

    /// Create an empty vector (no allocation) using `alloc` for future growth.
    pub fn new_in(alloc: Alloc) -> Self {
        Self {
            inner: VecInner {
                cap: 0,
                len: 0,
                start: InvPtr::null(),
            },
            alloc,
        }
    }

    /// Reserve the next element slot, growing (capacity doubling, min 2) if
    /// the vector is full, and return a transactional ref to the
    /// uninitialized slot.
    ///
    /// NOTE(review): in the non-growing path `len` is incremented *before* the
    /// slot is written; if the caller's subsequent write fails, the vector is
    /// left counting an uninitialized element — confirm intended.
    fn get_slice_grow(&mut self) -> Result<TxRef<MaybeUninit<T>>> {
        let oldlen = self.inner.len;
        tracing::trace!("len: {}, cap: {}", self.inner.len, self.inner.cap);
        if self.inner.len == self.inner.cap {
            // Refuse to grow past the end of usable object space.
            if self.inner.start.raw() as usize + size_of::<T>() * self.inner.cap
                >= MAX_SIZE - NULLPAGE_SIZE
            {
                return Err(ResourceError::OutOfMemory.into());
            }
            let newcap = std::cmp::max(self.inner.cap, 1) * 2;
            // do_realloc records the post-push length (oldlen + 1) directly.
            self.inner.do_realloc(newcap, oldlen + 1, &self.alloc)?;
            let r = self.inner.resolve_start_tx()?;
            tracing::trace!("grow {:p}", r.raw());
            Ok(Self::maybe_uninit_slice(r, newcap)
                .get_into(oldlen)
                .unwrap())
        } else {
            self.inner.len += 1;
            let r = self.inner.resolve_start_tx()?;
            tracing::trace!(
                "no grow {:p} {}, {} {}",
                r.raw(),
                r.is_nosync(),
                self.inner.len,
                self.inner.cap
            );
            Ok(Self::maybe_uninit_slice(r, self.inner.cap)
                .get_into(oldlen)
                .unwrap())
        }
    }

    /// Common push path: acquire a slot (growing if needed) and write `item`.
    fn do_push(&mut self, item: T) -> Result<()> {
        let r = self.get_slice_grow()?;
        tracing::trace!("store value: {:p}", r.raw());
        let r = r.write(item)?;
        drop(r);
        Ok(())
    }

    /// Number of initialized elements.
    pub fn len(&self) -> usize {
        self.inner.len
    }

    /// Number of element slots currently allocated.
    pub fn capacity(&self) -> usize {
        self.inner.cap
    }

    /// Grow capacity by `additional` slots.
    ///
    /// NOTE(review): unlike `std::vec::Vec::reserve`, this adds to the current
    /// *capacity* (not the length), so repeated calls over-allocate — confirm
    /// intended.
    pub fn reserve(&mut self, additional: usize) -> Result<()> {
        self.inner
            .do_realloc(self.inner.cap + additional, self.inner.len, &self.alloc)?;
        Ok(())
    }

    /// Borrow the initialized elements as a resolved slice.
    #[inline]
    pub fn as_slice(&self) -> RefSlice<'_, T> {
        let r = self.inner.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.inner.len) };
        slice
    }

    /// Borrow the initialized elements as a transactional slice.
    #[inline]
    pub fn as_tx_slice(&self) -> Result<TxRefSlice<T>> {
        let r = self.inner.resolve_start_tx()?;
        let slice = unsafe { TxRefSlice::from_ref(r, self.inner.len) };
        Ok(slice)
    }

    /// Borrow the initialized elements as a mutable slice.
    ///
    /// # Safety
    /// Resolves the start pointer mutably outside the transactional path; the
    /// caller is responsible for the consequences of untracked mutation.
    #[inline]
    pub unsafe fn as_mut_slice(&mut self) -> RefSliceMut<'_, T> {
        let r = unsafe { self.inner.start.resolve_mut() };
        let slice = unsafe { RefSliceMut::from_ref(r, self.inner.len) };
        slice
    }

    /// Drop the element at `idx` in place, then shift the tail left.
    ///
    /// Errors with `InvalidArgument` if `idx` is out of bounds.
    pub fn remove_inplace(&mut self, idx: usize) -> Result<()> {
        if idx >= self.inner.len {
            return Err(ArgumentError::InvalidArgument.into());
        }
        // Drop first; do_remove only moves raw bytes afterwards.
        self.inner.with_mut(idx, |item| {
            unsafe { core::ptr::drop_in_place(item) };
            Ok(())
        })?;
        self.inner.do_remove(idx)?;
        Ok(())
    }

    /// Shorten the vector to `newlen`, dropping the truncated elements in
    /// place. No-op if `newlen >= len`.
    pub fn truncate(&mut self, newlen: usize) -> Result<()> {
        let oldlen = self.inner.len;
        if newlen >= oldlen {
            return Ok(());
        }
        self.inner.with_mut_slice(newlen..oldlen, |slice| {
            for item in slice {
                unsafe { core::ptr::drop_in_place(item) };
            }
            Ok(())
        })?;
        self.inner.len = newlen;
        Ok(())
    }

    /// NOTE(review): this only rewrites the recorded capacity; the allocation
    /// itself is neither shrunk nor freed, and a later `do_realloc` will build
    /// `old_layout` from this reduced `cap` while the allocator still holds
    /// the larger region — looks like a bug; confirm against the allocator
    /// contract.
    pub fn shrink_to_fit(&mut self) -> Result<()> {
        self.inner.cap = self.inner.len;
        Ok(())
    }

    /// Run `f` over the initialized elements as a plain shared slice.
    pub fn with_slice<R>(&self, f: impl FnOnce(&[T]) -> R) -> R {
        self.inner.with_slice(f)
    }

    /// Run `f` over a sub-range of the elements as a mutable slice.
    pub fn with_mut_slice<R>(
        &mut self,
        range: impl RangeBounds<usize>,
        f: impl FnOnce(&mut [T]) -> Result<R>,
    ) -> Result<R> {
        self.inner.with_mut_slice(range, f)
    }

    /// `true` if the vector holds no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Drop all elements (equivalent to `truncate(0)`).
    pub fn clear(&mut self) -> Result<()> {
        self.truncate(0)
    }

    /// Swap the elements at `a` and `b`. Panics if either index is out of
    /// bounds (slice indexing).
    pub fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        unsafe {
            let mut slice = self.as_mut_slice();
            let slice_mut = slice.as_slice_mut();
            slice_mut.swap(a, b);
        }
    }

    /// Reference to the first element, if any.
    pub fn first_ref(&self) -> Option<Ref<'_, T>> {
        self.get_ref(0)
    }

    /// Reference to the last element, if any.
    pub fn last_ref(&self) -> Option<Ref<'_, T>> {
        if self.inner.len == 0 {
            None
        } else {
            self.get_ref(self.inner.len - 1)
        }
    }

    /// `true` if any element equals `item`.
    pub fn contains(&self, item: &T) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.contains(item))
    }

    /// `true` if the vector begins with `needle`.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.starts_with(needle))
    }

    /// `true` if the vector ends with `needle`.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.ends_with(needle))
    }

    /// Binary search for `x`; the vector must be sorted (see `slice::binary_search`).
    pub fn binary_search(&self, x: &T) -> std::result::Result<usize, usize>
    where
        T: Ord,
    {
        self.with_slice(|slice| slice.binary_search(x))
    }

    /// Binary search with a caller-supplied comparator.
    pub fn binary_search_by<F>(&self, f: F) -> std::result::Result<usize, usize>
    where
        F: FnMut(&T) -> std::cmp::Ordering,
    {
        self.with_slice(|slice| slice.binary_search_by(f))
    }

    /// Reverse the elements in place.
    pub fn reverse(&mut self) -> Result<()> {
        self.with_mut_slice(.., |slice| {
            slice.reverse();
            Ok(())
        })
    }

    /// Stable in-place sort.
    pub fn sort(&mut self) -> Result<()>
    where
        T: Ord,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort();
            Ok(())
        })
    }

    /// Stable in-place sort with a comparator.
    pub fn sort_by<F>(&mut self, compare: F) -> Result<()>
    where
        F: FnMut(&T, &T) -> std::cmp::Ordering,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_by(compare);
            Ok(())
        })
    }

    /// Unstable in-place sort.
    pub fn sort_unstable(&mut self) -> Result<()>
    where
        T: Ord,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_unstable();
            Ok(())
        })
    }

    /// Unstable in-place sort with a comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F) -> Result<()>
    where
        F: FnMut(&T, &T) -> std::cmp::Ordering,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_unstable_by(compare);
            Ok(())
        })
    }

    /// Keep only elements for which `f` returns true.
    ///
    /// `del` tracks how many elements have been removed so far, so `i - del`
    /// is the current index of the element originally at `i`. NOTE(review):
    /// each removal shifts the tail (`remove_inplace`), so this is O(n·m) for
    /// m removals — `std`'s retain is O(n); acceptable here? confirm.
    pub fn retain<F>(&mut self, mut f: F) -> Result<()>
    where
        F: FnMut(&T) -> bool,
    {
        let mut del = 0;
        let len = self.len();

        for i in 0..len {
            let should_retain = self.with_slice(|slice| f(&slice[i - del]));
            if !should_retain {
                self.remove_inplace(i - del)?;
                del += 1;
            }
        }

        Ok(())
    }
}
461
impl<T: Invariant + StoreCopy, Alloc: Allocator> Vec<T, Alloc> {
    /// Append `item`. Gated on `T: StoreCopy`, presumably because the value is
    /// copied into object storage by value — confirm against the marker's
    /// contract.
    pub fn push(&mut self, item: T) -> Result<()> {
        self.do_push(item)
    }

    /// Remove and return the last element, or `Ok(None)` if empty.
    pub fn pop(&mut self) -> Result<Option<T>> {
        if self.inner.len == 0 {
            return Ok(None);
        }
        let new_len = self.inner.len - 1;
        // Bitwise-read the value out (presumably sound for StoreCopy types);
        // do_remove then zero-fills the vacated last slot and decrements len.
        let val = self
            .inner
            .with_slice(|slice| unsafe { ((&slice[new_len]) as *const T).read() });
        self.inner.do_remove(new_len)?;
        Ok(Some(val))
    }

    /// Remove and return the element at `idx`, shifting the tail left.
    ///
    /// Errors with `InvalidArgument` if `idx` is out of bounds.
    pub fn remove(&mut self, idx: usize) -> Result<T> {
        if idx >= self.inner.len {
            return Err(ArgumentError::InvalidArgument.into());
        }
        // Read the value out before do_remove overwrites the slot's bytes.
        let val = self
            .inner
            .with_slice(|slice| unsafe { ((&slice[idx]) as *const T).read() });
        self.inner.do_remove(idx)?;
        Ok(val)
    }
}
491
impl<T: Invariant, Alloc: Allocator + SingleObjectAllocator> Vec<T, Alloc> {
    /// Append `item` without requiring `T: StoreCopy`; allowed because a
    /// single-object allocator keeps the element in the same object as the
    /// header — presumably why the bound differs from `push`; confirm.
    pub fn push_inplace(&mut self, item: T) -> Result<()> {
        self.do_push(item)
    }

    /// Append an element constructed in place: `ctor` receives the reserved
    /// uninitialized slot and must return it initialized.
    ///
    /// NOTE(review): `get_slice_grow` bumps `len` before `ctor` runs, so if
    /// `ctor` fails the vector is left counting an uninitialized slot —
    /// confirm whether callers rely on this.
    pub fn push_ctor<F>(&mut self, ctor: F) -> Result<()>
    where
        F: FnOnce(RefMut<MaybeUninit<T>>) -> Result<RefMut<T>>,
    {
        let mut r = self.get_slice_grow()?;
        let _val = ctor(r.as_mut())?;
        Ok(())
    }
}
506
507impl<T: Invariant, A: Allocator, U> PartialEq<&[U]> for Vec<T, A>
508where
509 T: PartialEq<U>,
510{
511 fn eq(&self, other: &&[U]) -> bool {
512 self.with_slice(|s| s.eq(*other))
513 }
514}
515
516impl<T: Invariant, A: Allocator, U: Invariant, A2: Allocator> PartialEq<Vec<U, A2>> for Vec<T, A>
517where
518 T: PartialEq<U>,
519{
520 fn eq(&self, other: &Vec<U, A2>) -> bool {
521 self.with_slice(|s| other.with_slice(|o| s.eq(o)))
522 }
523}
524
525impl<T: Invariant, A: Allocator, U> PartialOrd<&[U]> for Vec<T, A>
526where
527 T: PartialOrd<U>,
528{
529 fn partial_cmp(&self, other: &&[U]) -> Option<std::cmp::Ordering> {
530 self.with_slice(|s| s.iter().partial_cmp(*other))
531 }
532}
533
534impl<T: Invariant, A: Allocator, U: Invariant, A2: Allocator> PartialOrd<Vec<U, A2>> for Vec<T, A>
535where
536 T: PartialOrd<U>,
537{
538 fn partial_cmp(&self, other: &Vec<U, A2>) -> Option<std::cmp::Ordering> {
539 self.with_slice(|s| other.with_slice(|o| s.iter().partial_cmp(o)))
540 }
541}
542
543impl<T: Invariant + Eq, A: Allocator> Eq for Vec<T, A> {}
544
545impl<T: Invariant + Ord, A: Allocator> Ord for Vec<T, A> {
546 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
547 self.partial_cmp(other).unwrap()
548 }
549}
550
551use std::fmt::Debug;
552impl<T: Debug + Invariant, A: Allocator> Debug for Vec<T, A> {
553 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
554 self.with_slice(|s| f.debug_list().entries(s.iter()).finish())
555 }
556}