// twizzler/collections/vec.rs

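//! A vector type whose elements live in Twizzler object memory.
//!
//! [`Vec`] keeps its length, capacity, and an invariant pointer to element storage in
//! [`VecInner`], and grows through a pluggable [`Allocator`]. [`VecObjectAlloc`] is an
//! allocator that places element storage inside the same object that holds the vector
//! itself. [`VecObject`] and [`VecIter`] are re-exported from the `vec_object` submodule.
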
use std::{
    alloc::{AllocError, Layout},
    mem::MaybeUninit,
    ops::RangeBounds,
};

use twizzler_abi::object::{MAX_SIZE, NULLPAGE_SIZE};
use twizzler_rt_abi::error::{ArgumentError, ResourceError};

use crate::{
    alloc::{Allocator, SingleObjectAllocator},
    marker::{Invariant, StoreCopy},
    ptr::{GlobalPtr, InvPtr, Ref, RefMut, RefSlice, RefSliceMut, TxRef, TxRefSlice},
    util::same_object,
    Result,
};

mod vec_object;
pub use vec_object::{VecIter, VecObject};

#[cfg(test)]
mod tests;

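/// The in-object state of a vector: the current length, the allocated capacity, and an
/// invariant pointer to the start of element storage.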
pub struct VecInner<T: Invariant> {
    len: usize,
    cap: usize,
    start: InvPtr<T>,
}

impl<T: Invariant> VecInner<T> {
    fn resolve_start(&self) -> Ref<'_, T> {
        unsafe { self.start.resolve() }
    }

    fn resolve_start_tx(&self) -> Result<TxRef<T>> {
        let mut tx = unsafe { self.start.resolve().into_tx() }?;
        if same_object(tx.raw(), self) {
            tx.nosync();
        }
        Ok(tx)
    }

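    /// Grow element storage to `newcap` elements (shrinking is not yet implemented),
    /// reallocating through `alloc` and then updating the invariant start pointer, the
    /// capacity, and the length.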
    fn do_realloc<Alloc: Allocator>(
        &mut self,
        newcap: usize,
        newlen: usize,
        alloc: &Alloc,
    ) -> Result<()> {
        let place = unsafe { Ref::from_ptr(self) };
        if newcap <= self.cap {
            // TODO: shrinking.
            return Ok(());
        }

        let new_layout = Layout::array::<T>(newcap).map_err(|_| AllocError)?;
        let old_layout = Layout::array::<T>(self.cap).map_err(|_| AllocError)?;

        let old_global = self.start.global().cast();
        let new_alloc = unsafe { alloc.realloc(old_global, old_layout, new_layout.size()) }?;
        let new_start = InvPtr::new(place, new_alloc.cast())?;
        self.start = new_start;
        self.cap = newcap;
        self.len = newlen;
        tracing::trace!(
            "set start: {:x} len {}, cap {}",
            self.start.raw(),
            self.len,
            self.cap
        );

        Ok(())
    }

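    /// Remove the element at `idx` by shifting the bytes of the trailing elements down
    /// over it; when removing the last element, the vacated slot is zeroed instead.
    /// Callers are responsible for dropping or reading out the removed value first.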
    fn do_remove(&mut self, idx: usize) -> Result<()> {
        let mut rslice = unsafe {
            TxRefSlice::from_ref(
                self.start.resolve().into_tx()?.cast::<u8>(),
                self.cap * size_of::<T>(),
            )
        };
        let slice = rslice.as_slice_mut();
        let byte_idx_start = (idx + 1) * size_of::<T>();
        let byte_idx = idx * size_of::<T>();
        let byte_end = self.len * size_of::<T>();
        tracing::trace!(
            "slice byte copy: {} {} {}",
            byte_idx,
            byte_idx_start,
            byte_end
        );
        slice.copy_within(byte_idx_start..byte_end, byte_idx);
        if byte_idx_start == byte_end {
            slice[byte_idx..byte_idx_start].fill(0);
        }
        self.len -= 1;
        Ok(())
    }

    pub fn as_slice(&self) -> RefSlice<'_, T> {
        let r = self.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.len) };
        slice
    }

    fn with_slice<R>(&self, f: impl FnOnce(&[T]) -> R) -> R {
        let r = self.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.len) };
        f(slice.as_slice())
    }

    fn with_mut_slice<R>(
        &mut self,
        range: impl RangeBounds<usize>,
        f: impl FnOnce(&mut [T]) -> Result<R>,
    ) -> Result<R> {
        let r = self.resolve_start_tx()?;
        let slice = unsafe { TxRefSlice::from_ref(r, self.len) };
        f(slice.slice(range).as_slice_mut())
    }

    fn with_mut<R>(&mut self, idx: usize, f: impl FnOnce(&mut T) -> Result<R>) -> Result<R> {
        let r = self.resolve_start_tx()?;
        let mut slice = unsafe { TxRefSlice::from_ref(r, self.len) };
        let mut item = slice.get_mut(idx).unwrap();
        f(&mut *item)
    }
}

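/// A dynamically sized vector of invariant data, with element storage managed by `Alloc`.
///
/// A minimal usage sketch (not compiled here), assuming the vector already lives inside a
/// Twizzler object, e.g. via `VecObject`, and that `u32` is `Invariant + StoreCopy`:
///
/// ```ignore
/// // `v` is a `&mut Vec<u32, VecObjectAlloc>` resolved from object memory.
/// v.push(7)?;
/// v.push(9)?;
/// assert_eq!(v.len(), 2);
/// assert_eq!(v.pop()?, Some(9));
/// assert!(!v.is_empty());
/// ```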
#[derive(twizzler_derive::BaseType)]
pub struct Vec<T: Invariant, Alloc: Allocator> {
    inner: VecInner<T>,
    alloc: Alloc,
}

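/// An [`Allocator`] that places element storage directly inside the object containing the
/// allocator itself, at a fixed offset (`NULLPAGE_SIZE * 2`) from the start of the object.
/// Both `alloc` and `realloc` hand back that same region, so a vector using this allocator
/// is limited to a single object's capacity.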
#[derive(Clone)]
pub struct VecObjectAlloc;

impl Allocator for VecObjectAlloc {
    fn alloc(
        &self,
        layout: Layout,
    ) -> std::result::Result<crate::ptr::GlobalPtr<u8>, std::alloc::AllocError> {
        // 1 for null page, 2 for metadata pages, 1 for base
        if layout.size() > MAX_SIZE - NULLPAGE_SIZE * 4 {
            return Err(std::alloc::AllocError);
        }
        let obj = twizzler_rt_abi::object::twz_rt_get_object_handle((self as *const Self).cast())
            .unwrap();
        Ok(GlobalPtr::new(obj.id(), (NULLPAGE_SIZE * 2) as u64))
    }

    unsafe fn dealloc(&self, _ptr: crate::ptr::GlobalPtr<u8>, _layout: Layout) {}

    unsafe fn realloc(
        &self,
        _ptr: GlobalPtr<u8>,
        _layout: Layout,
        newsize: usize,
    ) -> std::result::Result<GlobalPtr<u8>, AllocError> {
        // 1 for null page, 2 for metadata pages, 1 for base
        if newsize > MAX_SIZE - NULLPAGE_SIZE * 4 {
            return Err(std::alloc::AllocError);
        }
        let obj = twizzler_rt_abi::object::twz_rt_get_object_handle((self as *const Self).cast())
            .unwrap();
        Ok(GlobalPtr::new(obj.id(), (NULLPAGE_SIZE * 2) as u64))
    }
}

impl SingleObjectAllocator for VecObjectAlloc {}

//impl<T: Invariant, A: Allocator> BaseType for Vec<T, A> {}

impl<T: Invariant, Alloc: Allocator> Vec<T, Alloc> {
    fn maybe_uninit_slice<'a>(r: TxRef<T>, cap: usize) -> TxRefSlice<MaybeUninit<T>> {
        unsafe { TxRefSlice::from_ref(r.cast(), cap) }
    }

    #[inline]
    pub fn get_ref(&self, idx: usize) -> Option<Ref<'_, T>> {
        let slice = self.as_slice();
        slice.get_ref(idx)
    }

    #[inline]
    pub unsafe fn get_mut(&mut self, idx: usize) -> Option<RefMut<'_, T>> {
        let mut slice = self.as_mut_slice();
        slice.get_mut(idx)
    }

    #[inline]
    pub unsafe fn get_tx(&self, idx: usize) -> Result<Option<TxRef<T>>> {
        let slice = self.as_slice();
        slice.get_ref(idx).map(|f| f.owned().into_tx()).transpose()
    }

    pub fn new_in(alloc: Alloc) -> Self {
        Self {
            inner: VecInner {
                cap: 0,
                len: 0,
                start: InvPtr::null(),
            },
            alloc,
        }
    }

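    /// Reserve a slot for one new element, doubling capacity via `do_realloc` when the
    /// vector is full, and return a transactional reference to the uninitialized slot.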
    fn get_slice_grow(&mut self) -> Result<TxRef<MaybeUninit<T>>> {
        let oldlen = self.inner.len;
        tracing::trace!("len: {}, cap: {}", self.inner.len, self.inner.cap);
        if self.inner.len == self.inner.cap {
            if self.inner.start.raw() as usize + size_of::<T>() * self.inner.cap
                >= MAX_SIZE - NULLPAGE_SIZE
            {
                return Err(ResourceError::OutOfMemory.into());
            }
            let newcap = std::cmp::max(self.inner.cap, 1) * 2;
            self.inner.do_realloc(newcap, oldlen + 1, &self.alloc)?;
            let r = self.inner.resolve_start_tx()?;
            tracing::trace!("grow {:p}", r.raw());
            Ok(Self::maybe_uninit_slice(r, newcap)
                .get_into(oldlen)
                .unwrap())
        } else {
            self.inner.len += 1;
            let r = self.inner.resolve_start_tx()?;
            tracing::trace!("no grow {:p} {}", r.raw(), r.is_nosync());
            Ok(Self::maybe_uninit_slice(r, self.inner.cap)
                .get_into(oldlen)
                .unwrap())
        }
    }

    fn do_push(&mut self, item: T) -> Result<()> {
        let r = self.get_slice_grow()?;
        tracing::trace!("store value: {:p}", r.raw());
        let r = r.write(item)?;
        drop(r);
        Ok(())
    }

    pub fn len(&self) -> usize {
        self.inner.len
    }

    pub fn capacity(&self) -> usize {
        self.inner.cap
    }

    pub fn reserve(&mut self, additional: usize) -> Result<()> {
        self.inner
            .do_realloc(self.inner.cap + additional, self.inner.len, &self.alloc)?;
        Ok(())
    }

    #[inline]
    pub fn as_slice(&self) -> RefSlice<'_, T> {
        let r = self.inner.resolve_start();
        let slice = unsafe { RefSlice::from_ref(r, self.inner.len) };
        slice
    }

    #[inline]
    pub fn as_tx_slice(&self) -> Result<TxRefSlice<T>> {
        let r = self.inner.resolve_start_tx()?;
        let slice = unsafe { TxRefSlice::from_ref(r, self.inner.len) };
        Ok(slice)
    }

    #[inline]
    pub unsafe fn as_mut_slice(&mut self) -> RefSliceMut<'_, T> {
        let r = unsafe { self.inner.start.resolve_mut() };
        let slice = unsafe { RefSliceMut::from_ref(r, self.inner.len) };
        slice
    }

    pub fn remove_inplace(&mut self, idx: usize) -> Result<()> {
        if idx >= self.inner.len {
            return Err(ArgumentError::InvalidArgument.into());
        }
        self.inner.with_mut(idx, |item| {
            unsafe { core::ptr::drop_in_place(item) };
            Ok(())
        })?;
        self.inner.do_remove(idx)?;
        Ok(())
    }

    pub fn truncate(&mut self, newlen: usize) -> Result<()> {
        let oldlen = self.inner.len;
        if newlen >= oldlen {
            return Ok(());
        }
        self.inner.with_mut_slice(newlen..oldlen, |slice| {
            for item in slice {
                unsafe { core::ptr::drop_in_place(item) };
            }
            Ok(())
        })?;
        self.inner.len = newlen;
        Ok(())
    }

    pub fn shrink_to_fit(&mut self) -> Result<()> {
        self.inner.cap = self.inner.len;
        // TODO: release memory
        Ok(())
    }

    pub fn with_slice<R>(&self, f: impl FnOnce(&[T]) -> R) -> R {
        self.inner.with_slice(f)
    }

    pub fn with_mut_slice<R>(
        &mut self,
        range: impl RangeBounds<usize>,
        f: impl FnOnce(&mut [T]) -> Result<R>,
    ) -> Result<R> {
        self.inner.with_mut_slice(range, f)
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn clear(&mut self) -> Result<()> {
        self.truncate(0)
    }

    pub fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        unsafe {
            let mut slice = self.as_mut_slice();
            let slice_mut = slice.as_slice_mut();
            slice_mut.swap(a, b);
        }
    }

    pub fn first_ref(&self) -> Option<Ref<'_, T>> {
        self.get_ref(0)
    }

    pub fn last_ref(&self) -> Option<Ref<'_, T>> {
        if self.inner.len == 0 {
            None
        } else {
            self.get_ref(self.inner.len - 1)
        }
    }

    pub fn contains(&self, item: &T) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.contains(item))
    }

    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.starts_with(needle))
    }

    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.with_slice(|slice| slice.ends_with(needle))
    }

    pub fn binary_search(&self, x: &T) -> std::result::Result<usize, usize>
    where
        T: Ord,
    {
        self.with_slice(|slice| slice.binary_search(x))
    }

    pub fn binary_search_by<F>(&self, f: F) -> std::result::Result<usize, usize>
    where
        F: FnMut(&T) -> std::cmp::Ordering,
    {
        self.with_slice(|slice| slice.binary_search_by(f))
    }

    pub fn reverse(&mut self) -> Result<()> {
        self.with_mut_slice(.., |slice| {
            slice.reverse();
            Ok(())
        })
    }

    pub fn sort(&mut self) -> Result<()>
    where
        T: Ord,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort();
            Ok(())
        })
    }

    pub fn sort_by<F>(&mut self, compare: F) -> Result<()>
    where
        F: FnMut(&T, &T) -> std::cmp::Ordering,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_by(compare);
            Ok(())
        })
    }

    pub fn sort_unstable(&mut self) -> Result<()>
    where
        T: Ord,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_unstable();
            Ok(())
        })
    }

    pub fn sort_unstable_by<F>(&mut self, compare: F) -> Result<()>
    where
        F: FnMut(&T, &T) -> std::cmp::Ordering,
    {
        self.with_mut_slice(.., |slice| {
            slice.sort_unstable_by(compare);
            Ok(())
        })
    }

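    /// Keep only the elements for which `f` returns `true`, removing the rest in place.
    /// `del` counts how many elements have been removed so far, so later indices are
    /// shifted down accordingly.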
    pub fn retain<F>(&mut self, mut f: F) -> Result<()>
    where
        F: FnMut(&T) -> bool,
    {
        let mut del = 0;
        let len = self.len();

        for i in 0..len {
            let should_retain = self.with_slice(|slice| f(&slice[i - del]));
            if !should_retain {
                self.remove_inplace(i - del)?;
                del += 1;
            }
        }

        Ok(())
    }
}

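// By-value operations (push, pop, remove) are restricted to `StoreCopy` element types,
// which can safely be copied into and out of object memory.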
impl<T: Invariant + StoreCopy, Alloc: Allocator> Vec<T, Alloc> {
    pub fn push(&mut self, item: T) -> Result<()> {
        self.do_push(item)
    }

    pub fn pop(&mut self) -> Result<Option<T>> {
        if self.inner.len == 0 {
            return Ok(None);
        }
        let new_len = self.inner.len - 1;
        let val = self
            .inner
            .with_slice(|slice| unsafe { ((&slice[new_len]) as *const T).read() });
        self.inner.do_remove(new_len)?;
        Ok(Some(val))
    }

    pub fn remove(&mut self, idx: usize) -> Result<T> {
        //let mut inner = self.inner.get()?;
        if idx >= self.inner.len {
            return Err(ArgumentError::InvalidArgument.into());
        }
        let val = self
            .inner
            .with_slice(|slice| unsafe { ((&slice[idx]) as *const T).read() });
        self.inner.do_remove(idx)?;
        Ok(val)
    }
}

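// In-place construction (push_inplace, push_ctor) is offered when the allocator is a
// `SingleObjectAllocator`, which keeps element storage within a single object.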
impl<T: Invariant, Alloc: Allocator + SingleObjectAllocator> Vec<T, Alloc> {
    pub fn push_inplace(&mut self, item: T) -> Result<()> {
        self.do_push(item)
    }

    pub fn push_ctor<F>(&mut self, ctor: F) -> Result<()>
    where
        F: FnOnce(RefMut<MaybeUninit<T>>) -> Result<RefMut<T>>,
    {
        let mut r = self.get_slice_grow()?;
        let _val = ctor(r.as_mut())?;
        Ok(())
    }
497}