twizzler_rt_abi/
object.rs

//! Interface for objects and object handles.

use core::{
    ffi::c_void,
    fmt::{LowerHex, UpperHex},
    mem::{size_of, MaybeUninit},
    sync::atomic::{AtomicU64, Ordering},
};

use crate::{
    bindings::LEN_MUL,
    error::{RawTwzError, TwzError},
    Result,
};

/// An object ID.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Default)]
#[repr(transparent)]
pub struct ObjID(twizzler_types::ObjID);

impl ObjID {
    /// The number of u64 components that make up an object ID, if split.
    pub const NR_PARTS: usize = 2;

    /// Build a new object ID from raw.
    pub const fn new(raw: twizzler_types::ObjID) -> Self {
        Self(raw)
    }

    /// Get the raw object ID type.
    pub const fn raw(&self) -> twizzler_types::ObjID {
        self.0
    }

    /// Build an object ID from parts, useful for syscalls.
    pub const fn from_parts(parts: [u64; Self::NR_PARTS]) -> Self {
        Self::new(((parts[0] as u128) << 64) | (parts[1] as u128))
    }

    /// Split the object ID into parts, useful for packing into registers for syscalls.
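    ///
    /// `parts` round-trips with [`ObjID::from_parts`]; a minimal sketch (the ID value here is
    /// arbitrary):
    /// ```ignore
    /// let id = ObjID::new(0x1234_5678_9abc_def0_1122_3344_5566_7788);
    /// assert_eq!(ObjID::from_parts(id.parts()), id);
    /// ```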
    pub const fn parts(&self) -> [u64; Self::NR_PARTS] {
        [(self.0 >> 64) as u64, (self.0 & 0xffffffffffffffff) as u64]
    }
}

impl core::convert::AsRef<ObjID> for ObjID {
    fn as_ref(&self) -> &ObjID {
        self
    }
}

impl From<twizzler_types::ObjID> for ObjID {
    fn from(id: twizzler_types::ObjID) -> Self {
        Self::new(id)
    }
}

impl LowerHex for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:x}", self.0)
    }
}

impl UpperHex for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:X}", self.0)
    }
}

impl core::fmt::Display for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "ObjID({:x})", self.0)
    }
}

impl core::fmt::Debug for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "ObjID({:x})", self.0)
    }
}

/// An object handle, granting access to object memory. An object handle can be in one of two
/// modes:
///   - Owning -- the normal mode, which acts like an `Arc` and asks the runtime to unmap the
///     object when the refcount hits zero.
///   - Unsafe -- internal use only. This mode is NOT owning, but still holds pointers, so it is
///     unsafe to use and must not be exposed to users; internally, though, it can sometimes be
///     sound, and is faster than cloning.
///
/// In general, handles have reference-counting semantics via `Clone` and `Drop`, just like `Arc`.
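///
/// A sketch of the owning mode (`some_id` stands in for a real object ID):
/// ```ignore
/// let handle = twz_rt_map_object(some_id, MapFlags::READ)?;
/// let second = handle.clone(); // refcount goes to 2
/// drop(handle);                // refcount back to 1; the mapping stays valid
/// drop(second);                // refcount hits 0; the runtime unmaps the object
/// ```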
#[repr(transparent)]
pub struct ObjectHandle(pub(crate) crate::bindings::object_handle);

#[cfg(not(feature = "kernel"))]
impl core::fmt::Debug for ObjectHandle {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "ObjectHandle({:?}, {:p}, {:x}, {:?})",
            self.id(),
            self.start(),
            self.valid_len(),
            self.map_flags()
        )
    }
}

unsafe impl Send for ObjectHandle {}
unsafe impl Sync for ObjectHandle {}

bitflags::bitflags! {
    /// Flags for mapping objects.
    #[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Debug)]
    pub struct MapFlags : crate::bindings::map_flags {
        /// Request READ access.
        const READ = crate::bindings::MAP_FLAG_R;
        /// Request WRITE access.
        const WRITE = crate::bindings::MAP_FLAG_W;
        /// Request EXECUTE access.
        const EXEC = crate::bindings::MAP_FLAG_X;
        /// Persist changes on flush.
        const PERSIST = crate::bindings::MAP_FLAG_PERSIST;
        /// Use runtime support for read stability.
        const INDIRECT = crate::bindings::MAP_FLAG_INDIRECT;
        /// Map the object without a null page.
        const NO_NULLPAGE = crate::bindings::MAP_FLAG_NO_NULLPAGE;
    }
}

#[allow(dead_code)]
impl ObjectHandle {
    fn refs(&self) -> *const AtomicU64 {
        self.0.runtime_info.cast()
    }

    /// Get a pointer to the start of object data.
    pub fn start(&self) -> *mut u8 {
        self.0.start.cast()
    }

    /// Get a pointer to the metadata structure.
    pub fn meta(&self) -> *mut MetaInfo {
        self.0.meta.cast()
    }

    /// Get a slice of the metadata extensions.
    pub fn meta_exts(&self) -> &[MetaExt] {
        unsafe {
            // The extension array sits directly after the MetaInfo struct; its length is given
            // by the extcount field.
            core::slice::from_raw_parts(
                self.0.meta.cast::<u8>().add(size_of::<MetaInfo>()).cast(),
                (*self.meta()).extcount as usize,
            )
        }
    }

    /// Find the first metadata extension matching the given tag.
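    ///
    /// A hedged sketch: look up the "sized" extension by tag (`handle` is assumed to be a valid
    /// `ObjectHandle`):
    /// ```ignore
    /// if let Some(ext) = handle.find_meta_ext(MEXT_SIZED) {
    ///     let value = ext.value; // tag-specific value
    /// }
    /// ```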
    pub fn find_meta_ext(&self, tag: MetaExtTag) -> Option<&MetaExt> {
        self.meta_exts().iter().find(|e| e.tag == tag)
    }

    /// Get a pointer to the runtime info.
    pub fn runtime_info(&self) -> *mut u8 {
        self.0.runtime_info.cast()
    }

    /// Get the map flags.
    pub fn map_flags(&self) -> MapFlags {
        MapFlags::from_bits_truncate(self.0.map_flags)
    }

    /// Get the number of valid bytes after the start pointer for object data.
    pub fn valid_len(&self) -> usize {
        (self.0.valid_len as usize) * crate::bindings::LEN_MUL
    }

    /// Get the object ID.
    pub fn id(&self) -> ObjID {
        ObjID::new(self.0.id)
    }

    /// Get the raw object handle, without running `Drop`; the caller takes over the reference.
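    ///
    /// Pairing with [`ObjectHandle::from_raw`] restores ownership without touching the refcount
    /// (a sketch):
    /// ```ignore
    /// let raw = handle.into_raw();
    /// let handle = ObjectHandle::from_raw(raw);
    /// ```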
    pub fn into_raw(self) -> crate::bindings::object_handle {
        let this = core::mem::ManuallyDrop::new(self);
        this.0
    }

    /// Build an object handle from raw.
    pub fn from_raw(raw: crate::bindings::object_handle) -> Self {
        Self(raw)
    }

    /// Make a new object handle.
    ///
    /// # Safety
    /// The caller must ensure that `runtime_info` is a valid pointer, and that it points to a
    /// repr(C) struct that starts with an `AtomicU64` for reference counting.
    pub unsafe fn new(
        id: ObjID,
        runtime_info: *mut c_void,
        start: *mut c_void,
        meta: *mut c_void,
        map_flags: MapFlags,
        valid_len: usize,
    ) -> Self {
        Self::from_raw(crate::bindings::object_handle {
            id: id.0,
            runtime_info,
            start,
            meta,
            map_flags: map_flags.bits(),
            valid_len: (valid_len / LEN_MUL) as u32,
        })
    }
}

#[cfg(not(feature = "kernel"))]
impl Clone for ObjectHandle {
    fn clone(&self) -> Self {
        unsafe {
            // A null runtime_info pointer means this handle is non-owning, so just copy it.
            let Some(ref rc) = self.refs().as_ref() else {
                return Self(self.0);
            };
            // This use of Relaxed ordering is justified by https://doc.rust-lang.org/nomicon/arc-mutex/arc-clone.html.
            let old_count = rc.fetch_add(1, Ordering::Relaxed);
            // The above link also justifies the following behavior. If our count gets this high,
            // we have probably run into a problem somewhere.
            if old_count >= i64::MAX as u64 {
                crate::core::twz_rt_abort();
            }
        }
        Self(self.0)
    }
}

#[cfg(not(feature = "kernel"))]
impl Drop for ObjectHandle {
    fn drop(&mut self) {
        unsafe {
            // Non-owning handles have no refcount and nothing to release.
            let Some(ref rc) = self.refs().as_ref() else {
                return;
            };
            // This use of Release ordering is justified by https://doc.rust-lang.org/nomicon/arc-mutex/arc-clone.html.
            if rc.fetch_sub(1, Ordering::Release) != 1 {
                return;
            }
        }
        // This fence is needed to prevent reordering of the use and deletion
        // of the data.
        core::sync::atomic::fence(Ordering::Acquire);
        twz_rt_release_handle(self);
    }
}

impl AsRef<ObjectHandle> for ObjectHandle {
    fn as_ref(&self) -> &ObjectHandle {
        self
    }
}

impl Default for crate::bindings::object_handle {
    fn default() -> Self {
        Self {
            id: 0,
            map_flags: 0,
            start: core::ptr::null_mut(),
            meta: core::ptr::null_mut(),
            runtime_info: core::ptr::null_mut(),
            valid_len: 0,
        }
    }
}

#[cfg(not(feature = "kernel"))]
impl From<Result<ObjectHandle>> for crate::bindings::map_result {
    fn from(value: Result<ObjectHandle>) -> Self {
        match value {
            Ok(handle) => Self {
                handle: handle.into_raw(),
                error: RawTwzError::success().raw(),
            },
            Err(e) => Self {
                handle: crate::bindings::object_handle::default(),
                error: e.raw(),
            },
        }
    }
}

#[cfg(not(feature = "kernel"))]
impl From<crate::bindings::map_result> for Result<ObjectHandle> {
    fn from(value: crate::bindings::map_result) -> Self {
        let raw = RawTwzError::new(value.error);
        if raw.is_success() {
            Ok(ObjectHandle(value.handle))
        } else {
            Err(raw.error())
        }
    }
}

/// Map an object given by ID `id` with the given flags.
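///
/// A minimal sketch (`some_id` stands in for a real object ID):
/// ```ignore
/// let handle = twz_rt_map_object(some_id, MapFlags::READ | MapFlags::WRITE)?;
/// let base = handle.start();
/// ```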
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_map_object(id: ObjID, flags: MapFlags) -> Result<ObjectHandle> {
    unsafe { crate::bindings::twz_rt_map_object(id.raw(), flags.bits()).into() }
}

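/// Get an object handle for the object containing the given pointer, failing if the pointer does
/// not lie within a mapped object.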
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_get_object_handle(ptr: *const u8) -> Result<ObjectHandle> {
    use crate::error::ObjectError;

    let res = unsafe { crate::bindings::twz_rt_get_object_handle((ptr as *mut u8).cast()) };
    if res.id == 0 {
        return Err(TwzError::Object(ObjectError::NotMapped));
    }
    Ok(ObjectHandle(res))
}

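/// Resolve the FOT (Foreign Object Table) entry at index `idx` of the object behind `this`,
/// returning a handle to the target object mapped with `flags`.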
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_resolve_fot(
    this: &ObjectHandle,
    idx: u64,
    valid_len: usize,
    flags: MapFlags,
) -> Result<ObjectHandle> {
    unsafe {
        crate::bindings::twz_rt_resolve_fot(
            &this.0 as *const _ as *mut _,
            idx,
            valid_len,
            flags.bits(),
        )
        .into()
    }
}

impl From<Result<u32>> for crate::bindings::u32_result {
    fn from(value: Result<u32>) -> Self {
        match value {
            Ok(val) => Self {
                val,
                err: RawTwzError::success().raw(),
            },
            Err(e) => Self {
                val: 0,
                err: e.raw(),
            },
        }
    }
}

impl From<crate::bindings::u32_result> for Result<u32> {
    fn from(value: crate::bindings::u32_result) -> Self {
        let raw = RawTwzError::new(value.err);
        if raw.is_success() {
            Ok(value.val)
        } else {
            Err(raw.error())
        }
    }
}

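/// Insert `entry` into the FOT of the object behind `this`, returning the new FOT index on
/// success.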
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_insert_fot(this: &ObjectHandle, entry: *const u8) -> Result<u32> {
    unsafe {
        let res = crate::bindings::twz_rt_insert_fot(
            &this.0 as *const _ as *mut _,
            (entry as *mut u8).cast(),
        );
        res.into()
    }
}

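/// Like [`twz_rt_resolve_fot`], but starts from an object's data pointer and resolves to a raw
/// pointer instead of returning a new handle.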
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_resolve_fot_local(
    start: *mut u8,
    idx: u64,
    valid_len: usize,
    flags: MapFlags,
) -> *mut u8 {
    unsafe {
        let res =
            crate::bindings::twz_rt_resolve_fot_local(start.cast(), idx, valid_len, flags.bits());
        res.cast()
    }
}

/// Release a handle. Should only be called by `ObjectHandle`'s `Drop` impl.
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_release_handle(handle: &mut ObjectHandle) {
    unsafe { crate::bindings::twz_rt_release_handle(&mut handle.0) }
}

/// Update a handle.
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_update_handle(handle: &mut ObjectHandle) -> Result<()> {
    let r = unsafe { crate::bindings::twz_rt_update_handle(&mut handle.0) };
    let r = RawTwzError::new(r);
    if r.is_success() {
        Ok(())
    } else {
        Err(r.error())
    }
}

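/// Map two objects in a single call, failing if either mapping fails.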
#[deprecated]
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_map_two_objects(
    id1: ObjID,
    flags1: MapFlags,
    id2: ObjID,
    flags2: MapFlags,
) -> Result<(ObjectHandle, ObjectHandle)> {
    unsafe {
        let mut res1 = MaybeUninit::uninit();
        let mut res2 = MaybeUninit::uninit();
        crate::bindings::__twz_rt_map_two_objects(
            id1.raw(),
            flags1.bits(),
            id2.raw(),
            flags2.bits(),
            res1.as_mut_ptr(),
            res2.as_mut_ptr(),
        );

        let res1 = res1.assume_init();
        let res2 = res2.assume_init();

        let res1: Result<ObjectHandle> = res1.into();
        let res2: Result<ObjectHandle> = res2.into();

        Ok((res1?, res2?))
    }
}

bitflags::bitflags! {
    /// Mapping protections for mapping objects into the address space.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct Protections: u16 {
        /// Read allowed.
        const READ = crate::bindings::MAP_FLAG_R as u16;
        /// Write allowed.
        const WRITE = crate::bindings::MAP_FLAG_W as u16;
        /// Exec allowed.
        const EXEC = crate::bindings::MAP_FLAG_X as u16;
    }
}

impl From<Protections> for MapFlags {
    fn from(p: Protections) -> Self {
        let mut f = MapFlags::empty();
        if p.contains(Protections::READ) {
            f.insert(MapFlags::READ);
        }
        if p.contains(Protections::WRITE) {
            f.insert(MapFlags::WRITE);
        }
        if p.contains(Protections::EXEC) {
            f.insert(MapFlags::EXEC);
        }
        f
    }
}

impl From<MapFlags> for Protections {
    fn from(value: MapFlags) -> Self {
        let mut f = Self::empty();
        if value.contains(MapFlags::READ) {
            f.insert(Protections::READ);
        }
        if value.contains(MapFlags::WRITE) {
            f.insert(Protections::WRITE);
        }
        if value.contains(MapFlags::EXEC) {
            f.insert(Protections::EXEC);
        }
        f
    }
}

bitflags::bitflags! {
    /// Flags for objects.
    #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Default)]
    pub struct MetaFlags: u16 {
        /// The object is immutable.
        const IMMUTABLE = 1;
    }
}

/// A nonce for avoiding object ID collision.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Default)]
#[repr(transparent)]
pub struct Nonce(pub u128);

/// The core metadata that all objects share.
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
#[repr(C)]
pub struct MetaInfo {
    /// The ID nonce.
    pub nonce: Nonce,
    /// The object's public key ID.
    pub kuid: ObjID,
    /// The object flags.
    pub flags: MetaFlags,
    /// The default protections.
    pub default_prot: Protections,
    /// The number of FOT entries.
    pub fotcount: u16,
    /// The number of meta extensions.
    pub extcount: u16,
}

/// A tag for a meta extension entry.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)]
#[repr(transparent)]
pub struct MetaExtTag(pub u64);

/// A meta extension entry.
#[repr(C)]
pub struct MetaExt {
    /// The tag.
    pub tag: MetaExtTag,
    /// A tag-specific value.
    pub value: u64,
}

/// Tag marking an empty (unused) meta extension slot.
pub const MEXT_EMPTY: MetaExtTag = MetaExtTag(0);
/// Tag for the "sized object" meta extension.
pub const MEXT_SIZED: MetaExtTag = MetaExtTag(1);