twizzler_rt_abi/
object.rs

//! Interface for objects and object handles.

use core::{
    ffi::c_void,
    fmt::{LowerHex, UpperHex},
    mem::MaybeUninit,
    sync::atomic::{AtomicU64, Ordering},
};

use crate::{
    bindings::{object_cmd, twz_rt_object_cmd, LEN_MUL, OBJECT_CMD_DELETE},
    error::{RawTwzError, TwzError},
    Result,
};

/// An object ID.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Default)]
#[repr(transparent)]
pub struct ObjID(twizzler_types::ObjID);

impl ObjID {
    /// The number of u64 components that make up an object ID, if split.
    pub const NR_PARTS: usize = 2;

    /// Build a new object ID from raw.
    pub const fn new(raw: twizzler_types::ObjID) -> Self {
        Self(raw)
    }

    /// Get the raw object ID value.
    pub const fn raw(&self) -> twizzler_types::ObjID {
        self.0
    }

    /// Build an object ID from parts, useful for syscalls.
    pub const fn from_parts(parts: [u64; Self::NR_PARTS]) -> Self {
        Self::new(((parts[0] as u128) << 64) | (parts[1] as u128))
    }

    /// Split the object ID into parts, useful for packing into registers for syscalls.
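    ///
    /// Illustrative round trip (hypothetical ID value, not from the original docs):
    ///
    /// ```ignore
    /// let id = ObjID::new(0x1234_5678_9abc_def0_1122_3344_5566_7788u128);
    /// let parts = id.parts();
    /// assert_eq!(parts[0], 0x1234_5678_9abc_def0); // high 64 bits
    /// assert_eq!(parts[1], 0x1122_3344_5566_7788); // low 64 bits
    /// assert_eq!(ObjID::from_parts(parts), id);
    /// ```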
    pub const fn parts(&self) -> [u64; Self::NR_PARTS] {
        [(self.0 >> 64) as u64, (self.0 & 0xffffffffffffffff) as u64]
    }
}

impl core::convert::AsRef<ObjID> for ObjID {
    fn as_ref(&self) -> &ObjID {
        self
    }
}

impl From<twizzler_types::ObjID> for ObjID {
    fn from(id: twizzler_types::ObjID) -> Self {
        Self::new(id)
    }
}

impl LowerHex for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:x}", self.0)
    }
}

impl UpperHex for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:X}", self.0)
    }
}

impl core::fmt::Display for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "ObjID({:x})", self.0)
    }
}

impl core::fmt::Debug for ObjID {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "ObjID({:x})", self.0)
    }
}

/// An object handle, granting access to object memory. An object handle can be in one of two modes:
///   - Owning -- the normal mode. It acts like an `Arc`, and asks the runtime to unmap the object
///     when the refcount hits zero.
///   - Unsafe -- internal use only. It is NOT owning, but still holds pointers, so it must not be
///     exposed to users; in limited cases it is safe to use and faster than cloning.
///
/// In general, handles have reference-counting semantics via `Clone` and `Drop`, like `Arc`.
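///
/// A minimal usage sketch (assuming some object ID `id`; illustrative only):
///
/// ```ignore
/// let handle = twz_rt_map_object(id, MapFlags::READ | MapFlags::WRITE)?;
/// let alias = handle.clone();        // bumps the reference count
/// let base: *mut u8 = alias.start(); // both handles see the same mapping
/// drop(alias);                       // decrements the count
/// drop(handle);                      // count reaches zero; the runtime releases the mapping
/// ```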
#[repr(transparent)]
pub struct ObjectHandle(pub(crate) crate::bindings::object_handle);

#[cfg(not(feature = "kernel"))]
impl core::fmt::Debug for ObjectHandle {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "ObjectHandle({:?}, {:p}, {:x}, {:?})",
            self.id(),
            self.start(),
            self.valid_len(),
            self.map_flags()
        )
    }
}

unsafe impl Send for ObjectHandle {}
unsafe impl Sync for ObjectHandle {}

bitflags::bitflags! {
    /// Flags for mapping objects.
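    ///
    /// Flags combine like any bitflags type (illustrative):
    ///
    /// ```ignore
    /// let rw = MapFlags::READ | MapFlags::WRITE;
    /// assert!(rw.contains(MapFlags::READ));
    /// assert!(!rw.contains(MapFlags::EXEC));
    /// ```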
    #[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Debug)]
    pub struct MapFlags : crate::bindings::map_flags {
        /// Request READ access.
        const READ = crate::bindings::MAP_FLAG_R;
        /// Request WRITE access.
        const WRITE = crate::bindings::MAP_FLAG_W;
        /// Request EXECUTE access.
        const EXEC = crate::bindings::MAP_FLAG_X;
        /// Persist changes on flush.
        const PERSIST = crate::bindings::MAP_FLAG_PERSIST;
        /// Use runtime support for read stability.
        const INDIRECT = crate::bindings::MAP_FLAG_INDIRECT;
        /// Map the object without a null page.
        const NO_NULLPAGE = crate::bindings::MAP_FLAG_NO_NULLPAGE;
    }
}

/// Commands that can be issued to an object via `ObjectHandle::cmd`.
#[repr(u32)]
pub enum ObjectCmd {
    Delete = OBJECT_CMD_DELETE,
}

impl TryFrom<object_cmd> for ObjectCmd {
    type Error = TwzError;

    fn try_from(value: object_cmd) -> core::result::Result<Self, Self::Error> {
        match value {
            OBJECT_CMD_DELETE => Ok(ObjectCmd::Delete),
            _ => Err(TwzError::INVALID_ARGUMENT),
        }
    }
}

#[allow(dead_code)]
impl ObjectHandle {
    fn refs(&self) -> *const AtomicU64 {
        self.0.runtime_info.cast()
    }

    /// Get a pointer to the start of object data.
    pub fn start(&self) -> *mut u8 {
        self.0.start.cast()
    }

    /// Get a pointer to the metadata structure.
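    ///
    /// Reading the metadata requires the handle's mapping to be valid (illustrative sketch,
    /// given some mapped `handle`):
    ///
    /// ```ignore
    /// let meta: MetaInfo = unsafe { handle.meta().read() };
    /// let (fots, exts) = (meta.fotcount, meta.extcount);
    /// ```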
    pub fn meta(&self) -> *mut MetaInfo {
        self.0.meta.cast()
    }

    /// Get a slice of the metadata extensions.
    pub fn meta_exts(&self) -> &[MetaExt] {
        unsafe {
            core::slice::from_raw_parts(
                self.0.meta.cast::<u8>().add(size_of::<MetaInfo>()).cast(),
                (*self.meta()).extcount as usize,
            )
        }
    }

    /// Find the first metadata extension matching the given tag.
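    ///
    /// For example, looking up the `MEXT_SIZED` extension (illustrative):
    ///
    /// ```ignore
    /// if let Some(ext) = handle.find_meta_ext(MEXT_SIZED) {
    ///     let value = ext.value; // tag-specific value
    /// }
    /// ```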
    pub fn find_meta_ext(&self, tag: MetaExtTag) -> Option<&MetaExt> {
        self.meta_exts().iter().find(|e| e.tag == tag)
    }

    /// Get a pointer to the runtime info.
    pub fn runtime_info(&self) -> *mut u8 {
        self.0.runtime_info.cast()
    }

    /// Get the map flags.
    pub fn map_flags(&self) -> MapFlags {
        MapFlags::from_bits_truncate(self.0.map_flags)
    }

    /// Get the number of valid bytes of object data after the start pointer.
    pub fn valid_len(&self) -> usize {
        (self.0.valid_len as usize) * crate::bindings::LEN_MUL
    }

    /// Get the object ID.
    pub fn id(&self) -> ObjID {
        ObjID::new(self.0.id)
    }

    /// Consume this handle, returning the raw object handle without dropping the reference.
    pub fn into_raw(self) -> crate::bindings::object_handle {
        let this = core::mem::ManuallyDrop::new(self);
        this.0
    }

    /// Build an object handle from raw.
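    ///
    /// Together with [`ObjectHandle::into_raw`], this lets a handle cross an FFI boundary
    /// without touching the reference count (illustrative):
    ///
    /// ```ignore
    /// let raw = handle.into_raw();              // does not drop; count unchanged
    /// /* pass `raw` through foreign code ... */
    /// let handle = ObjectHandle::from_raw(raw); // resumes ownership of the reference
    /// ```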
    pub fn from_raw(raw: crate::bindings::object_handle) -> Self {
        Self(raw)
    }

    #[cfg(not(feature = "kernel"))]
    /// Perform a command (see [`ObjectCmd`]) on this object.
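    ///
    /// For example, deleting the object (illustrative; the meaning of `arg` is
    /// command-specific):
    ///
    /// ```ignore
    /// handle.cmd(ObjectCmd::Delete, 0)?;
    /// ```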
    pub fn cmd(&self, cmd: ObjectCmd, arg: u64) -> Result<()> {
        let err =
            unsafe { twz_rt_object_cmd(&self.0 as *const _ as *mut _, cmd as object_cmd, arg) };
        let raw = RawTwzError::new(err);
        if raw.is_success() {
            Ok(())
        } else {
            Err(raw.error())
        }
    }

    /// Make a new object handle.
    ///
    /// # Safety
    /// The caller must ensure that `runtime_info` is a valid pointer, and points to a repr(C)
    /// struct that starts with an `AtomicU64` for reference counting.
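    ///
    /// A sketch of a conforming `runtime_info` layout (the type and field names here are
    /// hypothetical; illustrative only):
    ///
    /// ```ignore
    /// #[repr(C)]
    /// struct RuntimeInfo {
    ///     refs: AtomicU64, // the reference count must be the first field
    ///     // ... runtime-specific data may follow ...
    /// }
    /// let info = Box::into_raw(Box::new(RuntimeInfo { refs: AtomicU64::new(1) }));
    /// let handle = unsafe {
    ///     ObjectHandle::new(id, info.cast(), start, meta, MapFlags::READ, valid_len)
    /// };
    /// ```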
    pub unsafe fn new(
        id: ObjID,
        runtime_info: *mut core::ffi::c_void,
        start: *mut core::ffi::c_void,
        meta: *mut core::ffi::c_void,
        map_flags: MapFlags,
        valid_len: usize,
    ) -> Self {
        Self::from_raw(crate::bindings::object_handle {
            id: id.0,
            runtime_info,
            start,
            meta,
            map_flags: map_flags.bits(),
            valid_len: (valid_len / LEN_MUL) as u32,
        })
    }
}

#[cfg(not(feature = "kernel"))]
impl Clone for ObjectHandle {
    fn clone(&self) -> Self {
        unsafe {
            let Some(ref rc) = self.refs().as_ref() else {
                return Self(self.0);
            };
            // This use of Relaxed ordering is justified by https://doc.rust-lang.org/nomicon/arc-mutex/arc-clone.html.
            let old_count = rc.fetch_add(1, Ordering::Relaxed);
            // The above link also justifies the following behavior. If our count gets this high, we
            // have probably run into a problem somewhere.
            if old_count >= i64::MAX as u64 {
                crate::core::twz_rt_abort();
            }
        }
        Self(self.0)
    }
}

#[cfg(not(feature = "kernel"))]
impl Drop for ObjectHandle {
    fn drop(&mut self) {
        unsafe {
            let Some(ref rc) = self.refs().as_ref() else {
                return;
            };
            // This use of Release ordering is justified by https://doc.rust-lang.org/nomicon/arc-mutex/arc-clone.html.
            if rc.fetch_sub(1, Ordering::Release) != 1 {
                return;
            }
        }
        // This fence is needed to prevent reordering of the use and deletion
        // of the data.
        core::sync::atomic::fence(Ordering::Acquire);
        twz_rt_release_handle(self, 0);
    }
}

impl AsRef<ObjectHandle> for ObjectHandle {
    fn as_ref(&self) -> &ObjectHandle {
        self
    }
}

#[cfg(not(feature = "kernel"))]
impl From<Result<ObjectHandle>> for crate::bindings::map_result {
    fn from(value: Result<ObjectHandle>) -> Self {
        match value {
            Ok(handle) => Self {
                handle: handle.into_raw(),
                error: RawTwzError::success().raw(),
            },
            Err(e) => Self {
                handle: crate::bindings::object_handle::default(),
                error: e.raw(),
            },
        }
    }
}

#[cfg(not(feature = "kernel"))]
impl From<crate::bindings::map_result> for Result<ObjectHandle> {
    fn from(value: crate::bindings::map_result) -> Self {
        let raw = RawTwzError::new(value.error);
        if raw.is_success() {
            Ok(ObjectHandle(value.handle))
        } else {
            Err(raw.error())
        }
    }
}

/// Map an object given by ID `id` with the given flags.
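///
/// A minimal usage sketch (assuming `id` names an existing object; illustrative only):
///
/// ```ignore
/// let handle = twz_rt_map_object(id, MapFlags::READ)?;
/// let (base, len) = (handle.start(), handle.valid_len());
/// ```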
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_map_object(id: ObjID, flags: MapFlags) -> Result<ObjectHandle> {
    unsafe { crate::bindings::twz_rt_map_object(id.raw(), flags.bits()).into() }
}

#[cfg(not(feature = "kernel"))]
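/// Get a handle to the object whose mapping contains the pointer `ptr`.
///
/// Returns `ObjectError::NotMapped` if the address does not belong to a mapped object.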
pub fn twz_rt_get_object_handle(ptr: *const u8) -> Result<ObjectHandle> {
    use crate::error::ObjectError;

    let res = unsafe { crate::bindings::twz_rt_get_object_handle((ptr as *mut u8).cast()) };
    if res.id == 0 {
        return Err(TwzError::Object(ObjectError::NotMapped));
    }
    Ok(ObjectHandle(res))
}

#[cfg(not(feature = "kernel"))]
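/// Resolve entry `idx` of the foreign object table (FOT) of the object behind `this`,
/// mapping the target object with the given flags and requested valid length.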
pub fn twz_rt_resolve_fot(
    this: &ObjectHandle,
    idx: u64,
    valid_len: usize,
    flags: MapFlags,
) -> Result<ObjectHandle> {
    unsafe {
        crate::bindings::twz_rt_resolve_fot(
            &this.0 as *const _ as *mut _,
            idx,
            valid_len,
            flags.bits(),
        )
        .into()
    }
}

impl From<Result<u32>> for crate::bindings::u32_result {
    fn from(value: Result<u32>) -> Self {
        match value {
            Ok(val) => Self {
                val,
                err: RawTwzError::success().raw(),
            },
            Err(e) => Self {
                val: 0,
                err: e.raw(),
            },
        }
    }
}

impl From<crate::bindings::u32_result> for Result<u32> {
    fn from(value: crate::bindings::u32_result) -> Self {
        let raw = RawTwzError::new(value.err);
        if raw.is_success() {
            Ok(value.val)
        } else {
            Err(raw.error())
        }
    }
}

#[cfg(not(feature = "kernel"))]
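/// Insert the FOT entry pointed to by `entry` into the foreign object table of the object
/// behind `this`, returning the index of the new entry.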
pub fn twz_rt_insert_fot(this: &ObjectHandle, entry: *const u8) -> Result<u32> {
    unsafe {
        let res = crate::bindings::twz_rt_insert_fot(
            &this.0 as *const _ as *mut _,
            (entry as *mut u8).cast(),
        );
        res.into()
    }
}

#[cfg(not(feature = "kernel"))]
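/// Resolve FOT entry `idx` for the object whose data starts at `start`, returning a raw
/// pointer into the target instead of a new handle.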
pub fn twz_rt_resolve_fot_local(
    start: *mut u8,
    idx: u64,
    valid_len: usize,
    flags: MapFlags,
) -> *mut u8 {
    unsafe {
        let res =
            crate::bindings::twz_rt_resolve_fot_local(start.cast(), idx, valid_len, flags.bits());
        res.cast()
    }
}

#[cfg(not(feature = "kernel"))]
use crate::bindings::release_flags;

/// Release a handle. This should only be called by [`ObjectHandle`]'s `Drop` implementation.
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_release_handle(handle: &mut ObjectHandle, flags: release_flags) {
    unsafe { crate::bindings::twz_rt_release_handle(&mut handle.0, flags) }
}

/// Update a handle.
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_update_handle(handle: &mut ObjectHandle) -> Result<()> {
    let r = unsafe { crate::bindings::twz_rt_update_handle(&mut handle.0) };
    let r = RawTwzError::new(r);
    if r.is_success() {
        Ok(())
    } else {
        Err(r.error())
    }
}

#[deprecated]
#[cfg(not(feature = "kernel"))]
pub fn twz_rt_map_two_objects(
    id1: ObjID,
    flags1: MapFlags,
    id2: ObjID,
    flags2: MapFlags,
) -> Result<(ObjectHandle, ObjectHandle)> {
    unsafe {
        let mut res1 = MaybeUninit::uninit();
        let mut res2 = MaybeUninit::uninit();
        crate::bindings::__twz_rt_map_two_objects(
            id1.raw(),
            flags1.bits(),
            id2.raw(),
            flags2.bits(),
            res1.as_mut_ptr(),
            res2.as_mut_ptr(),
        );

        let res1 = res1.assume_init();
        let res2 = res2.assume_init();

        let res1: Result<ObjectHandle> = res1.into();
        let res2: Result<ObjectHandle> = res2.into();

        Ok((res1?, res2?))
    }
}

bitflags::bitflags! {
    /// Mapping protections for mapping objects into the address space.
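    ///
    /// Protections convert to and from [`MapFlags`] (illustrative):
    ///
    /// ```ignore
    /// let prot = Protections::READ | Protections::EXEC;
    /// let flags: MapFlags = prot.into();
    /// assert!(flags.contains(MapFlags::EXEC) && !flags.contains(MapFlags::WRITE));
    /// ```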
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct Protections: u16 {
        /// Read allowed.
        const READ = crate::bindings::MAP_FLAG_R as u16;
        /// Write allowed.
        const WRITE = crate::bindings::MAP_FLAG_W as u16;
        /// Exec allowed.
        const EXEC = crate::bindings::MAP_FLAG_X as u16;
    }
}

impl From<Protections> for MapFlags {
    fn from(p: Protections) -> Self {
        let mut f = MapFlags::empty();
        if p.contains(Protections::READ) {
            f.insert(MapFlags::READ);
        }

        if p.contains(Protections::WRITE) {
            f.insert(MapFlags::WRITE);
        }

        if p.contains(Protections::EXEC) {
            f.insert(MapFlags::EXEC);
        }
        f
    }
}

impl From<MapFlags> for Protections {
    fn from(value: MapFlags) -> Self {
        let mut f = Self::empty();
        if value.contains(MapFlags::READ) {
            f.insert(Protections::READ);
        }
        if value.contains(MapFlags::WRITE) {
            f.insert(Protections::WRITE);
        }
        if value.contains(MapFlags::EXEC) {
            f.insert(Protections::EXEC);
        }
        f
    }
}

bitflags::bitflags! {
    /// Flags for objects.
    #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Default)]
    pub struct MetaFlags: u16 {
        /// The object is immutable.
        const IMMUTABLE = 1;
    }
}

/// A nonce for avoiding object ID collisions.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Default)]
#[repr(transparent)]
pub struct Nonce(pub u128);

/// The core metadata that all objects share.
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
#[repr(C)]
pub struct MetaInfo {
    /// The ID nonce.
    pub nonce: Nonce,
    /// The object's public key ID.
    pub kuid: ObjID,
    /// The object flags.
    pub flags: MetaFlags,
    /// Default protections.
    pub default_prot: Protections,
    /// The number of FOT entries.
    pub fotcount: u16,
    /// The number of meta extensions.
    pub extcount: u16,
}

/// A tag for a meta extension entry.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)]
#[repr(transparent)]
pub struct MetaExtTag(pub u64);

/// A meta extension entry.
#[repr(C)]
pub struct MetaExt {
    /// The tag.
    pub tag: MetaExtTag,
    /// A tag-specific value.
    pub value: u64,
}

/// Meta extension tag: empty entry.
pub const MEXT_EMPTY: MetaExtTag = MetaExtTag(0);
/// Meta extension tag: sized object.
pub const MEXT_SIZED: MetaExtTag = MetaExtTag(1);