//! object_store/paged_object_store.rs — paged object store abstractions.

/// Object identifier: a 128-bit ID, matching Twizzler's object-ID width.
pub type ObjID = u128;
2
3use core::str;
4use std::{io::ErrorKind, ops::Add};
5
6#[cfg(target_os = "twizzler")]
7use twizzler::Result;
8#[cfg(target_os = "twizzler")]
9pub use twizzler_abi::pager::PhysRange;
10
/// A contiguous range of physical memory, `[start, end)`, in bytes.
/// Host-side stand-in for `twizzler_abi::pager::PhysRange` when not
/// building for Twizzler.
#[cfg(not(target_os = "twizzler"))]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct PhysRange {
    pub start: u64,
    pub end: u64,
}
17#[cfg(not(target_os = "twizzler"))]
18use std::io::Result;
19
20use crate::PAGE_SIZE;
21
// Flag bits for `PagedPhysMem::flags`.
const PAGED_MEM_WIRED: u32 = 1; // memory is wired (presumably pinned; enforcement is elsewhere — TODO confirm)
const PAGED_MEM_COMPLETED: u32 = 2; // memory already holds valid data; no device I/O needed
/// A physical memory range together with paging-state flags (wired / completed).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct PagedPhysMem {
    /// The physical byte range backing this allocation.
    pub range: PhysRange,
    // Bitwise OR of the PAGED_MEM_* bits; read/written via the is_*/set_* methods.
    flags: u32,
}
29
/// Adding a nonzero value yields the single page immediately after this range
/// (flags preserved); adding zero is the identity.
///
/// NOTE(review): the magnitude of `rhs` is ignored beyond the zero/nonzero
/// distinction — `mem + 3` produces the same one-page range as `mem + 1`.
/// Confirm with callers whether stepping by more than one page is ever needed.
impl core::ops::Add<u64> for PagedPhysMem {
    type Output = Self;

    fn add(self, rhs: u64) -> Self::Output {
        if rhs == 0 {
            // Identity: same range, same flags.
            Self {
                range: self.range,
                flags: self.flags,
            }
        } else {
            // Any nonzero step: one page starting where this range ends.
            Self {
                range: PhysRange {
                    start: self.range.end,
                    end: self.range.end + PAGE_SIZE as u64,
                },
                flags: self.flags,
            }
        }
    }
}
50
51impl PagedPhysMem {
52    pub fn new(range: PhysRange) -> Self {
53        PagedPhysMem { range, flags: 0 }
54    }
55
56    pub fn is_completed(&self) -> bool {
57        self.flags & PAGED_MEM_COMPLETED != 0
58    }
59
60    pub fn is_wired(&self) -> bool {
61        self.flags & PAGED_MEM_WIRED != 0
62    }
63
64    pub fn set_completed(&mut self) {
65        self.flags |= PAGED_MEM_COMPLETED;
66    }
67
68    pub fn completed(mut self) -> Self {
69        self.set_completed();
70        self
71    }
72
73    pub fn wired(mut self) -> Self {
74        self.set_wired();
75        self
76    }
77
78    pub fn set_wired(&mut self) {
79        self.flags |= PAGED_MEM_WIRED;
80    }
81
82    pub fn len(&self) -> usize {
83        (self.range.end - self.range.start) as usize
84    }
85
86    pub fn nr_pages(&self) -> usize {
87        self.len() / PAGE_SIZE
88    }
89}
90
/// A stretch of pages on the backing device: either `len` consecutive pages
/// beginning at a device page address (`Run`), or `len` unbacked pages
/// (`Hole`).
#[derive(Clone, Copy, Debug)]
pub enum DevicePage {
    Run(u64, u32),
    Hole(u32),
}

impl DevicePage {
    /// Coalesce a per-page address array into runs and holes. An address of 0
    /// marks a hole; adjacent compatible entries merge via [`Self::try_extend`].
    pub fn from_array(array: &[u64]) -> Vec<Self> {
        let mut runs: Vec<Self> = Vec::new();
        for &addr in array {
            let next = if addr == 0 {
                DevicePage::Hole(1)
            } else {
                DevicePage::Run(addr, 1)
            };
            // Try to fold the new single-page entry into the previous one.
            let absorbed = runs
                .last_mut()
                .map_or(false, |tail| tail.try_extend(&next));
            if !absorbed {
                runs.push(next);
            }
        }
        runs
    }

    /// Try to absorb `other` into `self`. Holes always merge with holes; runs
    /// merge only when `other` starts exactly where `self` ends. Returns
    /// whether the merge happened; on `false`, `self` is left untouched.
    pub fn try_extend(&mut self, other: &DevicePage) -> bool {
        let merged = match (*self, *other) {
            (DevicePage::Hole(a), DevicePage::Hole(b)) => Some(DevicePage::Hole(a + b)),
            (DevicePage::Run(base, a), DevicePage::Run(start, b))
                if base + a as u64 == start =>
            {
                Some(DevicePage::Run(base, a + b))
            }
            _ => None,
        };
        match merged {
            Some(m) => {
                *self = m;
                true
            }
            None => false,
        }
    }

    /// Number of pages this entry covers.
    pub fn nr_pages(&self) -> usize {
        match *self {
            DevicePage::Run(_, len) | DevicePage::Hole(len) => len as usize,
        }
    }

    /// The hole length, or `None` if this is a run.
    pub fn as_hole(&self) -> Option<u32> {
        if let DevicePage::Hole(len) = *self {
            Some(len)
        } else {
            None
        }
    }

    /// Consume up to `*avail_len` pages from the front of this entry,
    /// shrinking it in place (a run also advances its start address).
    /// On return, `*avail_len` holds the number of pages actually consumed.
    pub fn offset(&mut self, avail_len: &mut usize) {
        let budget = *avail_len as u32;
        *self = match *self {
            DevicePage::Run(start, len) => {
                let remaining = len.saturating_sub(budget);
                let consumed = len - remaining;
                *avail_len = consumed as usize;
                DevicePage::Run(start + consumed as u64, remaining)
            }
            DevicePage::Hole(len) => {
                let remaining = len.saturating_sub(budget);
                let consumed = len - remaining;
                *avail_len = consumed as usize;
                DevicePage::Hole(remaining)
            }
        };
    }
}
171
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_try_extend_holes() {
        let mut hole1 = DevicePage::Hole(10);
        let hole2 = DevicePage::Hole(5);

        assert!(hole1.try_extend(&hole2));
        match hole1 {
            DevicePage::Hole(len) => assert_eq!(len, 15),
            _ => panic!("Expected Hole"),
        }
    }

    #[test]
    fn test_try_extend_consecutive_runs() {
        let mut run1 = DevicePage::Run(100, 10);
        let run2 = DevicePage::Run(110, 5);

        assert!(run1.try_extend(&run2));
        match run1 {
            DevicePage::Run(start, len) => {
                assert_eq!(start, 100);
                assert_eq!(len, 15);
            }
            _ => panic!("Expected Run"),
        }
    }

    #[test]
    fn test_try_extend_non_consecutive_runs() {
        let mut run1 = DevicePage::Run(100, 10);
        let run2 = DevicePage::Run(120, 5);

        assert!(!run1.try_extend(&run2));
        match run1 {
            DevicePage::Run(start, len) => {
                assert_eq!(start, 100);
                assert_eq!(len, 10); // Should remain unchanged
            }
            _ => panic!("Expected Run"),
        }
    }

    #[test]
    fn test_try_extend_mixed_types() {
        let mut hole = DevicePage::Hole(10);
        let run = DevicePage::Run(100, 5);

        assert!(!hole.try_extend(&run));
        match hole {
            DevicePage::Hole(len) => assert_eq!(len, 10), // Should remain unchanged
            _ => panic!("Expected Hole"),
        }

        let mut run = DevicePage::Run(100, 10);
        let hole = DevicePage::Hole(5);

        assert!(!run.try_extend(&hole));
        match run {
            DevicePage::Run(start, len) => {
                assert_eq!(start, 100);
                assert_eq!(len, 10); // Should remain unchanged
            }
            _ => panic!("Expected Run"),
        }
    }

    // New: coverage for from_array coalescing (runs merge, zeros become holes).
    #[test]
    fn test_from_array_coalescing() {
        let pages = DevicePage::from_array(&[5, 6, 7, 0, 0, 9]);
        assert_eq!(pages.len(), 3);
        assert!(matches!(pages[0], DevicePage::Run(5, 3)));
        assert!(matches!(pages[1], DevicePage::Hole(2)));
        assert!(matches!(pages[2], DevicePage::Run(9, 1)));
        assert!(DevicePage::from_array(&[]).is_empty());
    }

    // New: coverage for offset (front consumption + consumed-count writeback).
    #[test]
    fn test_offset_consumes_from_front() {
        let mut run = DevicePage::Run(100, 10);
        let mut avail = 4usize;
        run.offset(&mut avail);
        assert!(matches!(run, DevicePage::Run(104, 6)));
        assert_eq!(avail, 4); // reports pages consumed

        let mut hole = DevicePage::Hole(3);
        let mut avail = 10usize;
        hole.offset(&mut avail);
        assert!(matches!(hole, DevicePage::Hole(0)));
        assert_eq!(avail, 3); // capped at the hole length
    }
}
242
/// A device that can service paged transfers against physical memory ranges.
pub trait PagedDevice {
    /// Append the needed paged phys mem for this device page, return the number of appended pages.
    ///
    /// The default implementation reports `Unsupported`.
    fn phys_addrs(&self, _start: DevicePage, _phys_list: &mut Vec<PagedPhysMem>) -> Result<usize> {
        Err(std::io::ErrorKind::Unsupported.into())
    }

    // Read device pages starting at `start` into the ranges in `list`,
    // returning a progress count. NOTE(review): callers in this file
    // (`PageRequest::page_in`) treat the return as a count of ranges in
    // `list` that were processed — confirm against implementors.
    fn sequential_read(&self, start: u64, list: &[PhysRange]) -> Result<usize>;
    // Write the ranges in `list` to the device starting at page `start`,
    // returning a progress count (same convention as `sequential_read`).
    fn sequential_write(&self, start: u64, list: &[PhysRange]) -> Result<usize>;

    // Total device length. Units (bytes vs pages) are not evident from this
    // file — TODO confirm with implementors.
    fn len(&self) -> Result<usize>;
}
254
/// A single paging request: which object pages to transfer, and the physical
/// memory backing the transfer.
#[derive(Debug)]
pub struct PageRequest {
    /// First object page of the request. Signed — the meaning of negative
    /// values is not shown in this file; TODO confirm with callers.
    pub start_page: i64,
    /// Number of pages requested.
    pub nr_pages: u32,
    /// Number of pages already marked completed (no I/O needed).
    pub completed: u32,
    /// Physical memory ranges backing the request, in transfer order.
    pub phys_list: Vec<PagedPhysMem>,
}
262
impl PageRequest {
    /// Create a request for `nr_pages` pages starting at object page
    /// `start_page`, with no physical memory attached yet.
    pub fn new(start_page: i64, nr_pages: u32) -> Self {
        Self {
            start_page,
            phys_list: Vec::new(),
            nr_pages,
            completed: 0,
        }
    }

    /// Create a request that already owns its physical memory list.
    pub fn new_from_list(phys_list: Vec<PagedPhysMem>, start_page: i64, nr_pages: u32) -> Self {
        Self {
            start_page,
            phys_list,
            nr_pages,
            completed: 0,
        }
    }

    /// Consume the request, returning its physical memory list.
    pub fn into_list(self) -> Vec<PagedPhysMem> {
        self.phys_list
    }

    /// Rebuild `phys_list` by asking `device` for physical memory covering
    /// `disk_pages`, then recompute `nr_pages`/`completed` from what was
    /// actually obtained. Allocation stops early (without error) when the
    /// device makes no progress, or on out-of-memory after at least one
    /// range was obtained.
    fn setup_phys(&mut self, disk_pages: &[DevicePage], device: &dyn PagedDevice) -> Result<()> {
        // TODO: recover these
        self.phys_list.clear();
        for page in disk_pages {
            let mut count = 0;
            while count < page.nr_pages() {
                match device.phys_addrs(*page, &mut self.phys_list) {
                    Ok(r) => {
                        if r == 0 {
                            // No progress; stop asking for this disk page.
                            break;
                        }
                        count += r;
                    }
                    // NOTE(review): the guard consumes `e` via `Into`, which
                    // relies on the error type being `Copy` (as on Twizzler);
                    // `std::io::Error` is not `Copy` — verify the host build.
                    Err(e) if Into::<std::io::Error>::into(e).kind() == ErrorKind::OutOfMemory => {
                        // Out of memory is fatal only if nothing was obtained
                        // at all; otherwise continue with a partial list.
                        if self.phys_list.is_empty() {
                            return Err(e);
                        } else {
                            break;
                        }
                    }
                    Err(e) => Err(e)?,
                }
            }

            if count < page.nr_pages() {
                // This disk page could not be fully backed; don't start the next.
                break;
            }
        }

        // Recompute totals from the ranges actually allocated.
        self.nr_pages =
            self.phys_list
                .iter()
                .fold(0usize, |acc, range| acc + range.len() / PAGE_SIZE) as u32;
        self.completed =
            self.phys_list
                .iter()
                .filter(|p| p.is_completed())
                .fold(0usize, |acc, range| acc + range.len() / PAGE_SIZE) as u32;
        Ok(())
    }

    /// Allocate physical memory for `disk_pages` and read their contents from
    /// `device` into it. Returns the number of pages walked. Holes in
    /// `disk_pages` consume physical ranges but trigger no device read.
    pub fn page_in(
        &mut self,
        disk_pages: &[DevicePage],
        device: &dyn PagedDevice,
    ) -> Result<usize> {
        self.setup_phys(disk_pages, device)?;
        if self.phys_list.iter().all(|p| p.is_completed()) {
            // Everything is already resident; no device I/O needed.
            return Ok(self.nr_pages as usize);
        }

        // cursor/inner_cursor walk phys_list across disk pages: cursor selects
        // a PagedPhysMem, inner_cursor is the page offset inside it.
        let mut cursor = 0;
        let mut inner_cursor = 0;
        let mut tfer_count = 0;
        let mut tmp: Vec<PhysRange> = Vec::new();
        for disk_page in disk_pages {
            let mut count = 0;
            tmp.clear();
            // Carve page-aligned subranges out of phys_list until this disk
            // page is covered (or phys_list runs out).
            while count < disk_page.nr_pages() {
                let thislen = (disk_page.nr_pages() - count)
                    .min(self.phys_list[cursor].nr_pages() - inner_cursor);

                let new_range = PhysRange {
                    start: self.phys_list[cursor].range.start + (inner_cursor * PAGE_SIZE) as u64,
                    end: self.phys_list[cursor].range.start
                        + (inner_cursor * PAGE_SIZE) as u64
                        + (thislen * PAGE_SIZE) as u64,
                };

                tmp.push(new_range);

                inner_cursor += thislen;
                if inner_cursor >= self.phys_list[cursor].nr_pages() {
                    // Exhausted this phys range; move to the next one.
                    cursor += 1;
                    inner_cursor = 0;
                    if cursor >= self.phys_list.len() {
                        // NOTE(review): breaking here skips the final
                        // `count += thislen`, so the last chunk is not
                        // reflected in tfer_count — confirm intended.
                        break;
                    }
                }

                count += thislen;
            }

            // Only runs hit the device; holes are skipped.
            if let DevicePage::Run(start, _len) = disk_page {
                let mut count = 0;
                while count < tmp.len() {
                    // NOTE(review): `count` indexes ranges in `tmp` but is also
                    // added to the device page address `start` — confirm the
                    // units (ranges vs pages) line up with sequential_read.
                    let r = device.sequential_read(*start + count as u64, &tmp[count..])?;
                    count += r;
                }
            }

            tfer_count += count;

            if cursor >= self.phys_list.len() {
                break;
            }
        }

        Ok(tfer_count)
    }

    /// Write the request's physical memory back to `device` according to
    /// `disk_pages`. Mirrors [`Self::page_in`] but uses the already-attached
    /// `phys_list` (no `setup_phys`) and writes instead of reads.
    pub fn page_out(
        &mut self,
        disk_pages: &[DevicePage],
        device: &dyn PagedDevice,
    ) -> Result<usize> {
        // Same cursor walk as page_in; see the notes there.
        let mut cursor = 0;
        let mut inner_cursor = 0;
        let mut tfer_count = 0;
        let mut tmp: Vec<PhysRange> = Vec::new();
        for disk_page in disk_pages {
            let mut count = 0;
            tmp.clear();
            while count < disk_page.nr_pages() {
                let thislen = (disk_page.nr_pages() - count)
                    .min(self.phys_list[cursor].nr_pages() - inner_cursor);

                let new_range = PhysRange {
                    start: self.phys_list[cursor].range.start + (inner_cursor * PAGE_SIZE) as u64,
                    end: self.phys_list[cursor].range.start
                        + (inner_cursor * PAGE_SIZE) as u64
                        + (thislen * PAGE_SIZE) as u64,
                };

                tmp.push(new_range);

                inner_cursor += thislen;
                if inner_cursor >= self.phys_list[cursor].nr_pages() {
                    cursor += 1;
                    inner_cursor = 0;
                    if cursor >= self.phys_list.len() {
                        break;
                    }
                }

                count += thislen;
            }

            // Only runs hit the device; holes are skipped.
            if let DevicePage::Run(start, _len) = disk_page {
                let mut count = 0;
                while count < tmp.len() {
                    let r = device.sequential_write(*start + count as u64, &tmp[count..])?;
                    count += r;
                }
            }

            tfer_count += count;

            if cursor >= self.phys_list.len() {
                break;
            }
        }

        Ok(tfer_count)
    }
}
442
443pub trait PagedObjectStore {
444    fn get_config_id(&self) -> Result<ObjID> {
445        let mut buf = [0; 16];
446        self.read_object(0, 0, &mut buf).and_then(|len| {
447            if len == 16 && buf.iter().find(|x| **x != 0).is_some() {
448                Ok(ObjID::from_le_bytes(buf))
449            } else {
450                Err(ErrorKind::InvalidData.into())
451            }
452        })
453    }
454
455    fn set_config_id(&self, id: ObjID) -> Result<()> {
456        let _ = self.delete_object(0);
457        self.create_object(0)?;
458        self.write_object(0, 0, &id.to_le_bytes())
459    }
460
461    fn create_object(&self, id: ObjID) -> Result<()>;
462    fn delete_object(&self, id: ObjID) -> Result<()>;
463
464    fn len(&self, id: ObjID) -> Result<u64>;
465
466    fn read_object(&self, id: ObjID, offset: u64, buf: &mut [u8]) -> Result<usize>;
467    fn write_object(&self, id: ObjID, offset: u64, buf: &[u8]) -> Result<()>;
468
469    fn page_in_object<'a>(&self, id: ObjID, reqs: &'a mut [PageRequest]) -> Result<usize>;
470    fn page_out_object<'a>(&self, id: ObjID, reqs: &'a mut [PageRequest]) -> Result<usize>;
471
472    fn flush(&self) -> Result<()> {
473        Ok(())
474    }
475
476    fn enumerate_external(&self, _id: ObjID) -> Result<Vec<ExternalFile>> {
477        Err(ErrorKind::Unsupported.into())
478    }
479
480    fn find_external(&self, _id: ObjID) -> Result<usize> {
481        Err(ErrorKind::Unsupported.into())
482    }
483}
484
/// Maximum length of an external file path, in bytes.
pub const MAX_EXTERNAL_PATH: usize = 4096;
/// Maximum length of an external file name, in bytes.
pub const NAME_MAX: usize = 256;
487
/// Fixed-size, C-layout record describing a file external to the object store.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)]
#[repr(C)]
pub struct ExternalFile {
    /// Object ID associated with this file.
    pub id: ObjID,
    /// Name bytes; only the first `name_len` bytes are meaningful.
    pub name: [u8; NAME_MAX],
    /// Number of valid bytes in `name`.
    pub name_len: u32,
    /// File type (regular, directory, symlink, other).
    pub kind: ExternalKind,
}
496
497impl ExternalFile {
498    pub fn new(iname: &[u8], kind: ExternalKind, id: ObjID) -> Self {
499        let name_len = iname.len().min(NAME_MAX);
500        let sname = &iname[0..name_len];
501        let mut name = [0; NAME_MAX];
502        name[0..name_len].copy_from_slice(&sname);
503        Self {
504            id,
505            name,
506            kind,
507            name_len: name_len as u32,
508        }
509    }
510
511    pub fn name(&self) -> Option<&str> {
512        str::from_utf8(&self.name[0..(self.name_len as usize)]).ok()
513    }
514}
515
/// The type of an external file entry; `u32` representation for a stable
/// C-compatible layout (see `ExternalFile`'s `#[repr(C)]`).
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)]
#[repr(u32)]
pub enum ExternalKind {
    Regular,
    Directory,
    SymLink,
    Other,
}
524
525pub fn objid_to_ino(id: ObjID) -> Option<u32> {
526    if id == 1 {
527        return Some(0);
528    };
529    let (hi, lo) = ((id >> 64) as u64, id as u64);
530    if hi == (1u64 << 63) {
531        let ino = lo & !(1u64 << 63);
532        Some(ino as u32)
533    } else {
534        None
535    }
536}
537
538pub fn ino_to_objid(ino: u32) -> ObjID {
539    if ino == 0 {
540        return 1;
541    }
542    (1u128 << 127) | (ino as u128) | (1u128 << 63)
543}
544
/// Split `data` into maximal runs in which each element is exactly one
/// greater than its predecessor (`prev + 1 == next`), yielding each run as a
/// subslice. An empty input yields no slices.
pub(crate) fn _consecutive_slices<T: PartialEq + Add<u64> + Copy>(
    data: &[T],
) -> impl Iterator<Item = &[T]>
where
    T::Output: PartialEq<T>,
{
    let mut runs = Vec::new();
    let mut begin = 0;
    for i in 1..=data.len() {
        // A run ends at the input's end, or wherever consecutiveness breaks.
        if i == data.len() || data[i - 1] + 1u64 != data[i] {
            runs.push(&data[begin..i]);
            begin = i;
        }
    }
    runs.into_iter()
}
562
/// Positional I/O: read and write at absolute offsets, with no seek cursor.
pub trait PosIo {
    /// Read into `buf` starting at absolute offset `start`; returns bytes read.
    fn read(&self, start: u64, buf: &mut [u8]) -> Result<usize>;
    /// Write `buf` starting at absolute offset `start`; returns bytes written.
    fn write(&self, start: u64, buf: &[u8]) -> Result<usize>;
}