display_core/
lib.rs

use std::{
    cell::UnsafeCell,
    ops::{Index, IndexMut},
    sync::atomic::{AtomicU32, AtomicU64, Ordering},
    u32,
};

use twizzler::{
    error::TwzError,
    object::{ObjID, Object, ObjectBuilder, TypedObject},
    ptr::{InvPtr, RefSlice, RefSliceMut},
    BaseType, Invariant,
};
use twizzler_abi::syscall::{
    sys_thread_sync, ThreadSync, ThreadSyncFlags, ThreadSyncOp, ThreadSyncReference,
    ThreadSyncSleep, ThreadSyncWake,
};

#[derive(Clone)]
/// An object holding a double-buffered compositor surface for a window.
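///
/// The application fills one buffer via [`BufferObject::update_buffer`] and publishes
/// it with [`BufferObject::flip`]; the compositor reads the other buffer via
/// [`BufferObject::read_buffer`] and acknowledges it with [`BufferObject::read_done`].
///
/// A minimal application-side sketch (size and pixel values are arbitrary, error
/// handling elided):
///
/// ```ignore
/// let bo = BufferObject::create_new(640, 480).unwrap();
/// bo.update_buffer(|mut buf, w, h| {
///     // Fill the whole surface and mark all of it as damaged.
///     buf.as_slice_mut().fill(0xff00_00ff);
///     buf.damage(Rect::new(0, 0, w, h));
/// });
/// bo.flip();
/// ```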
pub struct BufferObject {
    obj: Object<DisplayBufferBase>,
}

impl From<Object<DisplayBufferBase>> for BufferObject {
    fn from(obj: Object<DisplayBufferBase>) -> Self {
        Self { obj }
    }
}

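// Bits in `DisplayBufferBase::flags`: `DBF_PHASE` selects which of the two buffers is
// currently compositor-owned, and `DBF_COMP_DONE` is set by `read_done` once the
// compositor has consumed the front buffer, allowing the next `flip` to proceed.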
const DBF_PHASE: u64 = 0x1;
const DBF_COMP_DONE: u64 = 0x2;

const MAX_W: u64 = 8192;
const MAX_H: u64 = 8192;
const MAX_BUFFER_SIZE: u64 = MAX_W * MAX_H * 4;

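/// Base of a buffer object: the shared flags word plus the two buffers that are
/// swapped between the application and the compositor.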
#[derive(Invariant, BaseType)]
pub struct DisplayBufferBase {
    pub flags: AtomicU64,
    pub buffers: [DisplayBuffer; 2],
}

#[derive(Invariant, Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Rect {
    pub x: u32,
    pub y: u32,
    pub w: u32,
    pub h: u32,
}

impl From<WindowConfig> for Rect {
    fn from(value: WindowConfig) -> Self {
        Self {
            x: value.x,
            y: value.y,
            w: value.w,
            h: value.h,
        }
    }
}

impl Rect {
    pub const fn new(x: u32, y: u32, w: u32, h: u32) -> Self {
        Self { x, y, w, h }
    }

    pub const fn full() -> Self {
        Self {
            x: 0,
            y: 0,
            w: u32::MAX,
            h: u32::MAX,
        }
    }

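    /// Returns true if any single rect in `rects` fully contains `self`.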
    pub fn is_covered_by_any(&self, rects: &[Rect]) -> bool {
        for rect in rects {
            if rect.x <= self.x
                && rect.y <= self.y
                && rect.x + rect.w >= self.x + self.w
                && rect.y + rect.h >= self.y + self.h
            {
                return true;
            }
        }
        false
    }

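    /// Returns the bounding rectangle of `rects` (a zero-sized rect at the origin if
    /// the slice is empty).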
    pub fn extent_of(rects: &[Rect]) -> Self {
        let x = rects.iter().min_by_key(|r| r.x).map(|r| r.x).unwrap_or(0);
        let y = rects.iter().min_by_key(|r| r.y).map(|r| r.y).unwrap_or(0);
        Rect {
            x,
            y,
            w: rects
                .iter()
                .max_by_key(|r| r.x + r.w)
                .map(|r| r.x + r.w)
                .unwrap_or(0)
                - x,
            h: rects
                .iter()
                .max_by_key(|r| r.y + r.h)
                .map(|r| r.y + r.h)
                .unwrap_or(0)
                - y,
        }
    }
}

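// Each buffer tracks up to `NUM_DAMAGE` damage rects; once the list fills up it is
// collapsed into a single bounding rect. A `damage_count` of `FULL_DAMAGE` is a
// sentinel meaning "treat the entire buffer as damaged".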
const NUM_DAMAGE: usize = 8;
const FULL_DAMAGE: u64 = 0xFFFFFFFFFFFFFFFF;

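/// One of the two pixel buffers. `comp_width`/`comp_height` hold the size the
/// compositor wants the next frame drawn at, `width`/`height` record the size the
/// application actually drew, `ptr` points to the pixel data, and `damage` with
/// `damage_count` track which regions changed since the last flip.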
#[derive(Invariant)]
pub struct DisplayBuffer {
    pub comp_width: AtomicU32,
    pub comp_height: AtomicU32,
    pub width: AtomicU32,
    pub height: AtomicU32,
    pub byte_len: u64,
    pub ptr: InvPtr<u32>,
    pub damage: UnsafeCell<[Rect; NUM_DAMAGE]>,
    pub damage_count: AtomicU64,
}

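/// A read-only view of a buffer's pixels and damage list, handed to the callback of
/// [`BufferObject::read_buffer`].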
pub struct Buffer<'a> {
    buffer: &'a [u32],
    db: &'a DisplayBuffer,
}

impl<'a> AsRef<[u32]> for Buffer<'a> {
    fn as_ref(&self) -> &[u32] {
        self.buffer
    }
}

impl<'a> Index<usize> for Buffer<'a> {
    type Output = u32;

    fn index(&self, index: usize) -> &Self::Output {
        &self.buffer[index]
    }
}

impl<'a> Buffer<'a> {
    fn new(s: &mut RefSlice<'a, u32>, db: &'a DisplayBuffer) -> Self {
        Self {
            buffer: s.as_slice(),
            db,
        }
    }

    pub fn damage_rects(&self) -> &[Rect] {
        unsafe { self.db.damage() }
    }

    pub fn len(&self) -> usize {
        self.buffer.len()
    }

    pub fn as_slice(&self) -> &[u32] {
        self.buffer
    }
}

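/// A mutable view of a buffer's pixels, handed to the callback of
/// [`BufferObject::update_buffer`]. Use [`BufferMut::damage`] to record the regions
/// that were changed.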
pub struct BufferMut<'a> {
    buffer: &'a mut [u32],
    db: &'a DisplayBuffer,
}

impl<'a> AsRef<[u32]> for BufferMut<'a> {
    fn as_ref(&self) -> &[u32] {
        self.buffer
    }
}

impl<'a> AsMut<[u32]> for BufferMut<'a> {
    fn as_mut(&mut self) -> &mut [u32] {
        self.buffer
    }
}

impl<'a> Index<usize> for BufferMut<'a> {
    type Output = u32;

    fn index(&self, index: usize) -> &Self::Output {
        &self.buffer[index]
    }
}

impl<'a> IndexMut<usize> for BufferMut<'a> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.buffer[index]
    }
}

impl<'a> BufferMut<'a> {
    fn new(s: &mut RefSliceMut<'a, u32>, db: &'a DisplayBuffer) -> Self {
        Self {
            buffer: s.as_slice_mut(),
            db,
        }
    }

    pub fn damage_rects(&self) -> &[Rect] {
        unsafe { self.db.damage() }
    }

    pub fn damage(&self, dmg: Rect) {
        unsafe { self.db.append_damage(dmg) };
    }

    pub fn len(&self) -> usize {
        self.buffer.len()
    }

    pub fn as_slice(&self) -> &[u32] {
        self.buffer
    }

    pub fn as_slice_mut(&mut self) -> &mut [u32] {
        self.buffer
    }
}

impl DisplayBuffer {
    unsafe fn buffer_mut(&self) -> RefSliceMut<u32> {
        let ptr = self.ptr.resolve().as_mut();
        let slice = RefSliceMut::from_ref(ptr, self.byte_len as usize);
        slice
    }

    unsafe fn buffer(&self) -> RefSlice<u32> {
        let ptr = self.ptr.resolve();
        let slice = RefSlice::from_ref(ptr, self.byte_len as usize);
        slice
    }

    unsafe fn append_damage(&self, dmg: Rect) {
        let current_count = self.damage_count.load(Ordering::SeqCst);
        if current_count == FULL_DAMAGE {
            return;
        }
        let damage = self.damage.get().as_mut().unwrap_unchecked();
        let mut current_count = current_count as usize;
        if current_count == NUM_DAMAGE {
            // The damage list is full: collapse the existing rects into their
            // bounding extent, then fall through so the new rect is still recorded.
            damage[0] = Rect::extent_of(&damage[..current_count]);
            self.damage_count.store(1, Ordering::Release);
            current_count = 1;
        }

        if dmg.is_covered_by_any(&damage[..current_count]) {
            return;
        }

        damage[current_count] = dmg;
        self.damage_count.fetch_add(1, Ordering::Release);
    }
    unsafe fn reset_damage(&self) {
        self.damage_count.store(0, Ordering::SeqCst);
    }

    unsafe fn damage(&self) -> &[Rect] {
        const FD: [Rect; 1] = [Rect::full()];
        let count = self.damage_count.load(Ordering::Acquire);
        if count == FULL_DAMAGE {
            return &FD;
        }
        &self.damage.get().as_ref().unwrap_unchecked()[..count as usize]
    }
}

impl BufferObject {
    pub fn id(&self) -> ObjID {
        self.obj.id()
    }

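    /// Create a new buffer object with both buffers initialized to `w` x `h` and
    /// `byte_len` set to `MAX_BUFFER_SIZE`.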
    pub fn create_new(w: u32, h: u32) -> Result<Self, TwzError> {
        let builder = ObjectBuilder::default();
        let obj = builder.build_inplace(|mut obj| {
            let buf1 = obj.static_alloc(0).unwrap();
            let buf2 = obj.static_alloc(0).unwrap();

            let base = DisplayBufferBase {
                flags: AtomicU64::new(0),
                buffers: [
                    DisplayBuffer {
                        comp_width: AtomicU32::new(w),
                        comp_height: AtomicU32::new(h),
                        width: AtomicU32::new(w),
                        height: AtomicU32::new(h),
                        byte_len: MAX_BUFFER_SIZE,
                        ptr: InvPtr::from_raw_parts(0, buf1.offset()),
                        damage_count: AtomicU64::new(0),
                        damage: UnsafeCell::new([Rect::full(); NUM_DAMAGE]),
                    },
                    DisplayBuffer {
                        comp_width: AtomicU32::new(w),
                        comp_height: AtomicU32::new(h),
                        width: AtomicU32::new(w),
                        height: AtomicU32::new(h),
                        byte_len: MAX_BUFFER_SIZE,
                        ptr: InvPtr::from_raw_parts(0, buf2.offset()),
                        damage_count: AtomicU64::new(0),
                        damage: UnsafeCell::new([Rect::full(); NUM_DAMAGE]),
                    },
                ],
            };
            obj.write(base)
        })?;

        Ok(BufferObject { obj })
    }

    /// Returns true if the buffers currently need to be read out.
    pub fn has_data_for_read(&self) -> bool {
        self.obj.base().flags.load(Ordering::SeqCst) & DBF_COMP_DONE == 0
    }

    /// Read out the compositor buffer.
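    ///
    /// The callback receives the current front buffer along with its width and
    /// height. A compositor-side sketch, assuming `bo` is a mapped `BufferObject`
    /// and `present` is a hypothetical routine that blits damaged regions to the
    /// screen:
    ///
    /// ```ignore
    /// if bo.has_data_for_read() {
    ///     let (w, h) = bo.read_buffer(|buf, w, h| {
    ///         for dmg in buf.damage_rects() {
    ///             present(buf.as_slice(), w, h, *dmg);
    ///         }
    ///         (w, h)
    ///     });
    ///     // Keep the same size for the next frame.
    ///     bo.read_done(w, h);
    /// }
    /// ```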
    pub fn read_buffer<R>(&self, mut f: impl FnMut(Buffer, u32, u32) -> R) -> R {
        let base = self.obj.base();
        let flags = base.flags.load(Ordering::SeqCst);

        let buffer = if flags & DBF_PHASE != 0 {
            &base.buffers[0]
        } else {
            &base.buffers[1]
        };
        let cw = buffer.width.load(Ordering::SeqCst);
        let ch = buffer.height.load(Ordering::SeqCst);
        let buf = unsafe { buffer.buffer() };
        let mut buf = buf.slice(0..((cw * ch) as usize));
        let r = f(Buffer::new(&mut buf, buffer), cw, ch);
        r
    }

    /// Mark that the compositor has finished reading the buffer. Provides the new width and height
    /// to use next time this buffer should be filled out. These values may be unchanged.
    pub fn read_done(&self, new_w: u32, new_h: u32) {
        let base = self.obj.base();
        let flags = base.flags.load(Ordering::SeqCst);
        let buffer = if flags & DBF_PHASE != 0 {
            &base.buffers[0]
        } else {
            &base.buffers[1]
        };
        buffer.comp_height.store(new_h, Ordering::Release);
        buffer.comp_width.store(new_w, Ordering::Release);
        unsafe { buffer.reset_damage() };
        base.flags.fetch_or(DBF_COMP_DONE, Ordering::SeqCst);
        let _ = sys_thread_sync(
            &mut [ThreadSync::new_wake(ThreadSyncWake::new(
                ThreadSyncReference::Virtual(&base.flags),
                usize::MAX,
            ))],
            None,
        );
    }

    /// Fill the current application-owned buffer with data within the callback.
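    ///
    /// The callback receives the buffer along with the width and height the
    /// compositor expects the next frame to use. A sketch of a partial redraw (the
    /// rectangle and pixel value are arbitrary):
    ///
    /// ```ignore
    /// bo.update_buffer(|mut buf, w, _h| {
    ///     let dirty = Rect::new(8, 8, 16, 16);
    ///     for y in dirty.y..dirty.y + dirty.h {
    ///         for x in dirty.x..dirty.x + dirty.w {
    ///             buf[(y * w + x) as usize] = 0xffff_ffff;
    ///         }
    ///     }
    ///     buf.damage(dirty);
    /// });
    /// bo.flip();
    /// ```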
    pub fn update_buffer<R>(&self, mut f: impl FnMut(BufferMut, u32, u32) -> R) -> R {
        let base = self.obj.base();
        let flags = base.flags.load(Ordering::SeqCst);

        let buffer = if flags & DBF_PHASE != 0 {
            &base.buffers[1]
        } else {
            &base.buffers[0]
        };
        let cw = buffer.comp_width.load(Ordering::SeqCst);
        let ch = buffer.comp_height.load(Ordering::SeqCst);

        let buf = unsafe { buffer.buffer_mut() };
        let mut buf = buf.slice(0..((cw * ch) as usize));
        let r = f(BufferMut::new(&mut buf, buffer), cw, ch);
        buffer.height.store(ch, Ordering::Release);
        buffer.width.store(cw, Ordering::Release);
        r
    }

    /// Flip the buffer, indicating that the compositor can now read the buffer.
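    ///
    /// Blocks until the compositor has acknowledged the previous frame via
    /// [`BufferObject::read_done`], copies the regions drawn this frame into the
    /// other buffer so the application's next draw target stays up to date, then
    /// toggles the phase bit and wakes any sleeping threads.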
    pub fn flip(&self) {
        let base = self.obj.base();
        let mut flags = base.flags.load(Ordering::SeqCst);

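        // Wait until the compositor signals (via `read_done`) that it has finished
        // reading the buffer it currently owns.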
        while flags & DBF_COMP_DONE == 0 {
            let _ = sys_thread_sync(
                &mut [ThreadSync::new_sleep(ThreadSyncSleep::new(
                    ThreadSyncReference::Virtual(&base.flags),
                    flags,
                    ThreadSyncOp::Equal,
                    ThreadSyncFlags::empty(),
                ))],
                None,
            );
            flags = base.flags.load(Ordering::SeqCst);
        }

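        // Copy the regions drawn this frame into the buffer the compositor has just
        // finished with, so the application's next draw target is up to date outside
        // of whatever it redraws itself.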
        let (src_buffer, dst_buffer) = if flags & DBF_PHASE != 0 {
            (&base.buffers[1], &base.buffers[0])
        } else {
            (&base.buffers[0], &base.buffers[1])
        };

        let w = src_buffer.width.load(Ordering::Acquire);
        let h = src_buffer.height.load(Ordering::Acquire);
        for dmg in unsafe { src_buffer.damage() } {
            for y in dmg.y..(dmg.y + dmg.h.min(h - dmg.y)) {
                let start = (y * w + dmg.x) as usize;
                let len = dmg.w.min(w - dmg.x) as usize;
                let src = &unsafe { src_buffer.buffer().as_slice() }[start..(start + len)];
                let dst =
                    &mut unsafe { dst_buffer.buffer_mut().as_slice_mut() }[start..(start + len)];
                dst.copy_from_slice(src);
            }
        }

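        // Swap buffer roles, clear the compositor-done bit, and wake any threads
        // sleeping on the flags word.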
        let new_flags = (flags ^ DBF_PHASE) & !DBF_COMP_DONE;
        base.flags.store(new_flags, Ordering::SeqCst);

        let _ = sys_thread_sync(
            &mut [ThreadSync::new_wake(ThreadSyncWake::new(
                ThreadSyncReference::Virtual(&base.flags),
                usize::MAX,
            ))],
            None,
        );
    }
}

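/// Requested window geometry: position (`x`, `y`), size (`w`, `h`), and a stacking
/// value `z`.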
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct WindowConfig {
    pub w: u32,
    pub h: u32,
    pub x: u32,
    pub y: u32,
    pub z: u32,
}