io.rs 5.1 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225
  1. use crate::prelude::*;
  2. use bindings::EFAULT;
  3. use core::{cmp, mem::MaybeUninit};
/// Outcome of a single [`Buffer::fill`] call.
#[must_use]
pub enum FillResult {
    /// Every byte of the input was copied; carries the byte count.
    Done(usize),
    /// Only the first `n` bytes fit; carries the number actually copied.
    Partial(usize),
    /// The buffer was already full; nothing was copied.
    Full,
}
  10. impl FillResult {
  11. pub fn ok_or(self, err: u32) -> KResult<()> {
  12. match self {
  13. FillResult::Done(_) => Ok(()),
  14. _ => Err(err),
  15. }
  16. }
  17. pub fn should_stop(self) -> bool {
  18. return !matches!(self, FillResult::Done(_));
  19. }
  20. pub fn allow_partial(self) -> usize {
  21. match self {
  22. FillResult::Done(n) | FillResult::Partial(n) => n,
  23. FillResult::Full => 0,
  24. }
  25. }
  26. }
/// A byte sink with a fixed capacity that tracks how much has been written.
pub trait Buffer {
    /// Total capacity of the buffer in bytes.
    fn total(&self) -> usize;
    /// Number of bytes written so far.
    fn wrote(&self) -> usize;
    /// Copy `data` into the buffer, reporting whether all, some, or none
    /// of it fit (see [`FillResult`]).
    #[must_use]
    fn fill(&mut self, data: &[u8]) -> KResult<FillResult>;
    /// Remaining capacity in bytes (`total - wrote`).
    fn available(&self) -> usize {
        self.total() - self.wrote()
    }
    /// Adapter exposing this buffer as a `core::fmt::Write` sink.
    fn get_writer(&mut self) -> BufferWrite<'_, Self>
    where
        Self: Sized,
    {
        BufferWrite(self)
    }
}
/// Extension trait: fill a [`Buffer`] with the raw byte representation of
/// any `Copy` value.
pub trait BufferFill<T: Copy> {
    /// Copy the bytes of `object` into the buffer.
    fn copy(&mut self, object: &T) -> KResult<FillResult>;
}
  45. impl<T: Copy, B: Buffer + ?Sized> BufferFill<T> for B {
  46. fn copy(&mut self, object: &T) -> KResult<FillResult> {
  47. let ptr = object as *const T as *const u8;
  48. let len = core::mem::size_of::<T>();
  49. // SAFETY: `object` is a valid object.
  50. self.fill(unsafe { core::slice::from_raw_parts(ptr, len) })
  51. }
  52. }
/// Adapter that lets a [`Buffer`] be driven through `core::fmt::Write`
/// (e.g. by `write!`); obtained via [`Buffer::get_writer`].
pub struct BufferWrite<'b, B>(&'b mut B)
where
    B: Buffer + ?Sized;
  56. impl<'b, B> core::fmt::Write for BufferWrite<'b, B>
  57. where
  58. B: Buffer + ?Sized,
  59. {
  60. fn write_str(&mut self, s: &str) -> core::fmt::Result {
  61. match self.0.fill(s.as_bytes()) {
  62. Ok(FillResult::Done(_)) => Ok(()),
  63. _ => Err(core::fmt::Error),
  64. }
  65. }
  66. }
/// Heap-allocated, initially uninitialized `T` that is populated
/// byte-by-byte through the [`Buffer`] interface.
pub struct UninitBuffer<'lt, T: Copy + Sized> {
    /// Owning allocation for the value being built.
    data: Box<MaybeUninit<T>>,
    /// Byte view aliasing `data`'s storage; its cursor records how much
    /// of `T` has been written.
    buffer: ByteBuffer<'lt>,
}
impl<'lt, T: Copy + Sized> UninitBuffer<'lt, T> {
    /// Allocate an uninitialized `T` and set up a byte buffer over its storage.
    pub fn new() -> Self {
        let mut data = Box::new(MaybeUninit::uninit());
        let ptr = data.as_mut_ptr();
        Self {
            data,
            // NOTE(review): `buffer` aliases the heap block owned by `data`
            // through a raw pointer. This relies on the heap allocation
            // staying at the same address when the Box value moves into
            // `Self` — true for Box, but worth keeping in mind if the
            // field types ever change.
            buffer: ByteBuffer::from(unsafe {
                core::slice::from_raw_parts_mut(ptr as *mut u8, core::mem::size_of::<T>())
            }),
        }
    }

    /// Borrow the value, or `Err(EFAULT)` if the storage has not been
    /// completely filled yet.
    pub fn assume_filled_ref(&self) -> KResult<&T> {
        if self.buffer.available() != 0 {
            Err(EFAULT)
        } else {
            // SAFETY: zero bytes remain unwritten, so all size_of::<T>()
            // bytes were filled.
            // NOTE(review): this assumes every bit pattern is a valid `T`
            // (true for plain-old-data, not for e.g. `bool`) — confirm
            // callers only instantiate this with POD types.
            Ok(unsafe { self.data.assume_init_ref() })
        }
    }

    /// Take the value out by copy, or `Err(EFAULT)` if the storage has not
    /// been completely filled yet.
    pub fn assume_init(self) -> KResult<T> {
        if self.buffer.available() != 0 {
            Err(EFAULT)
        } else {
            // SAFETY: the byte buffer reports no remaining capacity, so the
            // whole of `T` was written before this call.
            Ok(unsafe { *self.data.assume_init() })
        }
    }
}
  97. impl<'lt, T: Copy + Sized> Buffer for UninitBuffer<'lt, T> {
  98. fn total(&self) -> usize {
  99. self.buffer.total()
  100. }
  101. fn wrote(&self) -> usize {
  102. self.buffer.wrote()
  103. }
  104. fn fill(&mut self, data: &[u8]) -> KResult<FillResult> {
  105. self.buffer.fill(data)
  106. }
  107. }
  108. pub struct ByteBuffer<'lt> {
  109. buf: &'lt mut [u8],
  110. cur: usize,
  111. }
  112. impl<'lt> ByteBuffer<'lt> {
  113. pub fn new(buf: &'lt mut [u8]) -> Self {
  114. Self { buf, cur: 0 }
  115. }
  116. pub fn available(&self) -> usize {
  117. self.buf.len() - self.cur
  118. }
  119. pub fn data(&self) -> &[u8] {
  120. &self.buf[..self.cur]
  121. }
  122. }
  123. impl<'lt, T: Copy + Sized> From<&'lt mut [T]> for ByteBuffer<'lt> {
  124. fn from(value: &'lt mut [T]) -> Self {
  125. Self {
  126. buf: unsafe {
  127. core::slice::from_raw_parts_mut(
  128. value.as_ptr() as *mut u8,
  129. core::mem::size_of::<T>() * value.len(),
  130. )
  131. },
  132. cur: 0,
  133. }
  134. }
  135. }
  136. impl Buffer for ByteBuffer<'_> {
  137. fn total(&self) -> usize {
  138. self.buf.len()
  139. }
  140. fn fill(&mut self, data: &[u8]) -> KResult<FillResult> {
  141. match self.available() {
  142. 0 => Ok(FillResult::Full),
  143. n if n < data.len() => {
  144. self.buf[self.cur..].copy_from_slice(&data[..n]);
  145. self.cur += n;
  146. Ok(FillResult::Partial(n))
  147. }
  148. _ => {
  149. self.buf[self.cur..self.cur + data.len()].copy_from_slice(data);
  150. self.cur += data.len();
  151. Ok(FillResult::Done(data.len()))
  152. }
  153. }
  154. }
  155. fn wrote(&self) -> usize {
  156. self.cur
  157. }
  158. }
  159. /// Iterator that generates chunks of a given length from a start index
  160. /// until the end of the total length.
  161. ///
  162. /// The iterator returns a tuple of (start, len) for each chunk.
  163. pub struct Chunks {
  164. start: usize,
  165. end: usize,
  166. cur: usize,
  167. chunk_len: usize,
  168. }
  169. impl Chunks {
  170. pub const fn new(start: usize, total_len: usize, chunk_len: usize) -> Self {
  171. Self {
  172. start,
  173. end: start + total_len,
  174. cur: start,
  175. chunk_len,
  176. }
  177. }
  178. }
  179. impl Iterator for Chunks {
  180. type Item = (usize, usize);
  181. fn next(&mut self) -> Option<Self::Item> {
  182. if self.cur >= self.end {
  183. return None;
  184. }
  185. let start = self.cur;
  186. let len = cmp::min(self.chunk_len, self.end - start);
  187. self.cur += self.chunk_len;
  188. Some((start, len))
  189. }
  190. }