use std::collections::VecDeque;
use std::convert::Infallible;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::io::BufRead;
use std::io::IoSlice;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::io::{self};
use std::iter::FusedIterator;
use std::mem;
use std::ops::Bound;
use std::ops::RangeBounds;
use std::pin::Pin;
use std::sync::Arc;
use std::task::Context;
use std::task::Poll;

use bytes::Buf;
use bytes::BufMut;
use bytes::Bytes;
use bytes::BytesMut;
use futures::Stream;

use crate::*;

/// Buffer is a byte container that is either a single contiguous [`Bytes`]
/// or a shared sequence of [`Bytes`] parts.
///
/// Cloning a `Buffer` is cheap: the underlying storage is reference counted.
#[derive(Clone)]
pub struct Buffer(Inner);

#[derive(Clone)]
enum Inner {
    /// A single contiguous chunk.
    Contiguous(Bytes),
    /// A list of chunks viewed through a cursor.
    NonContiguous {
        /// The underlying parts, shared between clones.
        parts: Arc<[Bytes]>,
        /// Number of bytes remaining in the buffer.
        size: usize,
        /// Index of the part currently at the front.
        idx: usize,
        /// Offset into the current part.
        offset: usize,
    },
}

impl Debug for Buffer {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut b = f.debug_struct("Buffer");

        match &self.0 {
            Inner::Contiguous(bs) => {
                b.field("type", &"contiguous");
                b.field("size", &bs.len());
            }
            Inner::NonContiguous {
                parts,
                size,
                idx,
                offset,
            } => {
                b.field("type", &"non_contiguous");
                b.field("parts", &parts);
                b.field("size", &size);
                b.field("idx", &idx);
                b.field("offset", &offset);
            }
        }
        b.finish_non_exhaustive()
    }
}

impl Default for Buffer {
    fn default() -> Self {
        Self::new()
    }
}

impl Buffer {
    /// Create a new empty buffer.
    #[inline]
    pub const fn new() -> Self {
        Self(Inner::Contiguous(Bytes::new()))
    }

    /// Get the length of the buffer in bytes.
    #[inline]
    pub fn len(&self) -> usize {
        match &self.0 {
            Inner::Contiguous(b) => b.remaining(),
            Inner::NonContiguous { size, .. } => *size,
        }
    }

    /// Check if the buffer is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Get the number of [`Bytes`] parts that make up this buffer.
    pub fn count(&self) -> usize {
        match &self.0 {
            Inner::Contiguous(_) => 1,
            Inner::NonContiguous {
                parts,
                idx,
                size,
                offset,
            } => {
                parts
                    .iter()
                    .skip(*idx)
                    .fold((0, size + offset), |(count, size), bytes| {
                        if size == 0 {
                            (count, 0)
                        } else {
                            (count + 1, size.saturating_sub(bytes.len()))
                        }
                    })
                    .0
            }
        }
    }

    /// Get the current front chunk as [`Bytes`].
    ///
    /// For a contiguous buffer this returns the whole remaining content.
    pub fn current(&self) -> Bytes {
        match &self.0 {
            Inner::Contiguous(inner) => inner.clone(),
            Inner::NonContiguous {
                parts,
                idx,
                offset,
                size,
            } => {
                let chunk = &parts[*idx];
                let n = (chunk.len() - *offset).min(*size);
                chunk.slice(*offset..*offset + n)
            }
        }
    }

    /// Shorten the buffer to `len` bytes, keeping the front.
    ///
    /// If `len` is greater than the current length, this has no effect.
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        match &mut self.0 {
            Inner::Contiguous(bs) => bs.truncate(len),
            Inner::NonContiguous { size, .. } => {
                *size = (*size).min(len);
            }
        }
    }

    /// Return a slice of the buffer for the given byte range.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds or its start is greater than its end.
    pub fn slice(&self, range: impl RangeBounds<usize>) -> Self {
        let len = self.len();

        let begin = match range.start_bound() {
            Bound::Included(&n) => n,
            Bound::Excluded(&n) => n.checked_add(1).expect("out of range"),
            Bound::Unbounded => 0,
        };

        let end = match range.end_bound() {
            Bound::Included(&n) => n.checked_add(1).expect("out of range"),
            Bound::Excluded(&n) => n,
            Bound::Unbounded => len,
        };

        assert!(
            begin <= end,
            "range start must not be greater than end: {begin:?} <= {end:?}",
        );
        assert!(end <= len, "range end out of bounds: {end:?} <= {len:?}",);

        if end == begin {
            return Buffer::new();
        }

        let mut ret = self.clone();
        ret.truncate(end);
        ret.advance(begin);
        ret
    }

    /// Merge all remaining bytes into one contiguous [`Bytes`].
    ///
    /// This only copies data when the buffer is non-contiguous and made of
    /// more than one part.
    #[inline]
    pub fn to_bytes(&self) -> Bytes {
        match &self.0 {
            Inner::Contiguous(bytes) => bytes.clone(),
            Inner::NonContiguous {
                parts,
                size,
                idx: _,
                offset,
            } => {
                if parts.len() == 1 {
                    parts[0].slice(*offset..(*offset + *size))
                } else {
                    let mut ret = BytesMut::with_capacity(self.len());
                    ret.put(self.clone());
                    ret.freeze()
                }
            }
        }
    }

    /// Copy all remaining bytes into a new `Vec<u8>`.
    #[inline]
    pub fn to_vec(&self) -> Vec<u8> {
        let mut ret = Vec::with_capacity(self.len());
        ret.put(self.clone());
        ret
    }

    /// Build a vectored [`IoSlice`] view over the underlying parts, starting
    /// at the current position.
    #[inline]
    pub fn to_io_slice(&self) -> Vec<IoSlice<'_>> {
        match &self.0 {
            Inner::Contiguous(bs) => vec![IoSlice::new(bs.chunk())],
            Inner::NonContiguous {
                parts, idx, offset, ..
            } => {
                let mut ret = Vec::with_capacity(parts.len() - *idx);
                let mut new_offset = *offset;
                for part in parts.iter().skip(*idx) {
                    ret.push(IoSlice::new(&part[new_offset..]));
                    new_offset = 0;
                }
                ret
            }
        }
    }

    /// Split the buffer into an iterator of [`Buffer`]s of at most
    /// `chunk_size` bytes each.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is zero.
    pub fn chunks(&self, chunk_size: usize) -> BufferChunks {
        assert!(chunk_size != 0, "chunk size must be greater than 0");

        BufferChunks {
            buffer: self.clone(),
            chunk_size,
            position: 0,
            len: self.len(),
        }
    }
}

impl From<Vec<u8>> for Buffer {
    #[inline]
    fn from(bs: Vec<u8>) -> Self {
        Self(Inner::Contiguous(bs.into()))
    }
}

impl From<Bytes> for Buffer {
    #[inline]
    fn from(bs: Bytes) -> Self {
        Self(Inner::Contiguous(bs))
    }
}

impl From<String> for Buffer {
    #[inline]
    fn from(s: String) -> Self {
        Self(Inner::Contiguous(Bytes::from(s)))
    }
}

impl From<&'static [u8]> for Buffer {
    #[inline]
    fn from(s: &'static [u8]) -> Self {
        Self(Inner::Contiguous(Bytes::from_static(s)))
    }
}

impl From<&'static str> for Buffer {
    #[inline]
    fn from(s: &'static str) -> Self {
        Self(Inner::Contiguous(Bytes::from_static(s.as_bytes())))
    }
}

impl FromIterator<u8> for Buffer {
    #[inline]
    fn from_iter<T: IntoIterator<Item = u8>>(iter: T) -> Self {
        Self(Inner::Contiguous(Bytes::from_iter(iter)))
    }
}

impl From<VecDeque<Bytes>> for Buffer {
    #[inline]
    fn from(bs: VecDeque<Bytes>) -> Self {
        let size = bs.iter().map(Bytes::len).sum();
        Self(Inner::NonContiguous {
            parts: Vec::from(bs).into(),
            size,
            idx: 0,
            offset: 0,
        })
    }
}

impl From<Vec<Bytes>> for Buffer {
    #[inline]
    fn from(bs: Vec<Bytes>) -> Self {
        let size = bs.iter().map(Bytes::len).sum();
        Self(Inner::NonContiguous {
            parts: bs.into(),
            size,
            idx: 0,
            offset: 0,
        })
    }
}

impl From<Arc<[Bytes]>> for Buffer {
    #[inline]
    fn from(bs: Arc<[Bytes]>) -> Self {
        let size = bs.iter().map(Bytes::len).sum();
        Self(Inner::NonContiguous {
            parts: bs,
            size,
            idx: 0,
            offset: 0,
        })
    }
}

impl FromIterator<Bytes> for Buffer {
    #[inline]
    fn from_iter<T: IntoIterator<Item = Bytes>>(iter: T) -> Self {
        let mut size = 0;
        let bs = iter.into_iter().inspect(|v| size += v.len());
        // Collect the parts directly into an `Arc<[Bytes]>`; the total size is
        // accumulated as a side effect of `inspect`.
        let parts = Arc::from_iter(bs);
        Self(Inner::NonContiguous {
            parts,
            size,
            idx: 0,
            offset: 0,
        })
    }
}

impl Buf for Buffer {
    #[inline]
    fn remaining(&self) -> usize {
        self.len()
    }

    #[inline]
    fn chunk(&self) -> &[u8] {
        match &self.0 {
            Inner::Contiguous(b) => b.chunk(),
            Inner::NonContiguous {
                parts,
                size,
                idx,
                offset,
            } => {
                if *size == 0 {
                    return &[];
                }

                let chunk = &parts[*idx];
                let n = (chunk.len() - *offset).min(*size);
                &parts[*idx][*offset..*offset + n]
            }
        }
    }

    #[inline]
    fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize {
        match &self.0 {
            Inner::Contiguous(b) => {
                if dst.is_empty() {
                    return 0;
                }

                dst[0] = IoSlice::new(b.chunk());
                1
            }
            Inner::NonContiguous {
                parts, idx, offset, ..
            } => {
                if dst.is_empty() {
                    return 0;
                }

                let mut new_offset = *offset;
                parts
                    .iter()
                    .skip(*idx)
                    .zip(dst.iter_mut())
                    .map(|(part, dst)| {
                        *dst = IoSlice::new(&part[new_offset..]);
                        new_offset = 0;
                    })
                    .count()
            }
        }
    }

    #[inline]
    fn advance(&mut self, cnt: usize) {
        match &mut self.0 {
            Inner::Contiguous(b) => b.advance(cnt),
            Inner::NonContiguous {
                parts,
                size,
                idx,
                offset,
            } => {
                assert!(
                    cnt <= *size,
                    "cannot advance past {cnt} bytes, only {size} bytes left"
                );

                // Walk forward across parts until `cnt` bytes have been skipped.
                let mut new_idx = *idx;
                let mut new_offset = *offset;
                let mut remaining_cnt = cnt;
                while remaining_cnt > 0 {
                    let part_len = parts[new_idx].len();
                    let remaining_in_part = part_len - new_offset;

                    if remaining_cnt < remaining_in_part {
                        new_offset += remaining_cnt;
                        break;
                    }

                    remaining_cnt -= remaining_in_part;
                    new_idx += 1;
                    new_offset = 0;
                }

                *idx = new_idx;
                *offset = new_offset;
                *size -= cnt;
            }
        }
    }
}

impl Iterator for Buffer {
    type Item = Bytes;

    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.0 {
            Inner::Contiguous(bs) => {
                if bs.is_empty() {
                    None
                } else {
                    Some(mem::take(bs))
                }
            }
            Inner::NonContiguous {
                parts,
                size,
                idx,
                offset,
            } => {
                if *size == 0 {
                    return None;
                }

                let chunk = &parts[*idx];
                let n = (chunk.len() - *offset).min(*size);
                let buf = chunk.slice(*offset..*offset + n);
                *size -= n;
                *offset += n;

                if *offset == chunk.len() {
                    *idx += 1;
                    *offset = 0;
                }

                Some(buf)
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match &self.0 {
            Inner::Contiguous(bs) => {
                if bs.is_empty() {
                    (0, Some(0))
                } else {
                    (1, Some(1))
                }
            }
            Inner::NonContiguous { parts, idx, .. } => {
                let remaining = parts.len().saturating_sub(*idx);
                (remaining, Some(remaining))
            }
        }
    }
}

impl Stream for Buffer {
    type Item = Result<Bytes, Infallible>;

    fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        Poll::Ready(self.get_mut().next().map(Ok))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(self)
    }
}

impl Read for Buffer {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let chunk = self.chunk();
        let len = chunk.len().min(buf.len());
        buf[..len].copy_from_slice(&chunk[..len]);
        self.advance(len);
        Ok(len)
    }
}

impl Seek for Buffer {
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        let len = self.len() as u64;
        let new_pos = match pos {
            SeekFrom::Start(offset) => offset,
            SeekFrom::End(offset) => {
                if offset < 0 {
                    len.checked_sub(offset.unsigned_abs())
                        .ok_or(io::Error::new(
                            io::ErrorKind::InvalidInput,
                            "invalid seek to a negative position",
                        ))?
                } else {
                    len.checked_add(offset as u64).ok_or(io::Error::new(
                        io::ErrorKind::InvalidInput,
                        "seek out of bounds",
                    ))?
                }
            }
            SeekFrom::Current(offset) => {
                let current_pos = (len - self.remaining() as u64) as i64;
                let new_pos = current_pos.checked_add(offset).ok_or(io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "seek out of bounds",
                ))?;
                if new_pos < 0 {
                    return Err(io::Error::new(
                        io::ErrorKind::InvalidInput,
                        "invalid seek to a negative position",
                    ));
                }
                new_pos as u64
            }
        };

        if new_pos > len {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "seek out of bounds",
            ));
        }

        self.advance((new_pos - (len - self.remaining() as u64)) as usize);
        Ok(new_pos)
    }
}

impl BufRead for Buffer {
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        let chunk = match &self.0 {
            Inner::Contiguous(b) => b.chunk(),
            Inner::NonContiguous {
                parts,
                size,
                idx,
                offset,
            } => {
                if *size == 0 {
                    return Ok(&[]);
                }

                let chunk = &parts[*idx];
                let n = (chunk.len() - *offset).min(*size);
                &parts[*idx][*offset..*offset + n]
            }
        };
        Ok(chunk)
    }

    fn consume(&mut self, amt: usize) {
        self.advance(amt);
    }
}

/// An iterator over fixed-size chunks of a [`Buffer`], created by
/// [`Buffer::chunks`].
pub struct BufferChunks {
    buffer: Buffer,
    chunk_size: usize,
    position: usize,
    len: usize,
}

impl Iterator for BufferChunks {
    type Item = Buffer;

    fn next(&mut self) -> Option<Self::Item> {
        if self.position >= self.len {
            return None;
        }

        let end = (self.position + self.chunk_size).min(self.len);
        let chunk = self.buffer.slice(self.position..end);
        self.position = end;
        Some(chunk)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.len.saturating_sub(self.position);
        let chunks = remaining.div_ceil(self.chunk_size);
        (chunks, Some(chunks))
    }
}

impl ExactSizeIterator for BufferChunks {
    fn len(&self) -> usize {
        self.size_hint().0
    }
}

impl FusedIterator for BufferChunks {}

#[cfg(test)]
mod tests {
    use std::io::BufRead;
    use std::io::Read;
    use std::io::Seek;
    use std::io::SeekFrom;

    use pretty_assertions::assert_eq;
    use rand::prelude::*;

    use super::*;

    const EMPTY_SLICE: &[u8] = &[];

    #[test]
    fn test_contiguous_buffer() {
        let mut buf = Buffer::new();

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
        assert_eq!(buf.next(), None);
    }

    #[test]
    fn test_empty_non_contiguous_buffer() {
        let mut buf = Buffer::from(vec![Bytes::new()]);

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
        assert_eq!(buf.next(), None);
    }

    #[test]
    fn test_non_contiguous_buffer_with_advance() {
        let mut buf = Buffer::from(vec![Bytes::from("a")]);

        assert_eq!(buf.remaining(), 1);
        assert_eq!(buf.chunk(), b"a");

        buf.advance(1);

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
    }

    #[test]
    fn test_non_contiguous_buffer_with_next() {
        let mut buf = Buffer::from(vec![Bytes::from("a")]);

        assert_eq!(buf.remaining(), 1);
        assert_eq!(buf.chunk(), b"a");

        let bs = buf.next();

        assert_eq!(bs, Some(Bytes::from("a")));
        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
    }
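
    // Added sketch: iterating a non-contiguous buffer should yield its parts
    // in order, matching the `next` behaviour exercised above.
    #[test]
    fn test_non_contiguous_buffer_collect_parts() {
        let buf = Buffer::from(vec![Bytes::from("ab"), Bytes::from("cd")]);

        let parts: Vec<Bytes> = buf.collect();

        assert_eq!(parts, vec![Bytes::from("ab"), Bytes::from("cd")]);
    }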

    #[test]
    fn test_buffer_advance() {
        let mut buf = Buffer::from(vec![Bytes::from("a"), Bytes::from("b"), Bytes::from("c")]);

        assert_eq!(buf.remaining(), 3);
        assert_eq!(buf.chunk(), b"a");

        buf.advance(1);

        assert_eq!(buf.remaining(), 2);
        assert_eq!(buf.chunk(), b"b");

        buf.advance(1);

        assert_eq!(buf.remaining(), 1);
        assert_eq!(buf.chunk(), b"c");

        buf.advance(1);

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);

        buf.advance(0);

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
    }

    #[test]
    fn test_buffer_truncate() {
        let mut buf = Buffer::from(vec![Bytes::from("a"), Bytes::from("b"), Bytes::from("c")]);

        assert_eq!(buf.remaining(), 3);
        assert_eq!(buf.chunk(), b"a");

        buf.truncate(100);

        assert_eq!(buf.remaining(), 3);
        assert_eq!(buf.chunk(), b"a");

        buf.truncate(2);

        assert_eq!(buf.remaining(), 2);
        assert_eq!(buf.chunk(), b"a");

        buf.truncate(0);

        assert_eq!(buf.remaining(), 0);
        assert_eq!(buf.chunk(), EMPTY_SLICE);
    }
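
    // Added sketch: `to_vec` flattens the remaining bytes, and `to_io_slice`
    // exposes one `IoSlice` per underlying part. Mirrors the existing tests
    // and only relies on APIs defined in this file.
    #[test]
    fn test_buffer_to_vec_and_io_slice() {
        let buf = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);

        assert_eq!(buf.to_vec(), b"HelloWorld".to_vec());

        let slices = buf.to_io_slice();
        assert_eq!(slices.len(), 2);
        assert_eq!(&*slices[0], b"Hello");
        assert_eq!(&*slices[1], b"World");
    }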

    #[test]
    fn test_buffer_chunks_contiguous() {
        let buf = Buffer::from(Bytes::from("abcdefg"));

        let chunks = buf
            .chunks(3)
            .map(|chunk| chunk.to_bytes())
            .collect::<Vec<Bytes>>();

        assert_eq!(
            chunks,
            vec![Bytes::from("abc"), Bytes::from("def"), Bytes::from("g")]
        );

        assert_eq!(Buffer::new().chunks(4).count(), 0);
    }

    #[test]
    fn test_buffer_chunks_non_contiguous() {
        let buf = Buffer::from(vec![
            Bytes::from("ab"),
            Bytes::from("c"),
            Bytes::from("def"),
        ]);

        let chunks = buf
            .chunks(2)
            .map(|chunk| chunk.to_bytes())
            .collect::<Vec<Bytes>>();

        assert_eq!(
            chunks,
            vec![Bytes::from("ab"), Bytes::from("cd"), Bytes::from("ef")]
        );
    }

    #[test]
    #[should_panic(expected = "chunk size must be greater than 0")]
    fn test_buffer_chunks_zero_panics() {
        let buf = Buffer::from(Bytes::from("abc"));
        let _ = buf.chunks(0);
    }
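
    // Added sketch: basic coverage for `slice`, `count` and `current` on a
    // non-contiguous buffer, since they are not exercised elsewhere in this
    // module.
    #[test]
    fn test_buffer_slice_count_and_current() {
        let buf = Buffer::from(vec![Bytes::from("ab"), Bytes::from("cd"), Bytes::from("ef")]);

        assert_eq!(buf.count(), 3);
        assert_eq!(buf.current(), Bytes::from("ab"));

        let sliced = buf.slice(1..5);
        assert_eq!(sliced.remaining(), 4);
        assert_eq!(sliced.to_bytes(), Bytes::from("bcde"));

        // An empty range yields an empty buffer.
        assert_eq!(buf.slice(2..2).remaining(), 0);
    }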

    fn setup_buffer() -> (Buffer, usize, Bytes) {
        let mut rng = thread_rng();

        let bs = (0..100)
            .map(|_| {
                let len = rng.gen_range(1..100);
                let mut buf = vec![0; len];
                rng.fill(&mut buf[..]);
                Bytes::from(buf)
            })
            .collect::<Vec<_>>();

        let total_size = bs.iter().map(|b| b.len()).sum::<usize>();
        let total_content = bs.iter().flatten().copied().collect::<Bytes>();
        let buf = Buffer::from(bs);

        (buf, total_size, total_content)
    }

    #[test]
    fn fuzz_buffer_advance() {
        let mut rng = thread_rng();

        let (mut buf, total_size, total_content) = setup_buffer();
        assert_eq!(buf.remaining(), total_size);
        assert_eq!(buf.to_bytes(), total_content);

        let mut cur = 0;
        // Cap the number of iterations so the fuzz loop always terminates quickly.
        let mut times = 10000;
        while !buf.is_empty() && times > 0 {
            times -= 1;

            let cnt = rng.gen_range(0..total_size - cur);
            cur += cnt;
            buf.advance(cnt);

            assert_eq!(buf.remaining(), total_size - cur);
            assert_eq!(buf.to_bytes(), total_content.slice(cur..));
        }
    }

    #[test]
    fn fuzz_buffer_iter() {
        let mut rng = thread_rng();

        let (mut buf, total_size, total_content) = setup_buffer();
        assert_eq!(buf.remaining(), total_size);
        assert_eq!(buf.to_bytes(), total_content);

        let mut cur = 0;
        while !buf.is_empty() {
            let cnt = rng.gen_range(0..total_size - cur);
            cur += cnt;
            buf.advance(cnt);

            assert_eq!(buf.remaining(), total_size - cur);
            assert_eq!(buf.to_bytes(), total_content.slice(cur..));

            if let Some(bs) = buf.next() {
                assert_eq!(bs, total_content.slice(cur..cur + bs.len()));
                cur += bs.len();
            }

            assert_eq!(buf.remaining(), total_size - cur);
            assert_eq!(buf.to_bytes(), total_content.slice(cur..));
        }
    }

    #[test]
    fn fuzz_buffer_truncate() {
        let mut rng = thread_rng();

        let (mut buf, total_size, total_content) = setup_buffer();
        assert_eq!(buf.remaining(), total_size);
        assert_eq!(buf.to_bytes(), total_content);

        let mut cur = 0;
        while !buf.is_empty() {
            let cnt = rng.gen_range(0..total_size - cur);
            cur += cnt;
            buf.advance(cnt);

            assert_eq!(buf.remaining(), total_size - cur);
            assert_eq!(buf.to_bytes(), total_content.slice(cur..));

            // Truncate a clone so the original buffer keeps its remaining bytes
            // for the following iterations.
            let truncate_size = rng.gen_range(0..total_size - cur);
            let mut truncated = buf.clone();
            truncated.truncate(truncate_size);

            assert_eq!(truncated.remaining(), truncate_size);
            assert_eq!(
                truncated.to_bytes(),
                total_content.slice(cur..cur + truncate_size)
            );

            if let Some(bs) = buf.next() {
                assert_eq!(bs, total_content.slice(cur..cur + bs.len()));
                cur += bs.len();
            }

            assert_eq!(buf.remaining(), total_size - cur);
            assert_eq!(buf.to_bytes(), total_content.slice(cur..));
        }
    }

    #[test]
    fn test_read_trait() {
        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        let mut output = vec![0; 5];
        let size = buffer.read(&mut output).unwrap();
        assert_eq!(size, 5);
        assert_eq!(&output, b"Hello");
    }

    #[test]
    fn test_seek_trait() {
        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        buffer.seek(SeekFrom::Start(5)).unwrap();
        let mut output = vec![0; 5];
        buffer.read_exact(&mut output).unwrap();
        assert_eq!(&output, b"World");
    }

    #[test]
    fn test_bufread_trait() {
        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        let mut output = String::new();
        buffer.read_to_string(&mut output).unwrap();
        assert_eq!(output, "HelloWorld");

        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        let buf = buffer.fill_buf().unwrap();
        assert_eq!(buf, b"Hello");
        buffer.consume(5);
        let buf = buffer.fill_buf().unwrap();
        assert_eq!(buf, b"World");
    }
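
    // Added sketch: the `Stream` impl should yield the same chunks as the
    // iterator, wrapped in `Ok`. Polling uses `futures::task::noop_waker`,
    // assuming the `futures` dependency exposes it (it does in futures 0.3).
    #[test]
    fn test_stream_trait() {
        use futures::task::noop_waker;

        let mut buffer = Buffer::from(vec![Bytes::from("ab"), Bytes::from("cd")]);
        let waker = noop_waker();
        let mut cx = Context::from_waker(&waker);

        assert_eq!(
            Pin::new(&mut buffer).poll_next(&mut cx),
            Poll::Ready(Some(Ok(Bytes::from("ab"))))
        );
        assert_eq!(
            Pin::new(&mut buffer).poll_next(&mut cx),
            Poll::Ready(Some(Ok(Bytes::from("cd"))))
        );
        assert_eq!(Pin::new(&mut buffer).poll_next(&mut cx), Poll::Ready(None));
    }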

    #[test]
    fn test_read_partial() {
        let mut buffer = Buffer::from(vec![Bytes::from("Partial"), Bytes::from("Read")]);
        let mut output = vec![0; 4];
        let size = buffer.read(&mut output).unwrap();
        assert_eq!(size, 4);
        assert_eq!(&output, b"Part");

        let size = buffer.read(&mut output).unwrap();
        assert_eq!(size, 3);
        assert_eq!(&output[..3], b"ial");
    }

    #[test]
    fn test_seek_and_read() {
        let mut buffer = Buffer::from(vec![Bytes::from("SeekAndRead")]);
        buffer.seek(SeekFrom::Start(4)).unwrap();
        let mut output = vec![0; 3];
        buffer.read_exact(&mut output).unwrap();
        assert_eq!(&output, b"And");
    }
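
    // Added sketch: forward seeks relative to the current position and the
    // end. Only forward movement is exercised, since `advance` cannot move
    // the cursor backwards.
    #[test]
    fn test_seek_from_current_and_end() {
        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        buffer.seek(SeekFrom::Current(2)).unwrap();
        let mut output = vec![0; 3];
        buffer.read_exact(&mut output).unwrap();
        assert_eq!(&output, b"llo");

        let mut buffer = Buffer::from(vec![Bytes::from("Hello"), Bytes::from("World")]);
        buffer.seek(SeekFrom::End(-5)).unwrap();
        let mut output = vec![0; 5];
        buffer.read_exact(&mut output).unwrap();
        assert_eq!(&output, b"World");
    }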

    #[test]
    fn test_bufread_consume() {
        let mut buffer = Buffer::from(vec![Bytes::from("ConsumeTest")]);
        let buf = buffer.fill_buf().unwrap();
        assert_eq!(buf, b"ConsumeTest");
        buffer.consume(7);
        let buf = buffer.fill_buf().unwrap();
        assert_eq!(buf, b"Test");
    }

    #[test]
    fn test_empty_buffer() {
        let mut buffer = Buffer::new();
        let mut output = vec![0; 5];
        let size = buffer.read(&mut output).unwrap();
        assert_eq!(size, 0);
        assert_eq!(&output, &[0; 5]);
    }

    #[test]
    fn test_seek_out_of_bounds() {
        let mut buffer = Buffer::from(vec![Bytes::from("OutOfBounds")]);
        let result = buffer.seek(SeekFrom::Start(100));
        assert!(result.is_err());
    }
}