use core::iter::FromIterator;
use core::mem::{self, ManuallyDrop};
use core::ops::{Deref, RangeBounds};
use core::{cmp, fmt, hash, ptr, slice, usize};

use alloc::{
    alloc::{dealloc, Layout},
    borrow::Borrow,
    boxed::Box,
    string::String,
    vec::Vec,
};

use crate::buf::IntoIter;
#[allow(unused)]
use crate::loom::sync::atomic::AtomicMut;
use crate::loom::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
use crate::{offset_from, Buf, BytesMut};

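/// A cheaply cloneable and sliceable chunk of contiguous memory.
///
/// Usage sketch (a hedged example; it assumes this type is consumed through
/// the crate's public `bytes::Bytes` export):
///
/// ```
/// use bytes::Bytes;
///
/// let mut mem = Bytes::from("Hello world");
/// let a = mem.slice(0..5);
///
/// assert_eq!(a, "Hello");
///
/// let b = mem.split_to(6);
///
/// assert_eq!(mem, "world");
/// assert_eq!(b, "Hello ");
/// ```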
pub struct Bytes {
    ptr: *const u8,
    len: usize,
    // Inlined "trait object": `data` is interpreted by the functions in `vtable`.
    data: AtomicPtr<()>,
    vtable: &'static Vtable,
}

pub(crate) struct Vtable {
    /// fn(data, ptr, len)
    pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
    /// fn(data, ptr, len): consumes the `Bytes`, returning an owned `Vec<u8>`.
    pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
    /// fn(data, ptr, len): consumes the `Bytes`, returning a `BytesMut`.
    pub to_mut: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> BytesMut,
    /// fn(data)
    pub is_unique: unsafe fn(&AtomicPtr<()>) -> bool,
    /// fn(data, ptr, len)
    pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
}

impl Bytes {
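    /// Creates a new empty `Bytes`.
    ///
    /// No allocation is performed. Usage sketch (hedged, assuming the public
    /// `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let b = Bytes::new();
    /// assert_eq!(&b[..], b"");
    /// ```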
    #[inline]
    #[cfg(not(all(loom, test)))]
    pub const fn new() -> Self {
        const EMPTY: &[u8] = &[];
        Bytes::from_static(EMPTY)
    }

    #[cfg(all(loom, test))]
    pub fn new() -> Self {
        const EMPTY: &[u8] = &[];
        Bytes::from_static(EMPTY)
    }

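    /// Creates `Bytes` from a static slice; the bytes are not copied.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let b = Bytes::from_static(b"hello");
    /// assert_eq!(&b[..], b"hello");
    /// ```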
    #[inline]
    #[cfg(not(all(loom, test)))]
    pub const fn from_static(bytes: &'static [u8]) -> Self {
        Bytes {
            ptr: bytes.as_ptr(),
            len: bytes.len(),
            data: AtomicPtr::new(ptr::null_mut()),
            vtable: &STATIC_VTABLE,
        }
    }

    #[cfg(all(loom, test))]
    pub fn from_static(bytes: &'static [u8]) -> Self {
        Bytes {
            ptr: bytes.as_ptr(),
            len: bytes.len(),
            data: AtomicPtr::new(ptr::null_mut()),
            vtable: &STATIC_VTABLE,
        }
    }

    /// Creates an empty `Bytes` that keeps the given address, so that
    /// `split_off`/`split_to` can return an empty half that still points into
    /// the original buffer.
    fn new_empty_with_ptr(ptr: *const u8) -> Self {
        debug_assert!(!ptr.is_null());

        // Drop the pointer's provenance; an empty slice only ever needs the
        // address, never a dereference.
        let ptr = without_provenance(ptr as usize);

        Bytes {
            ptr,
            len: 0,
            data: AtomicPtr::new(ptr::null_mut()),
            vtable: &STATIC_VTABLE,
        }
    }

    /// Returns the number of bytes contained in this `Bytes`.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns true if the `Bytes` has a length of 0.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

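    /// Returns true if this handle is the only reference to the underlying
    /// memory, as reported by the backing storage's vtable.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let a = Bytes::from(vec![1, 2, 3]);
    /// assert!(a.is_unique());
    /// let _b = a.clone();
    /// assert!(!a.is_unique());
    /// ```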
    pub fn is_unique(&self) -> bool {
        unsafe { (self.vtable.is_unique)(&self.data) }
    }

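    /// Creates a `Bytes` instance by copying the given slice into newly
    /// allocated memory.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let b = Bytes::copy_from_slice(b"hello");
    /// assert_eq!(&b[..], b"hello");
    /// ```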
    pub fn copy_from_slice(data: &[u8]) -> Self {
        data.to_vec().into()
    }

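    /// Returns a slice of `self` for the provided range; an O(1) operation
    /// that clones the handle and narrows its pointer and length.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let a = Bytes::from(&b"hello world"[..]);
    /// let b = a.slice(2..5);
    ///
    /// assert_eq!(&b[..], b"llo");
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds or its start is greater than its end.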
    pub fn slice(&self, range: impl RangeBounds<usize>) -> Self {
        use core::ops::Bound;

        let len = self.len();

        let begin = match range.start_bound() {
            Bound::Included(&n) => n,
            Bound::Excluded(&n) => n.checked_add(1).expect("out of range"),
            Bound::Unbounded => 0,
        };

        let end = match range.end_bound() {
            Bound::Included(&n) => n.checked_add(1).expect("out of range"),
            Bound::Excluded(&n) => n,
            Bound::Unbounded => len,
        };

        assert!(
            begin <= end,
            "range start must not be greater than end: {:?} <= {:?}",
            begin,
            end,
        );
        assert!(
            end <= len,
            "range end out of bounds: {:?} <= {:?}",
            end,
            len,
        );

        if end == begin {
            return Bytes::new();
        }

        let mut ret = self.clone();

        ret.len = end - begin;
        ret.ptr = unsafe { ret.ptr.add(begin) };

        ret
    }

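    /// Returns a slice of `self` equivalent to the given `subset`, which must
    /// be a sub-slice obtained from this `Bytes` (for example via `as_ref()`);
    /// the offsets are recomputed from the two pointers.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let bytes = Bytes::from(&b"012345678"[..]);
    /// let as_slice = bytes.as_ref();
    /// let subset = &as_slice[2..6];
    /// let subslice = bytes.slice_ref(subset);
    /// assert_eq!(&subslice[..], b"2345");
    /// ```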
    pub fn slice_ref(&self, subset: &[u8]) -> Self {
        // Empty slices and empty `Bytes` may have their pointers reset, so an
        // empty slice is explicitly allowed as a subslice of any slice.
        if subset.is_empty() {
            return Bytes::new();
        }

        let bytes_p = self.as_ptr() as usize;
        let bytes_len = self.len();

        let sub_p = subset.as_ptr() as usize;
        let sub_len = subset.len();

        assert!(
            sub_p >= bytes_p,
            "subset pointer ({:p}) is smaller than self pointer ({:p})",
            subset.as_ptr(),
            self.as_ptr(),
        );
        assert!(
            sub_p + sub_len <= bytes_p + bytes_len,
            "subset is out of bounds: self = ({:p}, {}), subset = ({:p}, {})",
            self.as_ptr(),
            bytes_len,
            subset.as_ptr(),
            sub_len,
        );

        let sub_offset = sub_p - bytes_p;

        self.slice(sub_offset..(sub_offset + sub_len))
    }

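    /// Splits the bytes into two at the given index: `self` keeps `[0, at)`
    /// and the returned `Bytes` contains `[at, len)`. O(1), no copying.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let mut a = Bytes::from(&b"hello world"[..]);
    /// let b = a.split_off(5);
    ///
    /// assert_eq!(&a[..], b"hello");
    /// assert_eq!(&b[..], b" world");
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.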
    #[must_use = "consider Bytes::truncate if you don't need the other half"]
    pub fn split_off(&mut self, at: usize) -> Self {
        if at == self.len() {
            return Bytes::new_empty_with_ptr(self.ptr.wrapping_add(at));
        }

        if at == 0 {
            return mem::replace(self, Bytes::new_empty_with_ptr(self.ptr));
        }

        assert!(
            at <= self.len(),
            "split_off out of bounds: {:?} <= {:?}",
            at,
            self.len(),
        );

        let mut ret = self.clone();

        self.len = at;

        unsafe { ret.inc_start(at) };

        ret
    }

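    /// Splits the bytes into two at the given index: the returned `Bytes`
    /// contains `[0, at)` and `self` keeps `[at, len)`. O(1), no copying.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let mut a = Bytes::from(&b"hello world"[..]);
    /// let b = a.split_to(5);
    ///
    /// assert_eq!(&a[..], b" world");
    /// assert_eq!(&b[..], b"hello");
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.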
    #[must_use = "consider Bytes::advance if you don't need the other half"]
    pub fn split_to(&mut self, at: usize) -> Self {
        if at == self.len() {
            let end_ptr = self.ptr.wrapping_add(at);
            return mem::replace(self, Bytes::new_empty_with_ptr(end_ptr));
        }

        if at == 0 {
            return Bytes::new_empty_with_ptr(self.ptr);
        }

        assert!(
            at <= self.len(),
            "split_to out of bounds: {:?} <= {:?}",
            at,
            self.len(),
        );

        let mut ret = self.clone();

        unsafe { self.inc_start(at) };

        ret.len = at;
        ret
    }

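    /// Shortens the buffer to `len` bytes, keeping the first `len` and
    /// dropping the rest; has no effect if `len` is not smaller than the
    /// current length.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let mut buf = Bytes::from(&b"hello world"[..]);
    /// buf.truncate(5);
    /// assert_eq!(buf, b"hello"[..]);
    /// ```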
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len < self.len {
            // The Vec "promotable" vtables do not store the capacity; it is
            // reconstructed from the pointer offset and length, so `len` cannot
            // simply be shrunk while using those reprs. `split_off` promotes
            // the storage so the capacity is preserved.
            if self.vtable as *const Vtable == &PROMOTABLE_EVEN_VTABLE
                || self.vtable as *const Vtable == &PROMOTABLE_ODD_VTABLE
            {
                drop(self.split_off(len));
            } else {
                self.len = len;
            }
        }
    }

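    /// Clears the buffer, removing all data; equivalent to `truncate(0)`.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let mut buf = Bytes::from(&b"hello world"[..]);
    /// buf.clear();
    /// assert!(buf.is_empty());
    /// ```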
    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0);
    }

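    /// Attempts to convert `self` into a `BytesMut`, succeeding only when this
    /// handle is the unique reference to the underlying memory (`is_unique`);
    /// otherwise the original `Bytes` is returned in the `Err` variant.
    ///
    /// Usage sketch (hedged, assuming the public `bytes::Bytes` export and the
    /// crate's `BytesMut: PartialEq<[u8]>` impl):
    ///
    /// ```
    /// use bytes::Bytes;
    ///
    /// let bytes = Bytes::from(b"hello".to_vec());
    /// assert_eq!(bytes.try_into_mut().unwrap(), b"hello"[..]);
    /// ```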
    pub fn try_into_mut(self) -> Result<BytesMut, Bytes> {
        if self.is_unique() {
            Ok(self.into())
        } else {
            Err(self)
        }
    }

    #[inline]
    pub(crate) unsafe fn with_vtable(
        ptr: *const u8,
        len: usize,
        data: AtomicPtr<()>,
        vtable: &'static Vtable,
    ) -> Bytes {
        Bytes {
            ptr,
            len,
            data,
            vtable,
        }
    }

    #[inline]
    fn as_slice(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.ptr, self.len) }
    }

    #[inline]
    unsafe fn inc_start(&mut self, by: usize) {
        // Callers should already have asserted the bound; keep a debug assert for tests.
        debug_assert!(self.len >= by, "internal: inc_start out of bounds");
        self.len -= by;
        self.ptr = self.ptr.add(by);
    }
}

unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}

impl Drop for Bytes {
    #[inline]
    fn drop(&mut self) {
        unsafe { (self.vtable.drop)(&mut self.data, self.ptr, self.len) }
    }
}

impl Clone for Bytes {
    #[inline]
    fn clone(&self) -> Bytes {
        unsafe { (self.vtable.clone)(&self.data, self.ptr, self.len) }
    }
}

impl Buf for Bytes {
    #[inline]
    fn remaining(&self) -> usize {
        self.len()
    }

    #[inline]
    fn chunk(&self) -> &[u8] {
        self.as_slice()
    }

    #[inline]
    fn advance(&mut self, cnt: usize) {
        assert!(
            cnt <= self.len(),
            "cannot advance past `remaining`: {:?} <= {:?}",
            cnt,
            self.len(),
        );

        unsafe {
            self.inc_start(cnt);
        }
    }

    fn copy_to_bytes(&mut self, len: usize) -> Self {
        self.split_to(len)
    }
}

impl Deref for Bytes {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsRef<[u8]> for Bytes {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl hash::Hash for Bytes {
    fn hash<H>(&self, state: &mut H)
    where
        H: hash::Hasher,
    {
        self.as_slice().hash(state);
    }
}

impl Borrow<[u8]> for Bytes {
    fn borrow(&self) -> &[u8] {
        self.as_slice()
    }
}

impl IntoIterator for Bytes {
    type Item = u8;
    type IntoIter = IntoIter<Bytes>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}

impl<'a> IntoIterator for &'a Bytes {
    type Item = &'a u8;
    type IntoIter = core::slice::Iter<'a, u8>;

    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}

impl FromIterator<u8> for Bytes {
    fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
        Vec::from_iter(into_iter).into()
    }
}

impl PartialEq for Bytes {
    fn eq(&self, other: &Bytes) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl PartialOrd for Bytes {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl Ord for Bytes {
    fn cmp(&self, other: &Bytes) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl Eq for Bytes {}

impl PartialEq<[u8]> for Bytes {
    fn eq(&self, other: &[u8]) -> bool {
        self.as_slice() == other
    }
}

impl PartialOrd<[u8]> for Bytes {
    fn partial_cmp(&self, other: &[u8]) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other)
    }
}

impl PartialEq<Bytes> for [u8] {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for [u8] {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<str> for Bytes {
    fn eq(&self, other: &str) -> bool {
        self.as_slice() == other.as_bytes()
    }
}

impl PartialOrd<str> for Bytes {
    fn partial_cmp(&self, other: &str) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_bytes())
    }
}

impl PartialEq<Bytes> for str {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for str {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl PartialEq<Vec<u8>> for Bytes {
    fn eq(&self, other: &Vec<u8>) -> bool {
        *self == other[..]
    }
}

impl PartialOrd<Vec<u8>> for Bytes {
    fn partial_cmp(&self, other: &Vec<u8>) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(&other[..])
    }
}

impl PartialEq<Bytes> for Vec<u8> {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for Vec<u8> {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<String> for Bytes {
    fn eq(&self, other: &String) -> bool {
        *self == other[..]
    }
}

impl PartialOrd<String> for Bytes {
    fn partial_cmp(&self, other: &String) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_bytes())
    }
}

impl PartialEq<Bytes> for String {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for String {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl PartialEq<Bytes> for &[u8] {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for &[u8] {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<Bytes> for &str {
    fn eq(&self, other: &Bytes) -> bool {
        *other == *self
    }
}

impl PartialOrd<Bytes> for &str {
    fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl<'a, T: ?Sized> PartialEq<&'a T> for Bytes
where
    Bytes: PartialEq<T>,
{
    fn eq(&self, other: &&'a T) -> bool {
        *self == **other
    }
}

impl<'a, T: ?Sized> PartialOrd<&'a T> for Bytes
where
    Bytes: PartialOrd<T>,
{
    fn partial_cmp(&self, other: &&'a T) -> Option<cmp::Ordering> {
        self.partial_cmp(&**other)
    }
}

impl Default for Bytes {
    #[inline]
    fn default() -> Bytes {
        Bytes::new()
    }
}

impl From<&'static [u8]> for Bytes {
    fn from(slice: &'static [u8]) -> Bytes {
        Bytes::from_static(slice)
    }
}

impl From<&'static str> for Bytes {
    fn from(slice: &'static str) -> Bytes {
        Bytes::from_static(slice.as_bytes())
    }
}

impl From<Vec<u8>> for Bytes {
    fn from(vec: Vec<u8>) -> Bytes {
        let mut vec = ManuallyDrop::new(vec);
        let ptr = vec.as_mut_ptr();
        let len = vec.len();
        let cap = vec.capacity();

        // Avoid an extra allocation if possible.
        if len == cap {
            let vec = ManuallyDrop::into_inner(vec);
            return Bytes::from(vec.into_boxed_slice());
        }

        let shared = Box::new(Shared {
            buf: ptr,
            cap,
            ref_cnt: AtomicUsize::new(1),
        });

        let shared = Box::into_raw(shared);
        // The lower bit of `data` is used as a flag (KIND_MASK), so the
        // `Shared` allocation must be at least 2-byte aligned.
        debug_assert!(
            0 == (shared as usize & KIND_MASK),
            "internal: Box<Shared> should have an aligned pointer",
        );
        Bytes {
            ptr,
            len,
            data: AtomicPtr::new(shared as _),
            vtable: &SHARED_VTABLE,
        }
    }
}

impl From<Box<[u8]>> for Bytes {
    fn from(slice: Box<[u8]>) -> Bytes {
        // Box<[u8]> doesn't contain a heap allocation for empty slices, so the
        // pointer isn't aligned enough for the KIND_VEC tagging to work.
        if slice.is_empty() {
            return Bytes::new();
        }

        let len = slice.len();
        let ptr = Box::into_raw(slice) as *mut u8;

        if ptr as usize & 0x1 == 0 {
            let data = ptr_map(ptr, |addr| addr | KIND_VEC);
            Bytes {
                ptr,
                len,
                data: AtomicPtr::new(data.cast()),
                vtable: &PROMOTABLE_EVEN_VTABLE,
            }
        } else {
            Bytes {
                ptr,
                len,
                data: AtomicPtr::new(ptr.cast()),
                vtable: &PROMOTABLE_ODD_VTABLE,
            }
        }
    }
}

impl From<Bytes> for BytesMut {
    fn from(bytes: Bytes) -> Self {
        let bytes = ManuallyDrop::new(bytes);
        unsafe { (bytes.vtable.to_mut)(&bytes.data, bytes.ptr, bytes.len) }
    }
}

impl From<String> for Bytes {
    fn from(s: String) -> Bytes {
        Bytes::from(s.into_bytes())
    }
}

impl From<Bytes> for Vec<u8> {
    fn from(bytes: Bytes) -> Vec<u8> {
        let bytes = ManuallyDrop::new(bytes);
        unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
    }
}

impl fmt::Debug for Vtable {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Vtable")
            .field("clone", &(self.clone as *const ()))
            .field("drop", &(self.drop as *const ()))
            .finish()
    }
}

const STATIC_VTABLE: Vtable = Vtable {
    clone: static_clone,
    to_vec: static_to_vec,
    to_mut: static_to_mut,
    is_unique: static_is_unique,
    drop: static_drop,
};

unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    let slice = slice::from_raw_parts(ptr, len);
    Bytes::from_static(slice)
}

unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    let slice = slice::from_raw_parts(ptr, len);
    slice.to_vec()
}

unsafe fn static_to_mut(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesMut {
    let slice = slice::from_raw_parts(ptr, len);
    BytesMut::from(slice)
}

fn static_is_unique(_: &AtomicPtr<()>) -> bool {
    false
}

unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // nothing to drop for &'static [u8]
}

static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
    clone: promotable_even_clone,
    to_vec: promotable_even_to_vec,
    to_mut: promotable_even_to_mut,
    is_unique: promotable_is_unique,
    drop: promotable_even_drop,
};

static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
    clone: promotable_odd_clone,
    to_vec: promotable_odd_to_vec,
    to_mut: promotable_odd_to_mut,
    is_unique: promotable_is_unique,
    drop: promotable_odd_drop,
};

unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        shallow_clone_arc(shared.cast(), ptr, len)
    } else {
        debug_assert_eq!(kind, KIND_VEC);
        let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK);
        shallow_clone_vec(data, shared, buf, ptr, len)
    }
}

unsafe fn promotable_to_vec(
    data: &AtomicPtr<()>,
    ptr: *const u8,
    len: usize,
    f: fn(*mut ()) -> *mut u8,
) -> Vec<u8> {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        shared_to_vec_impl(shared.cast(), ptr, len)
    } else {
        // If not promoted, `data` still holds the (tagged) base pointer of the
        // original buffer.
        debug_assert_eq!(kind, KIND_VEC);

        let buf = f(shared);

        let cap = offset_from(ptr, buf) + len;

        // Copy the (possibly advanced) view back to the start of the buffer.
        ptr::copy(ptr, buf, len);

        Vec::from_raw_parts(buf, len, cap)
    }
}

unsafe fn promotable_to_mut(
    data: &AtomicPtr<()>,
    ptr: *const u8,
    len: usize,
    f: fn(*mut ()) -> *mut u8,
) -> BytesMut {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        shared_to_mut_impl(shared.cast(), ptr, len)
    } else {
        // KIND_VEC is a view of an underlying buffer at some offset, and
        // `ptr + len` always marks the end of that buffer, so the full Vec can
        // be rebuilt from the base pointer and then advanced to the offset.
        debug_assert_eq!(kind, KIND_VEC);

        let buf = f(shared);
        let off = offset_from(ptr, buf);
        let cap = off + len;
        let v = Vec::from_raw_parts(buf, cap, cap);

        let mut b = BytesMut::from_vec(v);
        b.advance_unchecked(off);
        b
    }
}

unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    promotable_to_vec(data, ptr, len, |shared| {
        ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
    })
}

unsafe fn promotable_even_to_mut(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesMut {
    promotable_to_mut(data, ptr, len, |shared| {
        ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
    })
}

unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
        let kind = shared as usize & KIND_MASK;

        if kind == KIND_ARC {
            release_shared(shared.cast());
        } else {
            debug_assert_eq!(kind, KIND_VEC);
            let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK);
            free_boxed_slice(buf, ptr, len);
        }
    });
}

unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        shallow_clone_arc(shared as _, ptr, len)
    } else {
        debug_assert_eq!(kind, KIND_VEC);
        shallow_clone_vec(data, shared, shared.cast(), ptr, len)
    }
}

unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    promotable_to_vec(data, ptr, len, |shared| shared.cast())
}

unsafe fn promotable_odd_to_mut(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesMut {
    promotable_to_mut(data, ptr, len, |shared| shared.cast())
}

unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
        let kind = shared as usize & KIND_MASK;

        if kind == KIND_ARC {
            release_shared(shared.cast());
        } else {
            debug_assert_eq!(kind, KIND_VEC);

            free_boxed_slice(shared.cast(), ptr, len);
        }
    });
}

unsafe fn promotable_is_unique(data: &AtomicPtr<()>) -> bool {
    let shared = data.load(Ordering::Acquire);
    let kind = shared as usize & KIND_MASK;

    if kind == KIND_ARC {
        let ref_cnt = (*shared.cast::<Shared>()).ref_cnt.load(Ordering::Relaxed);
        ref_cnt == 1
    } else {
        true
    }
}

unsafe fn free_boxed_slice(buf: *mut u8, offset: *const u8, len: usize) {
    let cap = offset_from(offset, buf) + len;
    dealloc(buf, Layout::from_size_align(cap, 1).unwrap())
}

struct Shared {
    // Holds the arguments to dealloc upon Drop, but otherwise doesn't use them.
    buf: *mut u8,
    cap: usize,
    ref_cnt: AtomicUsize,
}

impl Drop for Shared {
    fn drop(&mut self) {
        unsafe { dealloc(self.buf, Layout::from_size_align(self.cap, 1).unwrap()) }
    }
}

// Assert that the alignment of `Shared` is divisible by 2. This is a necessary
// invariant, since the low bit of the `data` pointer is used as the kind flag.
const _: [(); 0 - mem::align_of::<Shared>() % 2] = [];

static SHARED_VTABLE: Vtable = Vtable {
    clone: shared_clone,
    to_vec: shared_to_vec,
    to_mut: shared_to_mut,
    is_unique: shared_is_unique,
    drop: shared_drop,
};

const KIND_ARC: usize = 0b0;
const KIND_VEC: usize = 0b1;
const KIND_MASK: usize = 0b1;

unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    let shared = data.load(Ordering::Relaxed);
    shallow_clone_arc(shared as _, ptr, len)
}

unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
    // Check that the ref_cnt is 1 (unique). If it is, set it to 0 with an
    // AcqRel exchange so this takes sole ownership; otherwise fall back to
    // copying and releasing our reference.
    if (*shared)
        .ref_cnt
        .compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
        .is_ok()
    {
        // Deallocate the `Shared` box without running its destructor, which
        // would otherwise free the buffer being handed out as a `Vec`.
        let shared = *Box::from_raw(shared);
        let shared = ManuallyDrop::new(shared);
        let buf = shared.buf;
        let cap = shared.cap;

        // Copy the (possibly advanced) view back to the start of the buffer.
        ptr::copy(ptr, buf, len);

        Vec::from_raw_parts(buf, len, cap)
    } else {
        let v = slice::from_raw_parts(ptr, len).to_vec();
        release_shared(shared);
        v
    }
}

unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
}

unsafe fn shared_to_mut_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> BytesMut {
    // If this is the only outstanding reference, the buffer can be reclaimed
    // in place; otherwise the data must be copied out.
    if (*shared).ref_cnt.load(Ordering::Acquire) == 1 {
        // Deallocate the `Shared` box without dropping the buffer it owns.
        let shared = *Box::from_raw(shared);
        let shared = ManuallyDrop::new(shared);
        let buf = shared.buf;
        let cap = shared.cap;

        // Rebuild the Vec and advance the BytesMut to the current offset.
        let off = offset_from(ptr, buf);
        let v = Vec::from_raw_parts(buf, len + off, cap);

        let mut b = BytesMut::from_vec(v);
        b.advance_unchecked(off);
        b
    } else {
        // Copy the data into a new Vec, then release this reference.
        let v = slice::from_raw_parts(ptr, len).to_vec();
        release_shared(shared);
        BytesMut::from_vec(v)
    }
}

unsafe fn shared_to_mut(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesMut {
    shared_to_mut_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
}

pub(crate) unsafe fn shared_is_unique(data: &AtomicPtr<()>) -> bool {
    let shared = data.load(Ordering::Acquire);
    let ref_cnt = (*shared.cast::<Shared>()).ref_cnt.load(Ordering::Relaxed);
    ref_cnt == 1
}

unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    data.with_mut(|shared| {
        release_shared(shared.cast());
    });
}

unsafe fn shallow_clone_arc(shared: *mut Shared, ptr: *const u8, len: usize) -> Bytes {
    let old_size = (*shared).ref_cnt.fetch_add(1, Ordering::Relaxed);

    if old_size > usize::MAX >> 1 {
        crate::abort();
    }

    Bytes {
        ptr,
        len,
        data: AtomicPtr::new(shared as _),
        vtable: &SHARED_VTABLE,
    }
}

#[cold]
unsafe fn shallow_clone_vec(
    atom: &AtomicPtr<()>,
    ptr: *const (),
    buf: *mut u8,
    offset: *const u8,
    len: usize,
) -> Bytes {
    // The buffer is still tracked as a plain Vec, so it must be promoted to a
    // `Shared`. Several handles may race to do this; the compare-exchange below
    // detects the race.
    let shared = Box::new(Shared {
        buf,
        cap: offset_from(offset, buf) + len,
        // Initialize the refcount to 2: one for this handle and one for the
        // new clone returned from this function.
        ref_cnt: AtomicUsize::new(2),
    });

    let shared = Box::into_raw(shared);

    debug_assert!(
        0 == (shared as usize & KIND_MASK),
        "internal: Box<Shared> should have an aligned pointer",
    );

    // Try to swap the new `Shared` pointer into `data`. If another thread won
    // the race, use its `Shared` and discard ours.
    match atom.compare_exchange(ptr as _, shared as _, Ordering::AcqRel, Ordering::Acquire) {
        Ok(actual) => {
            debug_assert!(actual as usize == ptr as usize);
            // The promotion succeeded; return the new handle.
            Bytes {
                ptr: offset,
                len,
                data: AtomicPtr::new(shared as _),
                vtable: &SHARED_VTABLE,
            }
        }
        Err(actual) => {
            // A concurrent clone promoted the buffer first. Free our `Shared`
            // box without dropping the buffer it refers to, then clone from
            // the winner's `Shared`.
            let shared = Box::from_raw(shared);
            mem::forget(*shared);

            shallow_clone_arc(actual as _, offset, len)
        }
    }
}

unsafe fn release_shared(ptr: *mut Shared) {
    if (*ptr).ref_cnt.fetch_sub(1, Ordering::Release) != 1 {
        return;
    }

    // This acquire load pairs with the `Release` decrements above, preventing
    // reordering of the last use of the data past its deletion: all uses happen
    // before the final decrement, which happens before this load, which happens
    // before the drop. A load is used instead of a fence because thread
    // sanitizer does not support atomic fences.
    (*ptr).ref_cnt.load(Ordering::Acquire);

    // Drop the data.
    drop(Box::from_raw(ptr));
}

#[cfg(miri)]
fn ptr_map<F>(ptr: *mut u8, f: F) -> *mut u8
where
    F: FnOnce(usize) -> usize,
{
    // Strict-provenance friendly variant: derive the new pointer by offsetting
    // the old one, so provenance is preserved (better miri diagnostics).
    let old_addr = ptr as usize;
    let new_addr = f(old_addr);
    let diff = new_addr.wrapping_sub(old_addr);
    ptr.wrapping_add(diff)
}

#[cfg(not(miri))]
fn ptr_map<F>(ptr: *mut u8, f: F) -> *mut u8
where
    F: FnOnce(usize) -> usize,
{
    let old_addr = ptr as usize;
    let new_addr = f(old_addr);
    new_addr as *mut u8
}

fn without_provenance(ptr: usize) -> *const u8 {
    core::ptr::null::<u8>().wrapping_add(ptr)
}

fn _split_to_must_use() {}

fn _split_off_must_use() {}

#[cfg(all(test, loom))]
mod fuzz {
    use loom::sync::Arc;
    use loom::thread;

    use super::Bytes;

    #[test]
    fn bytes_cloning_vec() {
        loom::model(|| {
            let a = Bytes::from(b"abcdefgh".to_vec());
            let addr = a.as_ptr() as usize;

            // Test that Bytes::clone is Sync by sharing it through an Arc.
            let a1 = Arc::new(a);
            let a2 = a1.clone();

            let t1 = thread::spawn(move || {
                let b: Bytes = (*a1).clone();
                assert_eq!(b.as_ptr() as usize, addr);
            });

            let t2 = thread::spawn(move || {
                let b: Bytes = (*a2).clone();
                assert_eq!(b.as_ptr() as usize, addr);
            });

            t1.join().unwrap();
            t2.join().unwrap();
        });
    }
}