1use iter::BlockIter;
2use traits::{Bits, BitsMut, BitSliceable, get_masked_block};
3use storage::{Address, BlockType};
4use range_compat::*;
5
6use std::marker::PhantomData;
7use std::{cmp, fmt, hash, ptr};
8
/// Bookkeeping for a slice's view into raw block storage: where the bits
/// start within the first block, how many bits the view contains, and how
/// many leading blocks are fully aligned.
#[derive(Copy, Clone, Debug)]
struct SliceSpan {
    // Bit offset of the slice's first bit within its first block;
    // 0 means the slice begins on a block boundary.
    offset: u8,
    // Length of the slice in bits.
    len: u64,
    // Number of whole blocks addressable directly by block index.
    // Non-zero only when `offset == 0` (see `SliceSpan::new`).
    aligned_blocks: usize,
}
/// Address of a (possibly partial) block of bits within raw block storage.
#[derive(Copy, Clone, Debug)]
enum BlockAddress {
    /// A whole, block-aligned block at the given block index.
    FullBlockAt(usize),
    /// `count` bits starting at the given bit address; the run may
    /// straddle two adjacent storage blocks.
    SomeBitsAt(Address, usize),
}
28
29impl SliceSpan {
30 fn new<Block: BlockType>(offset: u8, bit_len: u64) -> Self {
31 SliceSpan {
32 offset,
33 len: bit_len,
34 aligned_blocks: if offset == 0 {Block::ceil_div_nbits(bit_len)} else {0},
35 }
36 }
37
38 fn from_block_len<Block: BlockType>(block_len: usize) -> Self {
39 Self::new::<Block>(0, Block::mul_nbits(block_len))
40 }
41
42 fn block_len<Block: BlockType>(&self) -> usize {
43 Block::ceil_div_nbits(self.len)
44 }
45
46 fn find_block<Block: BlockType>(&self, position: usize) -> Option<BlockAddress> {
47 if position < self.aligned_blocks {
48 return Some(BlockAddress::FullBlockAt(position));
49 } else if position < self.block_len::<Block>() {
50 let start = Block::mul_nbits(position) + u64::from(self.offset);
51 let address = Address::new::<Block>(start);
52 let count = Block::block_bits(self.len, position);
53 Some(BlockAddress::SomeBitsAt(address, count))
54 } else {
55 None
56 }
57 }
58
59 fn find_bits<Block: BlockType>(&self, position: u64, count: usize) -> Option<BlockAddress> {
60 if position + (count as u64) <= self.len {
61 let address = Address::new::<Block>(position + u64::from(self.offset));
62 if count == Block::nbits() && address.bit_offset == 0 {
63 Some(BlockAddress::FullBlockAt(address.block_index))
64 } else {
65 Some(BlockAddress::SomeBitsAt(address, count))
66 }
67 } else {
68 None
69 }
70 }
71
72 fn find_bit<Block: BlockType>(&self, position: u64) -> Option<Address> {
73 if position < self.len {
74 Some(Address::new::<Block>(position + self.offset as u64))
75 } else {
76 None
77 }
78 }
79}
80
impl BlockAddress {
    /// Reads the addressed bits out of `bits`, returning them packed into
    /// the low bits of a single `Block`.
    ///
    /// # Safety
    ///
    /// `bits` must point to storage valid for reads at every block this
    /// address touches (one block for `FullBlockAt`; one or two adjacent
    /// blocks for `SomeBitsAt`).
    ///
    /// NOTE(review): for `SomeBitsAt` with `count == 0` this still reads
    /// the block at `address.block_index`, which may lie one past the end
    /// of the storage when the address is exactly at the end — confirm
    /// callers never produce that combination.
    unsafe fn read<Block: BlockType>(self, bits: *const Block) -> Block {
        match self {
            BlockAddress::FullBlockAt(position) =>
                ptr::read(bits.offset(position as isize)),

            BlockAddress::SomeBitsAt(address, count) => {
                let offset = address.bit_offset;
                let ptr1 = bits.offset(address.block_index as isize);
                let block1 = ptr::read(ptr1);

                // Bits available in the first block from `offset` upward.
                let shift1 = offset as usize;
                let shift2 = Block::nbits() - shift1;

                // Take as many bits as fit from the first block.
                let bits_size1 = cmp::min(shift2, count);
                let chunk1 = block1.get_bits(shift1, bits_size1);

                // Whatever remains must come from the next block.
                let bits_size2 = count - bits_size1;
                if bits_size2 == 0 {
                    return chunk1;
                }

                let block2 = ptr::read(ptr1.offset(1));
                let chunk2 = block2.get_bits(0, bits_size2);
                // Splice the second chunk above the first.
                (chunk1 | (chunk2 << shift2))
            }
        }
    }

    /// Writes the low bits of `value` into the addressed bits of `bits`.
    ///
    /// # Safety
    ///
    /// `bits` must point to storage valid for reads and writes at every
    /// block this address touches (one or two adjacent blocks). The same
    /// `count == 0` caveat as for `read` applies.
    unsafe fn write<Block: BlockType>(self, bits: *mut Block, value: Block) {
        match self {
            BlockAddress::FullBlockAt(position) =>
                ptr::write(bits.offset(position as isize), value),

            BlockAddress::SomeBitsAt(address, count) => {
                let offset = address.bit_offset;
                let ptr1 = bits.offset(address.block_index as isize);

                // Mirror of `read`: split `value` between the two blocks.
                let shift1 = offset;
                let shift2 = Block::nbits() - shift1;

                let bits_size1 = cmp::min(count, shift2);
                // Read-modify-write the first block, preserving bits
                // outside the addressed range.
                let old_block1 = ptr::read(ptr1);
                let new_block1 = old_block1.with_bits(shift1, bits_size1, value);
                ptr::write(ptr1, new_block1);

                let bits_size2 = count - bits_size1;
                if bits_size2 == 0 { return; }
                // Spill the remaining high bits of `value` into the
                // low bits of the following block.
                let ptr2 = ptr1.offset(1);
                let old_block2 = ptr::read(ptr2);
                let new_block2 = old_block2.with_bits(0, bits_size2, value >> shift2);
                ptr::write(ptr2, new_block2);
            }
        }
    }
}
152
/// A borrowed, immutable view of a sequence of bits; the view need not
/// begin or end on a block boundary.
#[derive(Copy, Clone)]
pub struct BitSlice<'a, Block> {
    // Pointer to the first block containing bits of this slice.
    bits: *const Block,
    // Offset/length bookkeeping for the view.
    span: SliceSpan,
    // Ties the raw pointer to the lifetime of the borrowed storage.
    marker: PhantomData<&'a ()>,
}
175
/// A borrowed, mutable view of a sequence of bits; the view need not
/// begin or end on a block boundary. Not `Copy`/`Clone`, since it owns
/// exclusive access to the underlying storage for `'a`.
pub struct BitSliceMut<'a, Block> {
    // Pointer to the first block containing bits of this slice.
    bits: *mut Block,
    // Offset/length bookkeeping for the view.
    span: SliceSpan,
    // Ties the raw pointer to the mutable borrow of the storage.
    marker: PhantomData<&'a mut ()>,
}
200
201impl<'a, Block: BlockType> BitSlice<'a, Block> {
202 pub fn from_slice(blocks: &'a [Block]) -> Self {
220 BitSlice {
221 bits: blocks.as_ptr(),
222 span: SliceSpan::from_block_len::<Block>(blocks.len()),
223 marker: PhantomData,
224 }
225 }
226
227 pub unsafe fn from_raw_parts(bits: *const Block, offset: u64, len: u64) -> Self {
239 let address = Address::new::<Block>(offset);
240 BitSlice {
241 bits: bits.offset(address.block_index as isize),
242 span: SliceSpan::new::<Block>(address.bit_offset as u8, len),
243 marker: PhantomData,
244 }
245 }
246
247 pub fn len(&self) -> u64 {
261 self.span.len
262 }
263
264 pub fn is_empty(&self) -> bool {
279 self.len() == 0
280 }
281}
282
283impl<'a, Block: BlockType> BitSliceMut<'a, Block> {
284 pub fn from_slice(blocks: &mut [Block]) -> Self {
289 BitSliceMut {
290 bits: blocks.as_mut_ptr(),
291 span: SliceSpan::from_block_len::<Block>(blocks.len()),
292 marker: PhantomData,
293 }
294 }
295
296 pub unsafe fn from_raw_parts(bits: *mut Block, offset: u64, len: u64) -> Self {
308 let address = Address::new::<Block>(offset);
309 BitSliceMut {
310 bits: bits.offset(address.block_index as isize),
311 span: SliceSpan::new::<Block>(address.bit_offset as u8, len),
312 marker: PhantomData,
313 }
314 }
315
316 pub fn len(&self) -> u64 {
318 self.span.len
319 }
320
321 pub fn is_empty(&self) -> bool {
323 self.len() == 0
324 }
325
326 pub fn as_bit_slice(&self) -> BitSlice<'a, Block> {
328 BitSlice {
329 bits: self.bits,
330 span: self.span,
331 marker: PhantomData,
332 }
333 }
334}
335
336impl<'a, 'b, Block: BlockType> From<&'b BitSliceMut<'a, Block>> for BitSlice<'a, Block> {
337 fn from(slice: &'b BitSliceMut<'a, Block>) -> Self {
338 slice.as_bit_slice()
339 }
340}
341
342impl<'a, Block: BlockType> From<&'a [Block]> for BitSlice<'a, Block> {
343 fn from(slice: &'a [Block]) -> Self {
344 BitSlice::from_slice(slice)
345 }
346}
347
348impl<'a, Block: BlockType> From<&'a mut [Block]> for BitSliceMut<'a, Block> {
349 fn from(slice: &'a mut [Block]) -> Self {
350 BitSliceMut::from_slice(slice)
351 }
352}
353
/// Reads one bit from raw block storage.
///
/// # Safety
///
/// `bits` must point to storage valid for reads at `address.block_index`.
unsafe fn get_raw_bit<Block: BlockType>(bits: *const Block, address: Address) -> bool {

    let ptr = bits.offset(address.block_index as isize);
    let block = ptr::read(ptr);
    block.get_bit(address.bit_offset)
}
363
/// Writes one bit into raw block storage, preserving the block's other bits.
///
/// # Safety
///
/// `bits` must point to storage valid for reads and writes at
/// `address.block_index`.
unsafe fn set_raw_bit<Block: BlockType>(bits: *mut Block, address: Address, value: bool) {
    let ptr = bits.offset(address.block_index as isize);
    let old_block = ptr::read(ptr);
    let new_block = old_block.with_bit(address.bit_offset, value);
    ptr::write(ptr, new_block);
}
373
374impl<'a, Block: BlockType> Bits for BitSlice<'a, Block> {
375 type Block = Block;
376
377 fn bit_len(&self) -> u64 {
378 self.len()
379 }
380
381 fn get_bit(&self, position: u64) -> bool {
382 let address = self.span.find_bit::<Block>(position)
383 .expect("BitSlice::get_bit: out of bounds");
384 unsafe { get_raw_bit(self.bits, address) }
385 }
386
387 fn get_block(&self, position: usize) -> Block {
388 get_masked_block(self, position)
389 }
390
391 fn get_raw_block(&self, position: usize) -> Block {
392 let block_addr = self.span.find_block::<Block>(position)
393 .expect("BitSlice::get_block: out of bounds");
394 unsafe { block_addr.read(self.bits) }
395 }
396
397 fn get_bits(&self, start: u64, count: usize) -> Self::Block {
398 let block_addr = self.span.find_bits::<Block>(start, count)
399 .expect("BitSlice::get_bits: out of bounds");
400 unsafe { block_addr.read(self.bits) }
401 }
402}
403
404impl<'a, Block: BlockType> Bits for BitSliceMut<'a, Block> {
405 type Block = Block;
406
407 fn bit_len(&self) -> u64 {
408 self.len()
409 }
410
411 fn get_bit(&self, position: u64) -> bool {
412 let address = self.span.find_bit::<Block>(position)
413 .expect("BitSliceMut::get_bit: out of bounds");
414 unsafe { get_raw_bit(self.bits, address) }
415 }
416
417 fn get_block(&self, position: usize) -> Block {
418 let block_addr = self.span.find_block::<Block>(position)
419 .expect("BitSliceMut::get_block: out of bounds");
420 unsafe { block_addr.read(self.bits) }
421 }
422
423 fn get_bits(&self, start: u64, count: usize) -> Self::Block {
424 let block_addr = self.span.find_bits::<Block>(start, count)
425 .expect("BitSliceMut::get_bits: out of bounds");
426 unsafe { block_addr.read(self.bits) }
427 }
428}
429
430impl<'a, Block: BlockType> BitsMut for BitSliceMut<'a, Block> {
431 fn set_bit(&mut self, position: u64, value: bool) {
432 let address = self.span.find_bit::<Block>(position)
433 .expect("BitSliceMut::set_bit: out of bounds");
434 unsafe {
435 set_raw_bit(self.bits, address, value);
436 }
437 }
438
439 fn set_block(&mut self, position: usize, value: Block) {
440 let block_addr = self.span.find_block::<Block>(position)
441 .expect("BitSliceMut::set_block: out of bounds");
442 unsafe { block_addr.write(self.bits, value); }
443 }
444
445 fn set_bits(&mut self, start: u64, count: usize, value: Self::Block) {
446 let block_addr = self.span.find_bits::<Block>(start, count)
447 .expect("BitSliceMut::set_bits: out of bounds");
448 unsafe { block_addr.write(self.bits, value); }
449 }
450}
451
// Derive read-only `Index<u64>` (the `slice[i]` syntax) from the `Bits`
// implementations above.
impl_index_from_bits! {
    impl['a, Block: BlockType] Index<u64> for BitSlice<'a, Block>;
    impl['a, Block: BlockType] Index<u64> for BitSliceMut<'a, Block>;
}
456
457impl<'a, Block: BlockType> BitSliceable<Range<u64>> for BitSlice<'a, Block> {
458 type Slice = Self;
459
460 fn bit_slice(self, range: Range<u64>) -> Self {
461 assert!(range.start <= range.end, "BitSlice::slice: bad range");
462 assert!(range.end <= self.span.len, "BitSlice::slice: out of bounds");
463
464 unsafe {
465 BitSlice::from_raw_parts(self.bits,
466 range.start + u64::from(self.span.offset),
467 range.end - range.start)
468 }
469 }
470}
471
472impl<'a, Block: BlockType> BitSliceable<Range<u64>> for BitSliceMut<'a, Block> {
473 type Slice = Self;
474
475 fn bit_slice(self, range: Range<u64>) -> Self {
476 assert!(range.start <= range.end, "BitSliceMut::slice: bad range");
477 assert!(range.end <= self.span.len, "BitSliceMut::slice: out of bounds");
478
479 unsafe {
480 BitSliceMut::from_raw_parts(self.bits,
481 range.start + u64::from(self.span.offset),
482 range.end - range.start)
483 }
484 }
485}
486
#[cfg(inclusive_range)]
impl<'a, Block: BlockType> BitSliceable<RangeInclusive<u64>> for BitSlice<'a, Block> {
    type Slice = Self;

    /// Restricts the slice to the closed bit range `start ..= end`.
    fn bit_slice(self, range: RangeInclusive<u64>) -> Self {
        let (start, end) = get_inclusive_bounds(range)
            .expect("BitSlice::slice: bad range");
        // Strict `<` because `end` itself is included.
        assert!(end < self.span.len, "BitSlice::slice: out of bounds");

        let offset = start + u64::from(self.span.offset);
        let len = end - start + 1;
        // SAFETY: the checks above confirm the closed range lies inside
        // this slice, which lies inside the borrowed storage.
        unsafe { BitSlice::from_raw_parts(self.bits, offset, len) }
    }
}
503
#[cfg(inclusive_range)]
impl<'a, Block: BlockType> BitSliceable<RangeInclusive<u64>> for BitSliceMut<'a, Block> {
    type Slice = Self;

    /// Restricts the slice to the closed bit range `start ..= end`.
    fn bit_slice(self, range: RangeInclusive<u64>) -> Self {
        let (start, end) = get_inclusive_bounds(range)
            .expect("BitSliceMut::slice: bad range");
        // Strict `<` because `end` itself is included.
        assert!(end < self.span.len, "BitSliceMut::slice: out of bounds");

        let offset = start + u64::from(self.span.offset);
        let len = end - start + 1;
        // SAFETY: the checks above confirm the closed range lies inside
        // this slice, which lies inside the exclusively borrowed storage.
        unsafe { BitSliceMut::from_raw_parts(self.bits, offset, len) }
    }
}
520
521impl<'a, Block: BlockType> BitSliceable<RangeFrom<u64>> for BitSlice<'a, Block> {
522 type Slice = Self;
523
524 fn bit_slice(self, range: RangeFrom<u64>) -> Self {
525 let len = self.span.len;
526 self.bit_slice(range.start .. len)
527 }
528}
529
530impl<'a, Block: BlockType> BitSliceable<RangeFrom<u64>> for BitSliceMut<'a, Block> {
531 type Slice = Self;
532
533 fn bit_slice(self, range: RangeFrom<u64>) -> Self {
534 let len = self.span.len;
535 self.bit_slice(range.start .. len)
536 }
537}
538
539impl<'a, Block: BlockType> BitSliceable<RangeTo<u64>> for BitSlice<'a, Block> {
540 type Slice = Self;
541
542 fn bit_slice(self, range: RangeTo<u64>) -> Self {
543 self.bit_slice(0 .. range.end)
544 }
545}
546
547impl<'a, Block: BlockType> BitSliceable<RangeTo<u64>> for BitSliceMut<'a, Block> {
548 type Slice = Self;
549
550 fn bit_slice(self, range: RangeTo<u64>) -> Self {
551 self.bit_slice(0 .. range.end)
552 }
553}
554
#[cfg(inclusive_range)]
impl<'a, Block: BlockType> BitSliceable<RangeToInclusive<u64>> for BitSlice<'a, Block> {
    type Slice = Self;

    /// Slices from the start of the slice through `range.end`, inclusive.
    fn bit_slice(self, range: RangeToInclusive<u64>) -> Self {
        let exclusive_end = range.end + 1;
        self.bit_slice(0 .. exclusive_end)
    }
}
563
#[cfg(inclusive_range)]
impl<'a, Block: BlockType> BitSliceable<RangeToInclusive<u64>> for BitSliceMut<'a, Block> {
    type Slice = Self;

    /// Slices from the start of the slice through `range.end`, inclusive.
    fn bit_slice(self, range: RangeToInclusive<u64>) -> Self {
        let exclusive_end = range.end + 1;
        self.bit_slice(0 .. exclusive_end)
    }
}
572
impl<'a, Block: BlockType> BitSliceable<RangeFull> for BitSlice<'a, Block> {
    type Slice = Self;

    /// Slicing by `..` returns the slice unchanged.
    fn bit_slice(self, _: RangeFull) -> Self {
        self
    }
}
580
impl<'a, Block: BlockType> BitSliceable<RangeFull> for BitSliceMut<'a, Block> {
    type Slice = Self;

    /// Slicing by `..` returns the slice unchanged.
    fn bit_slice(self, _: RangeFull) -> Self {
        self
    }
}
588
589impl<'a, Block, R> BitSliceable<R> for &'a [Block]
590 where Block: BlockType,
591 BitSlice<'a, Block>: BitSliceable<R, Block = Block, Slice = BitSlice<'a, Block>> {
592
593 type Slice = BitSlice<'a, Block>;
594
595 fn bit_slice(self, range: R) -> Self::Slice {
596 BitSlice::from_slice(self).bit_slice(range)
597 }
598}
599
600impl<'a, Block, R> BitSliceable<R> for &'a mut [Block]
601 where Block: BlockType,
602 BitSliceMut<'a, Block>: BitSliceable<R, Block = Block, Slice = BitSliceMut<'a, Block>> {
603
604 type Slice = BitSliceMut<'a, Block>;
605
606 fn bit_slice(self, range: R) -> Self::Slice {
607 BitSliceMut::from_slice(self).bit_slice(range)
608 }
609}
610
611impl<'a, Other: Bits> PartialEq<Other> for BitSlice<'a, Other::Block> {
612 fn eq(&self, other: &Other) -> bool {
613 BlockIter::new(self) == BlockIter::new(other)
614 }
615}
616
// The `PartialEq` above is a total equivalence for same-typed slices.
impl<'a, Block: BlockType> Eq for BitSlice<'a, Block> {}
618
619impl<'a, Block: BlockType> PartialOrd for BitSlice<'a, Block> {
620 fn partial_cmp(&self, other: &BitSlice<Block>) -> Option<cmp::Ordering> {
621 Some(self.cmp(other))
622 }
623}
624
625impl<'a, Block: BlockType> Ord for BitSlice<'a, Block> {
626 fn cmp(&self, other: &Self) -> cmp::Ordering {
627 let iter1 = BlockIter::new(*self);
628 let iter2 = BlockIter::new(*other);
629 (iter1).cmp(iter2)
630 }
631}
632
633impl<'a, Other: Bits> PartialEq<Other> for BitSliceMut<'a, Other::Block> {
634 fn eq(&self, other: &Other) -> bool {
635 BlockIter::new(self) == BlockIter::new(other)
636 }
637}
638
// The `PartialEq` above is a total equivalence for same-typed slices.
impl<'a, Block: BlockType> Eq for BitSliceMut<'a, Block> {}
640
641impl<'a, Block: BlockType> PartialOrd for BitSliceMut<'a, Block> {
642 fn partial_cmp(&self, other: &BitSliceMut<Block>) -> Option<cmp::Ordering> {
643 Some(self.cmp(other))
644 }
645}
646
647impl<'a, Block: BlockType> Ord for BitSliceMut<'a, Block> {
648 fn cmp(&self, other: &Self) -> cmp::Ordering {
649 self.as_bit_slice().cmp(&other.as_bit_slice())
650 }
651}
652
653impl<'a, Block: BlockType + hash::Hash> hash::Hash for BitSlice<'a, Block> {
654 fn hash<H: hash::Hasher>(&self, state: &mut H) {
655 state.write_u64(self.bit_len());
656 for block in BlockIter::new(self) {
657 block.hash(state);
658 }
659 }
660}
661
662impl<'a, Block: BlockType + hash::Hash> hash::Hash for BitSliceMut<'a, Block> {
663 fn hash<H: hash::Hasher>(&self, state: &mut H) {
664 self.as_bit_slice().hash(state);
665 }
666}
667
668impl<'a, Block: BlockType> fmt::Debug for BitSlice<'a, Block> {
669 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
670 write!(f, "bit_vec![")?;
671 if !self.is_empty() {
672 write!(f, "{}", self.get_bit(0))?;
673 }
674 for i in 1 .. self.span.len {
675 write!(f, ", {}", self.get_bit(i))?;
676 }
677 write!(f, "]")
678 }
679}
680
681impl<'a, Block: BlockType> fmt::Debug for BitSliceMut<'a, Block> {
682 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
683 self.as_bit_slice().fmt(f)
684 }
685}
686
#[cfg(test)]
mod test {
    use BitVec;
    use super::*;

    // A mutable slice over a byte array reads and writes through to the
    // underlying storage.
    #[test]
    fn bit_slice_from_slice() {
        let mut bytes = [0b00001111u8];
        {
            let mut bs = BitSliceMut::from_slice(&mut bytes);
            assert_eq!( bs.get_block(0), 0b00001111 );
            bs.set_bit(1, false);
            assert_eq!( bs.get_block(0), 0b00001101 );
        }

        assert_eq!( bytes[0], 0b00001101 );
    }

    // `Index<u64>` works on both slice types; bit 0 is the least
    // significant bit of the first block.
    #[test]
    fn bit_slice_index() {
        let mut bytes = [0b00001111u8];
        {
            let bs = BitSlice::from_slice(&bytes);
            assert_eq!( bs[3], true );
            assert_eq!( bs[4], false );
        }
        {
            let bs = BitSliceMut::from_slice(&mut bytes);
            assert_eq!( bs[3], true );
            assert_eq!( bs[4], false );
        }
    }

    // Writes through an unaligned sub-slice land at the right positions in
    // the parent bit vector, including across a block boundary.
    #[test]
    fn bit_slice_update_across_blocks() {
        let mut bv: BitVec<u8> = bit_vec![ true; 20 ];
        bv.set_bit(3, false);
        bv.set_bit(7, false);

        {
            // Slice bits 4..12; slice bit i maps to parent bit 4 + i.
            let mut slice: BitSliceMut<u8> = (&mut bv).bit_slice(4..12);
            slice.set_bit(1, false);
            slice.set_bit(5, false);
        }

        assert!( bv[0] );
        assert!( bv[1] );
        assert!( bv[2] );
        assert!( !bv[3] );
        assert!( bv[4] );
        assert!( !bv[5] );
        assert!( bv[6] );
        assert!( !bv[7] );
        assert!( bv[8] );
        assert!( !bv[9] );
    }

    // `Debug` prints bits in position order, LSB first.
    #[test]
    fn debug_for_bit_slice() {
        let slice = [0b00110101u8];
        let bs = BitSlice::from_slice(&slice);
        let exp = "bit_vec![true, false, true, false, true, true, false, false]";
        let act = format!("{:?}", bs);
        assert_eq!( act, exp );
    }

    // `x ..= 4` selects five bits (inclusive upper bound).
    #[cfg(inclusive_range)]
    #[test]
    fn range_to_inclusive() {
        use BitSliceable;

        let base = [0b00110101u8];
        let slice = base.bit_slice(::std::ops::RangeToInclusive { end: 4 });
        assert_eq!( slice.len(), 5 );
    }
}
763