@@ -414,6 +414,13 @@ impl<H, T> HeaderVec<H, T> {
         unsafe { (self.ptr as *mut T).add(Self::offset()) }
     }
 
+    /// Gets the pointer to the end of the slice. This returns a mutable pointer to
+    /// uninitialized memory behind the last element.
+    #[inline(always)]
+    fn end_ptr_mut(&mut self) -> *mut T {
+        unsafe { self.start_ptr_mut().add(self.len_exact()) }
+    }
+
     #[inline(always)]
     fn header(&self) -> &HeaderVecHeader<H> {
         // The beginning of the memory is always the header.
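For orientation, the pointer helpers above assume a single allocation with the header first and the elements behind it. A rough sketch of that layout, reconstructed from `Self::offset()` and the `header()` comment (not part of this commit):

// Assumed layout behind the pointer arithmetic above:
//
//   ptr -> [ HeaderVecHeader<H> | T[0] | T[1] | ... | T[len-1] | uninit ... ]
//           ^                     ^                              ^
//           header()              start_ptr_mut()                end_ptr_mut()
//
// end_ptr_mut() points one element past the last initialized slot, which is
// exactly where extend_from_slice (added below) writes new items.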
@@ -427,6 +434,28 @@ impl<H, T> HeaderVec<H, T> {
     }
 }
 
+impl<H, T: Clone> HeaderVec<H, T> {
+    /// Adds items from a slice to the end of the list.
+    ///
+    /// Returns `Some(*const ())` if the memory was moved to a new location.
+    /// In this case, you are responsible for updating the weak nodes.
+    pub fn extend_from_slice(&mut self, slice: &[T]) -> Option<*const ()> {
+        let previous_pointer = self.reserve(slice.len());
+
+        // copy data
+        let end_ptr = self.end_ptr_mut();
+        for (index, item) in slice.iter().enumerate() {
+            unsafe {
+                core::ptr::write(end_ptr.add(index), item.clone());
+            }
+        }
+        // correct the len
+        self.header_mut().len = (self.len_exact() + slice.len()).into();
+
+        previous_pointer
+    }
+}
+
 #[cfg(feature = "atomic_append")]
 /// The atomic append API is only enabled when the `atomic_append` feature flag is set (which
 /// is the default).
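For reference, a minimal usage sketch of the new method, showing how a caller handles the relocation contract described in the doc comment. The `HeaderVec::new` constructor and the `update_weak_nodes` helper are assumptions for illustration, not part of this commit:

// Sketch only: constructor and weak-node bookkeeping are assumed.
let mut hv: HeaderVec<(), u8> = HeaderVec::new(());
if let Some(previous_pointer) = hv.extend_from_slice(&[1, 2, 3]) {
    // `reserve` relocated the allocation: any weak handles still holding
    // `previous_pointer` must be retargeted to the vector's new address
    // before they are dereferenced.
    update_weak_nodes(previous_pointer, &hv); // hypothetical helper
}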