@@ -200,15 +200,16 @@ ZFS_BTREE_FIND_IN_BUF_FUNC(zfs_range_tree_seg64_find_in_buf, zfs_range_seg64_t,
 ZFS_BTREE_FIND_IN_BUF_FUNC(zfs_range_tree_seg_gap_find_in_buf,
     zfs_range_seg_gap_t, zfs_range_tree_seg_gap_compare)
 
-zfs_range_tree_t *
-zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
+static zfs_range_tree_t *
+zfs_range_tree_create_impl(const zfs_range_tree_ops_t *ops,
     zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
-    uint64_t gap)
+    uint64_t gap, zfs_range_tree_usecase_t usecase)
 {
 	zfs_range_tree_t *rt = kmem_zalloc(sizeof (zfs_range_tree_t), KM_SLEEP);
 
 	ASSERT3U(shift, <, 64);
 	ASSERT3U(type, <=, ZFS_RANGE_SEG_NUM_TYPES);
+	ASSERT3U(usecase, <, ZFS_RANGE_TREE_UC_NUM_CASES);
 	size_t size;
 	int (*compare) (const void *, const void *);
 	bt_find_in_buf_f bt_find;
@@ -235,6 +236,7 @@ zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
 
 	rt->rt_ops = ops;
 	rt->rt_gap = gap;
+	rt->rt_usecase = usecase;
 	rt->rt_arg = arg;
 	rt->rt_type = type;
 	rt->rt_start = start;
@@ -246,11 +248,30 @@ zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
 	return (rt);
 }
 
+zfs_range_tree_t *
+zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
+    zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
+    uint64_t gap)
+{
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, gap,
+	    ZFS_RANGE_TREE_UC_UNKNOWN));
+}
+
 zfs_range_tree_t *
 zfs_range_tree_create(const zfs_range_tree_ops_t *ops,
     zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift)
 {
-	return (zfs_range_tree_create_gap(ops, type, arg, start, shift, 0));
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, 0,
+	    ZFS_RANGE_TREE_UC_UNKNOWN));
+}
+
+zfs_range_tree_t *
+zfs_range_tree_create_usecase(const zfs_range_tree_ops_t *ops,
+    zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
+    zfs_range_tree_usecase_t usecase)
+{
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, 0,
+	    usecase));
 }
 
 void
@@ -318,14 +339,25 @@ zfs_range_tree_add_impl(void *arg, uint64_t start, uint64_t size, uint64_t fill)
 	 * the normal code paths.
 	 */
 	if (rs != NULL) {
+		uint64_t rstart = zfs_rs_get_start(rs, rt);
+		uint64_t rend = zfs_rs_get_end(rs, rt);
 		if (gap == 0) {
-			zfs_panic_recover("zfs: adding existent segment to "
-			    "range tree (offset=%llx size=%llx)",
-			    (longlong_t)start, (longlong_t)size);
+			zfs_panic_recover_ms("zfs: adding segment "
+			    "(offset=%llx size=%llx) overlapping with "
+			    "existing one (offset=%llx size=%llx)",
+			    (longlong_t)start, (longlong_t)size,
+			    (longlong_t)rstart, (longlong_t)(rend - rstart));
+			if (rt->rt_usecase != ZFS_RANGE_TREE_UC_ALLOCATED_SPACE)
+				return;
+			/* add non-overlapping chunks */
+			if (rstart > start)
+				zfs_range_tree_add_impl(rt, start, rstart - start,
+				    rstart - start);
+			if (rend < end)
+				zfs_range_tree_add_impl(rt, rend, end - rend,
+				    end - rend);
 			return;
 		}
-		uint64_t rstart = zfs_rs_get_start(rs, rt);
-		uint64_t rend = zfs_rs_get_end(rs, rt);
 		if (rstart <= start && rend >= end) {
 			zfs_range_tree_adjust_fill(rt, rs, fill);
 			return;
@@ -450,6 +482,7 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 	zfs_range_seg_t *rs;
 	zfs_range_seg_max_t rsearch, rs_tmp;
 	uint64_t end = start + size;
+	uint64_t rstart, rend;
 	boolean_t left_over, right_over;
 
 	VERIFY3U(size, !=, 0);
@@ -463,12 +496,15 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 
 	/* Make sure we completely overlap with someone */
 	if (rs == NULL) {
-		zfs_panic_recover("zfs: removing nonexistent segment from "
+		zfs_panic_recover_ms("zfs: removing nonexistent segment from "
 		    "range tree (offset=%llx size=%llx)",
 		    (longlong_t)start, (longlong_t)size);
 		return;
 	}
 
+	rstart = zfs_rs_get_start(rs, rt);
+	rend = zfs_rs_get_end(rs, rt);
+
 	/*
 	 * Range trees with gap support must only remove complete segments
 	 * from the tree. This allows us to maintain accurate fill accounting
@@ -478,31 +514,47 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 	if (rt->rt_gap != 0) {
 		if (do_fill) {
 			if (zfs_rs_get_fill(rs, rt) == size) {
-				start = zfs_rs_get_start(rs, rt);
-				end = zfs_rs_get_end(rs, rt);
+				start = rstart;
+				end = rend;
 				size = end - start;
 			} else {
 				zfs_range_tree_adjust_fill(rt, rs, -size);
 				return;
 			}
-		} else if (zfs_rs_get_start(rs, rt) != start ||
-		    zfs_rs_get_end(rs, rt) != end) {
+		} else if (rstart != start || rend != end) {
 			zfs_panic_recover("zfs: freeing partial segment of "
 			    "gap tree (offset=%llx size=%llx) of "
 			    "(offset=%llx size=%llx)",
 			    (longlong_t)start, (longlong_t)size,
-			    (longlong_t)zfs_rs_get_start(rs, rt),
-			    (longlong_t)zfs_rs_get_end(rs, rt) -
-			    zfs_rs_get_start(rs, rt));
+			    (longlong_t)rstart,
+			    (longlong_t)(rend - rstart));
 			return;
 		}
 	}
 
-	VERIFY3U(zfs_rs_get_start(rs, rt), <=, start);
-	VERIFY3U(zfs_rs_get_end(rs, rt), >=, end);
+	if (!(rstart <= start && rend >= end)) {
+		zfs_panic_recover_ms("zfs: removing segment "
+		    "(offset=%llx size=%llx) not completely overlapped by "
+		    "existing one (offset=%llx size=%llx)",
+		    (longlong_t)start, (longlong_t)size,
+		    (longlong_t)rstart, (longlong_t)(rend - rstart));
+		if (rt->rt_usecase != ZFS_RANGE_TREE_UC_FREE_SPACE)
+			return;
+		/* perform removal of the chunks */
+		if (rstart > start)
+			zfs_range_tree_remove_impl(rt, start, rstart - start,
+			    do_fill);
+		uint64_t mstart = MAX(rstart, start);
+		uint64_t mend = MIN(rend, end);
+		zfs_range_tree_remove_impl(rt, mstart, mend - mstart, do_fill);
+		if (rend < end)
+			zfs_range_tree_remove_impl(rt, rend, end - rend,
+			    do_fill);
+		return;
+	}
 
-	left_over = (zfs_rs_get_start(rs, rt) != start);
-	right_over = (zfs_rs_get_end(rs, rt) != end);
+	left_over = (rstart != start);
+	right_over = (rend != end);
 
 	zfs_range_tree_stat_decr(rt, rs);
 
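
As a usage note (not part of the diff above), here is a minimal sketch of how a caller might tag a tree with the new usecase hint so that an overlapping add still inserts the non-overlapping chunks instead of being dropped. The segment type (ZFS_RANGE_SEG64), the NULL ops/arg, the zero start/shift, the example offsets, and the wrapper function name are assumptions made purely for illustration, not taken from this change:

#include <sys/range_tree.h>

/* Hypothetical caller, sketched against the interfaces added above. */
static void
example_allocated_space_tree(void)
{
	zfs_range_tree_t *rt = zfs_range_tree_create_usecase(NULL,
	    ZFS_RANGE_SEG64, NULL, 0, 0, ZFS_RANGE_TREE_UC_ALLOCATED_SPACE);

	/* Existing segment [0x1000, 0x3000). */
	zfs_range_tree_add(rt, 0x1000, 0x2000);

	/*
	 * Overlapping add of [0x1800, 0x3800): the overlap is reported
	 * through zfs_panic_recover_ms(), and because the tree is tagged
	 * ZFS_RANGE_TREE_UC_ALLOCATED_SPACE the non-overlapping tail
	 * [0x3000, 0x3800) is still inserted (assuming the recover path
	 * returns rather than panicking).
	 */
	zfs_range_tree_add(rt, 0x1800, 0x2000);

	zfs_range_tree_vacate(rt, NULL, NULL);
	zfs_range_tree_destroy(rt);
}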