diff --git a/module/zfs/metaslab.c b/module/zfs/metaslab.c
index c1e32884f..12fe5890a 100644
--- a/module/zfs/metaslab.c
+++ b/module/zfs/metaslab.c
@@ -3035,7 +3035,6 @@ metaslab_group_alloc_normal(metaslab_group_t *mg, zio_alloc_list_t *zal,
 	metaslab_t *msp = NULL;
 	uint64_t offset = -1ULL;
 	uint64_t activation_weight;
-	boolean_t tertiary = B_FALSE;
 
 	activation_weight = METASLAB_WEIGHT_PRIMARY;
 	for (int i = 0; i < d; i++) {
@@ -3044,7 +3043,7 @@ metaslab_group_alloc_normal(metaslab_group_t *mg, zio_alloc_list_t *zal,
 			activation_weight = METASLAB_WEIGHT_SECONDARY;
 		} else if (activation_weight == METASLAB_WEIGHT_SECONDARY &&
 		    DVA_GET_VDEV(&dva[i]) == mg->mg_vd->vdev_id) {
-			tertiary = B_TRUE;
+			activation_weight = METASLAB_WEIGHT_CLAIM;
 			break;
 		}
 	}
@@ -3053,10 +3052,8 @@ metaslab_group_alloc_normal(metaslab_group_t *mg, zio_alloc_list_t *zal,
 	 * If we don't have enough metaslabs active to fill the entire array, we
 	 * just use the 0th slot.
 	 */
-	if (mg->mg_ms_ready < mg->mg_allocators * 2) {
-		tertiary = B_FALSE;
+	if (mg->mg_ms_ready < mg->mg_allocators * 3)
 		allocator = 0;
-	}
 
 	ASSERT3U(mg->mg_vd->vdev_ms_count, >=, 2);
 
@@ -3082,7 +3079,7 @@ metaslab_group_alloc_normal(metaslab_group_t *mg, zio_alloc_list_t *zal,
 			msp = mg->mg_primaries[allocator];
 			was_active = B_TRUE;
 		} else if (activation_weight == METASLAB_WEIGHT_SECONDARY &&
-		    mg->mg_secondaries[allocator] != NULL && !tertiary) {
+		    mg->mg_secondaries[allocator] != NULL) {
			msp = mg->mg_secondaries[allocator];
 			was_active = B_TRUE;
 		} else {
@@ -3125,7 +3122,8 @@ metaslab_group_alloc_normal(metaslab_group_t *mg, zio_alloc_list_t *zal,
 			continue;
 		}
 
-		if (msp->ms_weight & METASLAB_WEIGHT_CLAIM) {
+		if (msp->ms_weight & METASLAB_WEIGHT_CLAIM &&
+		    activation_weight != METASLAB_WEIGHT_CLAIM) {
 			metaslab_passivate(msp, msp->ms_weight &
 			    ~METASLAB_WEIGHT_CLAIM);
 			mutex_exit(&msp->ms_lock);
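
For reference, a minimal standalone sketch (not part of the patch, with stubbed names and types) of the activation-weight selection the change introduces: instead of tracking a separate "tertiary" boolean, the loop now upgrades the weight a second time, to a claim weight, when two earlier DVAs of the same block already landed on this vdev.

/*
 * Illustrative sketch only -- not part of the patch.  It mirrors the
 * activation-weight selection loop in metaslab_group_alloc_normal()
 * with stubbed types, showing how the third copy on a vdev now asks
 * for METASLAB_WEIGHT_CLAIM instead of setting a "tertiary" flag.
 */
#include <stdint.h>
#include <stdio.h>

enum weight { WEIGHT_PRIMARY, WEIGHT_SECONDARY, WEIGHT_CLAIM };

/* Stub standing in for DVA_GET_VDEV(&dva[i]): vdev id of an earlier copy. */
static uint64_t
dva_vdev(const uint64_t *dva, int i)
{
	return (dva[i]);
}

static enum weight
pick_activation_weight(const uint64_t *dva, int d, uint64_t vdev_id)
{
	enum weight w = WEIGHT_PRIMARY;

	for (int i = 0; i < d; i++) {
		if (w == WEIGHT_PRIMARY && dva_vdev(dva, i) == vdev_id) {
			/* One earlier copy already on this vdev. */
			w = WEIGHT_SECONDARY;
		} else if (w == WEIGHT_SECONDARY &&
		    dva_vdev(dva, i) == vdev_id) {
			/* Two earlier copies: request a "claim" metaslab. */
			w = WEIGHT_CLAIM;
			break;
		}
	}
	return (w);
}

int
main(void)
{
	uint64_t dva[2] = { 7, 7 };	/* two earlier copies on vdev 7 */

	/* Prints 2, i.e. WEIGHT_CLAIM. */
	printf("%d\n", (int)pick_activation_weight(dva, 2, 7));
	return (0);
}

The mg_ms_ready threshold for falling back to allocator 0 correspondingly moves from mg_allocators * 2 to mg_allocators * 3, presumably because three activation weights (primary, secondary, claim) are now in play per allocator rather than two.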