Lines matching refs: check_state
1946 struct btree_check_state *check_state = info->state;
1947 struct cache_set *c = check_state->c;
1968 spin_lock(&check_state->idx_lock);
1969 cur_idx = check_state->key_idx;
1970 check_state->key_idx++;
1971 spin_unlock(&check_state->idx_lock);
1987 atomic_set(&check_state->enough, 1);
1988 /* Update check_state->enough earlier */
2022 /* update check_state->started among all CPUs */
2024 if (atomic_dec_and_test(&check_state->started))
2025 wake_up(&check_state->wait);
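The matched lines above come from the per-thread worker of bcache's multi-threaded btree check. Below is a minimal kernel-style sketch of the coordination pattern those lines imply: key indices are handed out one at a time under check_state->idx_lock, a worker that finds no more work sets check_state->enough so that no further threads get created, and the last worker to decrement check_state->started wakes the waiter on check_state->wait. The struct layout is inferred from the initialisation lines further down; MAX_CHECK_THREADS and bch_btree_check_one_idx() are invented placeholders rather than bcache identifiers, and this is not the driver's actual bch_btree_check_thread().

#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/kthread.h>
#include <linux/spinlock.h>
#include <linux/string.h>
#include <linux/wait.h>

#define MAX_CHECK_THREADS 12            /* placeholder for the driver's thread limit */

struct cache_set;                       /* opaque in this sketch */
struct btree_check_state;

struct btree_check_info {
        struct btree_check_state *state;
        struct task_struct *thread;
        int result;
};

struct btree_check_state {
        struct cache_set *c;
        int total_threads;
        int key_idx;                    /* next key index to hand out */
        spinlock_t idx_lock;            /* serialises key_idx claims */
        atomic_t started;               /* number of running workers */
        atomic_t enough;                /* set once more workers would be useless */
        wait_queue_head_t wait;         /* woken by the last exiting worker */
        struct btree_check_info infos[MAX_CHECK_THREADS];
};

/* placeholder: check one key range, return -ENODATA once idx is past the end */
int bch_btree_check_one_idx(struct cache_set *c, int idx);

/* simplified worker loop, not the driver's bch_btree_check_thread() */
static int check_thread_sketch(void *arg)
{
        struct btree_check_info *info = arg;
        struct btree_check_state *check_state = info->state;
        int cur_idx, ret = 0;

        for (;;) {
                /* claim the next key index under idx_lock */
                spin_lock(&check_state->idx_lock);
                cur_idx = check_state->key_idx;
                check_state->key_idx++;
                spin_unlock(&check_state->idx_lock);

                ret = bch_btree_check_one_idx(check_state->c, cur_idx);
                if (ret == -ENODATA) {
                        /* no work left: stop the spawn loop creating more threads */
                        atomic_set(&check_state->enough, 1);
                        smp_mb__after_atomic(); /* publish enough before exiting */
                        ret = 0;
                        break;
                }
                if (ret)
                        break;
        }

        info->result = ret;
        smp_mb__before_atomic();        /* publish result before the decrement */
        if (atomic_dec_and_test(&check_state->started))
                wake_up(&check_state->wait);
        return ret;
}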
2050 struct btree_check_state check_state;
2061 memset(&check_state, 0, sizeof(struct btree_check_state));
2062 check_state.c = c;
2063 check_state.total_threads = bch_btree_chkthread_nr();
2064 check_state.key_idx = 0;
2065 spin_lock_init(&check_state.idx_lock);
2066 atomic_set(&check_state.started, 0);
2067 atomic_set(&check_state.enough, 0);
2068 init_waitqueue_head(&check_state.wait);
2073 * if check_state.enough is non-zero, it means current
2077 for (i = 0; i < check_state.total_threads; i++) {
2078 /* fetch latest check_state.enough earlier */
2080 if (atomic_read(&check_state.enough))
2083 check_state.infos[i].result = 0;
2084 check_state.infos[i].state = &check_state;
2086 check_state.infos[i].thread =
2088 &check_state.infos[i],
2090 if (IS_ERR(check_state.infos[i].thread)) {
2093 kthread_stop(check_state.infos[i].thread);
2097 atomic_inc(&check_state.started);
2103 wait_event(check_state.wait, atomic_read(&check_state.started) == 0);
2105 for (i = 0; i < check_state.total_threads; i++) {
2106 if (check_state.infos[i].result) {
2107 ret = check_state.infos[i].result;
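For the coordinator side matched from line 2050 onward, here is a sketch under the same assumptions, reusing the types and worker from the sketch above: initialise the shared state on the stack, spawn up to total_threads workers but stop early once a worker has set enough, stop already-created threads with kthread_stop() if kthread_run() fails, wait for started to reach zero, then collect the per-thread results. bch_btree_chkthread_nr() is taken from the matched line above with an assumed prototype; the clamping, error handling, and thread name are illustrative, not the driver's exact code.

/* assumed prototype for the helper seen in the match above */
int bch_btree_chkthread_nr(void);

/* simplified coordinator, not the driver's bch_btree_check() */
static int btree_check_sketch(struct cache_set *c)
{
        struct btree_check_state check_state;
        int ret = 0;
        int i;

        memset(&check_state, 0, sizeof(struct btree_check_state));
        check_state.c = c;
        check_state.total_threads = bch_btree_chkthread_nr();
        if (check_state.total_threads > MAX_CHECK_THREADS)
                check_state.total_threads = MAX_CHECK_THREADS;  /* fit infos[] */
        check_state.key_idx = 0;
        spin_lock_init(&check_state.idx_lock);
        atomic_set(&check_state.started, 0);
        atomic_set(&check_state.enough, 0);
        init_waitqueue_head(&check_state.wait);

        for (i = 0; i < check_state.total_threads; i++) {
                /* a worker may already have decided more threads are pointless */
                smp_mb__before_atomic();
                if (atomic_read(&check_state.enough))
                        break;

                check_state.infos[i].result = 0;
                check_state.infos[i].state = &check_state;
                check_state.infos[i].thread =
                        kthread_run(check_thread_sketch,
                                    &check_state.infos[i],
                                    "btrchk_sketch[%d]", i);
                if (IS_ERR(check_state.infos[i].thread)) {
                        /* stop the workers already started, then bail out */
                        for (--i; i >= 0; i--)
                                kthread_stop(check_state.infos[i].thread);
                        return -ENOMEM;
                }
                atomic_inc(&check_state.started);
        }

        /* sleep until the last worker drops started to zero and wakes us */
        wait_event(check_state.wait, atomic_read(&check_state.started) == 0);

        /* first non-zero per-thread result wins */
        for (i = 0; i < check_state.total_threads; i++) {
                if (check_state.infos[i].result) {
                        ret = check_state.infos[i].result;
                        break;
                }
        }
        return ret;
}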