Lines matching refs: check_state (drivers/md/bcache/btree.c)

1925 struct btree_check_state *check_state = info->state;
1926 struct cache_set *c = check_state->c;
1947 spin_lock(&check_state->idx_lock);
1948 cur_idx = check_state->key_idx;
1949 check_state->key_idx++;
1950 spin_unlock(&check_state->idx_lock);
1966 atomic_set(&check_state->enough, 1);
1967 /* Update check_state->enough earlier */
2001 /* update check_state->started among all CPUs */
2003 if (atomic_dec_and_test(&check_state->started))
2004 wake_up(&check_state->wait);
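
The matches at 1925-2004 all fall inside the per-thread check function. Read together, they imply a shared coordination object plus a small per-thread record, and a worker loop that claims root-node key indexes under idx_lock, raises enough when the root node runs out of keys, and decrements started on exit so the last worker wakes the parent. The sketch below illustrates that pattern; it is not the bcache source. The struct layout only approximates the real definitions in drivers/md/bcache/btree.h, CHKTHREAD_MAX is an assumed cap, and root_key_exists()/check_subtree() are made-up placeholders for the btree iteration and recursion.

#include <linux/atomic.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/wait.h>

#define CHKTHREAD_MAX	12			/* assumed cap on worker threads */

struct cache_set;				/* opaque here; the real type is bcache's */

struct btree_check_state;

struct btree_check_info {
	struct btree_check_state	*state;
	struct task_struct		*thread;
	int				result;
};

struct btree_check_state {
	struct cache_set		*c;
	int				total_threads;
	int				key_idx;	/* next root-node key to hand out */
	spinlock_t			idx_lock;	/* protects key_idx */
	atomic_t			started;	/* workers still running */
	atomic_t			enough;		/* set once no new worker is needed */
	wait_queue_head_t		wait;		/* parent sleeps here until started == 0 */
	struct btree_check_info		infos[CHKTHREAD_MAX];
};

/* Placeholders for the real root-key iteration and recursive node check. */
static bool root_key_exists(struct cache_set *c, int idx);
static int check_subtree(struct cache_set *c, int idx);

static int btree_check_thread_sketch(void *arg)
{
	struct btree_check_info *info = arg;
	struct btree_check_state *check_state = info->state;
	struct cache_set *c = check_state->c;
	int cur_idx, ret = 0;

	for (;;) {
		/* Claim the next root-node key index for this worker. */
		spin_lock(&check_state->idx_lock);
		cur_idx = check_state->key_idx;
		check_state->key_idx++;
		spin_unlock(&check_state->idx_lock);

		if (!root_key_exists(c, cur_idx)) {
			/* Root node exhausted: the running workers are enough. */
			atomic_set(&check_state->enough, 1);
			/* Update check_state->enough earlier */
			smp_mb__after_atomic();
			break;
		}

		ret = check_subtree(c, cur_idx);
		if (ret)
			break;
		cond_resched();
	}

	info->result = ret;
	/* update check_state->started among all CPUs */
	smp_mb__before_atomic();
	if (atomic_dec_and_test(&check_state->started))
		wake_up(&check_state->wait);

	return ret;
}

The enough flag is what line 1966 sets: once the root node has no more keys to hand out, any additional worker would have nothing to do, so the parent's creation loop (lines 2056-2059 below) stops starting new threads.
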
2029 struct btree_check_state check_state;
2040 memset(&check_state, 0, sizeof(struct btree_check_state));
2041 check_state.c = c;
2042 check_state.total_threads = bch_btree_chkthread_nr();
2043 check_state.key_idx = 0;
2044 spin_lock_init(&check_state.idx_lock);
2045 atomic_set(&check_state.started, 0);
2046 atomic_set(&check_state.enough, 0);
2047 init_waitqueue_head(&check_state.wait);
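
Line 2042 sizes the pool with bch_btree_chkthread_nr(). In the upstream driver this helper works out to roughly half of the online CPUs, clamped to at least one thread and at most the number of infos[] slots. The function below is a sketch of that policy, reusing the assumed CHKTHREAD_MAX from the sketch above rather than the real constant from the bcache headers.

#include <linux/cpumask.h>

/* Thread-count policy sketch: about half the online CPUs,
 * never zero, never more than infos[] can hold. */
static int chkthread_nr_sketch(void)
{
	int n = num_online_cpus() / 2;

	if (n == 0)
		n = 1;
	else if (n > CHKTHREAD_MAX)
		n = CHKTHREAD_MAX;

	return n;
}
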
2052 * if check_state.enough is non-zero, it means current
2056 for (i = 0; i < check_state.total_threads; i++) {
2057 /* fetch latest check_state.enough earlier */
2059 if (atomic_read(&check_state.enough))
2062 check_state.infos[i].result = 0;
2063 check_state.infos[i].state = &check_state;
2065 check_state.infos[i].thread =
2067 &check_state.infos[i],
2069 if (IS_ERR(check_state.infos[i].thread)) {
2072 kthread_stop(check_state.infos[i].thread);
2076 atomic_inc(&check_state.started);
2082 wait_event(check_state.wait, atomic_read(&check_state.started) == 0);
2084 for (i = 0; i < check_state.total_threads; i++) {
2085 if (check_state.infos[i].result) {
2086 ret = check_state.infos[i].result;
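
Lines 2029-2086 are the parent side: zero the state, initialize the lock, counters and wait queue, spawn up to total_threads workers (stopping early if a worker has already flagged enough), sleep until started drops to zero, then fold the per-thread results into one return value. The sketch below strings those steps together around the types and worker from the first sketch; the thread name, the error-path details, and the omission of root-node locking and initial root-key marking are all illustrative simplifications, not copied from the driver.

#include <linux/err.h>
#include <linux/kthread.h>
#include <linux/string.h>

/* Parent-side sketch: spawn workers, wait for them, collect results. */
static int btree_check_sketch(struct cache_set *c)
{
	struct btree_check_state check_state;
	int ret = 0;
	int i;

	memset(&check_state, 0, sizeof(struct btree_check_state));
	check_state.c = c;
	check_state.total_threads = chkthread_nr_sketch();
	check_state.key_idx = 0;
	spin_lock_init(&check_state.idx_lock);
	atomic_set(&check_state.started, 0);
	atomic_set(&check_state.enough, 0);
	init_waitqueue_head(&check_state.wait);

	for (i = 0; i < check_state.total_threads; i++) {
		/* fetch latest check_state.enough earlier */
		smp_mb__before_atomic();
		if (atomic_read(&check_state.enough))
			break;	/* existing workers already cover every root key */

		check_state.infos[i].result = 0;
		check_state.infos[i].state = &check_state;

		check_state.infos[i].thread =
			kthread_run(btree_check_thread_sketch,
				    &check_state.infos[i],
				    "btrchk_sketch[%d]", i);
		if (IS_ERR(check_state.infos[i].thread)) {
			/* Unwind: stop the workers that did start. */
			for (--i; i >= 0; i--)
				kthread_stop(check_state.infos[i].thread);
			return -ENOMEM;
		}
		atomic_inc(&check_state.started);
	}

	/* The last worker to finish wakes us via check_state.wait. */
	wait_event(check_state.wait, atomic_read(&check_state.started) == 0);

	/* Any worker's failure becomes the overall result. */
	for (i = 0; i < check_state.total_threads; i++) {
		if (check_state.infos[i].result) {
			ret = check_state.infos[i].result;
			break;
		}
	}

	return ret;
}

Giving each worker its own result slot in infos[] (lines 2062, 2085-2086) keeps the error reporting lock-free: only the parent reads the slots, and only after wait_event() guarantees every worker has finished writing its own.
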