Lines Matching refs:ThreadState
49 ThreadState current = state_.load_relaxed();
176 class ThreadState final {
178 static constexpr ThreadState Parked() {
179 return ThreadState(ParkedBit::kMask);
181 static constexpr ThreadState Running() { return ThreadState(0); }
185 constexpr ThreadState SetRunning() const V8_WARN_UNUSED_RESULT {
186 return ThreadState(raw_state_ & ~ParkedBit::kMask);
191 constexpr ThreadState SetParked() const V8_WARN_UNUSED_RESULT {
192 return ThreadState(ParkedBit::kMask | raw_state_);
209 constexpr explicit ThreadState(uint8_t value) : raw_state_(value) {}
220 constexpr explicit AtomicThreadState(ThreadState state)
223 bool CompareExchangeStrong(ThreadState& expected, ThreadState updated) {
228 bool CompareExchangeWeak(ThreadState& expected, ThreadState updated) {
233 ThreadState SetParked() {
234 return ThreadState(raw_state_.fetch_or(ParkedBit::kMask));
237 ThreadState SetSafepointRequested() {
238 return ThreadState(raw_state_.fetch_or(SafepointRequestedBit::kMask));
241 ThreadState ClearSafepointRequested() {
242 return ThreadState(raw_state_.fetch_and(~SafepointRequestedBit::kMask));
245 ThreadState SetCollectionRequested() {
246 return ThreadState(raw_state_.fetch_or(CollectionRequestedBit::kMask));
249 ThreadState ClearCollectionRequested() {
250 return ThreadState(raw_state_.fetch_and(~CollectionRequestedBit::kMask));
253 ThreadState load_relaxed() const {
254 return ThreadState(raw_state_.load(std::memory_order_relaxed));
270 ThreadState expected = ThreadState::Running();
271 if (!state_.CompareExchangeWeak(expected, ThreadState::Parked())) {
278 ThreadState expected = ThreadState::Parked();
279 if (!state_.CompareExchangeWeak(expected, ThreadState::Running())) {
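
Taken together, the matches above outline a small state machine: a `ThreadState` value type packed into one `uint8_t`, an `AtomicThreadState` wrapper that mutates it with CAS and `fetch_or`/`fetch_and`, and `Park()`/`Unpark()` fast paths that CAS between Running and Parked. The sketch below consolidates that shape into one self-contained example. The bit layout, the `LocalHeapSketch` class, the empty slow-path bodies, and the memory orders are assumptions for illustration, not the exact V8 implementation.

```cpp
// Minimal sketch of the parking protocol implied by the matches above.
#include <atomic>
#include <cstdint>

class ThreadState final {
 public:
  // Assumed layout: bit 0 = parked, bit 1 = safepoint requested,
  // bit 2 = collection requested (V8 derives these masks from BitFields).
  static constexpr uint8_t kParkedBit = 1 << 0;
  static constexpr uint8_t kSafepointRequestedBit = 1 << 1;
  static constexpr uint8_t kCollectionRequestedBit = 1 << 2;

  static constexpr ThreadState Running() { return ThreadState(0); }
  static constexpr ThreadState Parked() { return ThreadState(kParkedBit); }

  constexpr bool IsParked() const { return (raw_ & kParkedBit) != 0; }
  constexpr uint8_t raw() const { return raw_; }

 private:
  friend class AtomicThreadState;
  constexpr explicit ThreadState(uint8_t raw) : raw_(raw) {}
  uint8_t raw_;
};

class AtomicThreadState final {
 public:
  constexpr explicit AtomicThreadState(ThreadState state)
      : raw_(state.raw()) {}

  // Weak CAS: may fail spuriously, so callers either retry or take a slow
  // path; on failure `expected` is updated to the observed state.
  bool CompareExchangeWeak(ThreadState& expected, ThreadState updated) {
    uint8_t raw_expected = expected.raw();
    bool ok = raw_.compare_exchange_weak(raw_expected, updated.raw());
    expected = ThreadState(raw_expected);
    return ok;
  }

  // fetch_or/fetch_and return the *previous* state, so the requester can
  // tell whether the thread was already parked when the bit was set.
  ThreadState SetSafepointRequested() {
    return ThreadState(raw_.fetch_or(ThreadState::kSafepointRequestedBit));
  }
  ThreadState ClearSafepointRequested() {
    return ThreadState(raw_.fetch_and(
        static_cast<uint8_t>(~ThreadState::kSafepointRequestedBit)));
  }

  ThreadState load_relaxed() const {
    return ThreadState(raw_.load(std::memory_order_relaxed));
  }

 private:
  std::atomic<uint8_t> raw_;
};

// Hypothetical owner of the state, standing in for V8's LocalHeap.
class LocalHeapSketch {
 public:
  LocalHeapSketch() : state_(ThreadState::Running()) {}

  // Fast path: one weak CAS from Running to Parked. If any request bit is
  // set, or the CAS fails spuriously, the slow path sorts it out.
  void Park() {
    ThreadState expected = ThreadState::Running();
    if (!state_.CompareExchangeWeak(expected, ThreadState::Parked())) {
      ParkSlowPath();
    }
  }

  void Unpark() {
    ThreadState expected = ThreadState::Parked();
    if (!state_.CompareExchangeWeak(expected, ThreadState::Running())) {
      UnparkSlowPath();
    }
  }

 private:
  void ParkSlowPath() { /* honor pending safepoint/GC requests (omitted) */ }
  void UnparkSlowPath() { /* likewise, before returning to Running (omitted) */ }

  AtomicThreadState state_;
};
```

The point of returning the previous state from the `Set*`/`Clear*` helpers (as the fetch_or/fetch_and matches at lines 234-250 also do) is that the requester can see whether the target thread was parked or running at the moment the request bit landed, and the common Park/Unpark transition stays a single lock-free CAS.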