Lines matching refs: asid
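The matches below all appear to come from the ARM ASID allocator in the Linux kernel (arch/arm/mm/context.c); read in order they cover the whole algorithm: an erratum cpumask helper (56-70), the rollover flush (139-155), the reserved-ASID update (165-180), allocation in new_context() (192-234), and the context-switch fast/slow path (241-270). After each group, a short userspace sketch models what the matched lines do; every sketch uses illustrative names, a fixed CPU count, and an assumed 8-bit ASID width, none of which are taken from the listing.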
56 u64 context_id, asid;
67 asid = per_cpu(active_asids, cpu).counter;
68 if (asid == 0)
69 asid = per_cpu(reserved_asids, cpu);
70 if (context_id == asid)
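The matches at lines 56-70 appear to come from a helper that decides which CPUs may still be running the context being invalidated (likely the Cortex-A15 erratum 798181 cpumask path): each CPU's active ASID counter is read, and if a rollover has cleared it to 0, the reserved copy stands in. A minimal userspace sketch of that fallback, with NR_CPUS and the array names chosen for illustration rather than taken from the kernel:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS 4 /* illustrative, not a kernel constant */

static uint64_t active_asids[NR_CPUS];   /* zeroed by a rollover   */
static uint64_t reserved_asids[NR_CPUS]; /* last ASID seen per CPU */

/* Mirror of lines 67-70: is @cpu still running @context_id? */
static bool cpu_runs_context(int cpu, uint64_t context_id)
{
    uint64_t asid = active_asids[cpu];

    /* After a rollover the active slot reads 0; fall back to the
     * reserved copy, the only remaining record for that CPU. */
    if (asid == 0)
        asid = reserved_asids[cpu];
    return context_id == asid;
}

int main(void)
{
    reserved_asids[2] = 0x101; /* CPU 2 rolled over while running 0x101 */
    printf("%d\n", cpu_runs_context(2, 0x101)); /* 1: IPI target */
    printf("%d\n", cpu_runs_context(1, 0x101)); /* 0: skip       */
    return 0;
}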
139 u64 asid;
144 asid = atomic64_xchg(&per_cpu(active_asids, i), 0);
152 if (asid == 0)
153 asid = per_cpu(reserved_asids, i);
154 __set_bit(asid & ~ASID_MASK, asid_map);
155 per_cpu(reserved_asids, i) = asid;
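Lines 139-155 are the rollover path (flush_context() in the kernel source): every CPU's active ASID is claimed with an atomic exchange, CPUs that already rolled over fall back to their reserved copy, and each surviving ASID is re-marked in the bitmap so running tasks keep their numbers across the generation bump. A sketch of that bookkeeping, assuming C11 atomics in place of atomic64_xchg(), a plain bool array in place of the kernel bitmap, and an illustrative 8-bit ASID split:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define ASID_BITS      8                    /* illustrative width */
#define NUM_USER_ASIDS (1u << ASID_BITS)
#define ASID_MASK      (~0ull << ASID_BITS) /* generation bits    */
#define NR_CPUS        4

static _Atomic uint64_t active_asids[NR_CPUS];
static uint64_t reserved_asids[NR_CPUS];
static bool asid_map[NUM_USER_ASIDS];       /* bool-per-bit stand-in */

/* Rollover: wipe the map, then re-reserve each CPU's live ASID. */
static void flush_context_model(void)
{
    uint64_t asid;
    int i;

    memset(asid_map, 0, sizeof(asid_map));
    for (i = 0; i < NR_CPUS; i++) {
        /* Line 144: atomically claim and clear the active slot. */
        asid = atomic_exchange(&active_asids[i], 0);
        /* Lines 152-153: a CPU that already rolled over reads 0 here;
         * its reserved ASID is the only trace of what it still runs. */
        if (asid == 0)
            asid = reserved_asids[i];
        /* Lines 154-155: keep the low ASID bits marked as in use. */
        asid_map[asid & ~ASID_MASK] = true;
        reserved_asids[i] = asid;
    }
}

int main(void)
{
    atomic_store(&active_asids[0], (2ull << ASID_BITS) | 0x17);
    flush_context_model();
    return asid_map[0x17] ? 0 : 1; /* slot 0x17 survives the flush */
}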
165 static bool check_update_reserved_asid(u64 asid, u64 newasid)
180 if (per_cpu(reserved_asids, cpu) == asid) {
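check_update_reserved_asid() (lines 165-180) walks every CPU's reserved slot: if the old ASID is found, each copy is rewritten to the new-generation value and the caller is told it may keep the number. A model of that loop; in the kernel it runs under cpu_asid_lock, which this sketch omits:

#include <stdbool.h>
#include <stdint.h>

#define NR_CPUS 4 /* illustrative */

static uint64_t reserved_asids[NR_CPUS];

/* Model of lines 165-180: move every reserved copy of @asid to
 * @newasid (same low bits, new generation) and report a hit. The
 * loop must not break early: the task may have run on several CPUs,
 * and a stale copy left behind would be missed next generation. */
static bool check_update_reserved_asid(uint64_t asid, uint64_t newasid)
{
    bool hit = false;
    int cpu;

    for (cpu = 0; cpu < NR_CPUS; cpu++) {
        if (reserved_asids[cpu] == asid) {
            hit = true;
            reserved_asids[cpu] = newasid;
        }
    }
    return hit;
}

int main(void)
{
    reserved_asids[0] = reserved_asids[3] = 0x117;
    /* Both copies must move to the new generation together. */
    return check_update_reserved_asid(0x117, 0x217) ? 0 : 1;
}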
192 u64 asid = atomic64_read(&mm->context.id);
195 if (asid != 0) {
196 u64 newasid = generation | (asid & ~ASID_MASK);
202 if (check_update_reserved_asid(asid, newasid))
209 asid &= ~ASID_MASK;
210 if (!__test_and_set_bit(asid, asid_map))
223 asid = find_next_zero_bit(asid_map, NUM_USER_ASIDS, cur_idx);
224 if (asid == NUM_USER_ASIDS) {
228 asid = find_next_zero_bit(asid_map, NUM_USER_ASIDS, 1);
231 __set_bit(asid, asid_map);
232 cur_idx = asid;
234 return asid | generation;
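new_context() (lines 192-234) tries three things in order: keep the old ASID if it survived a rollover in a reserved slot, otherwise re-claim the same bit in the fresh bitmap, otherwise scan from the rotor cur_idx for a free slot, bumping the generation and flushing when the map is exhausted (slot 0 is never handed out). A self-contained userspace model of that ordering; the 8-bit ASID/generation split and the compressed helper bodies are illustrative, not the kernel's:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define ASID_BITS          8
#define NUM_USER_ASIDS     (1u << ASID_BITS)
#define ASID_MASK          (~0ull << ASID_BITS)
#define ASID_FIRST_VERSION (1ull << ASID_BITS)
#define NR_CPUS            4

static bool asid_map[NUM_USER_ASIDS];
static uint64_t reserved_asids[NR_CPUS];
static uint64_t asid_generation = ASID_FIRST_VERSION;
static unsigned int cur_idx = 1; /* scan rotor; slot 0 is reserved */

/* Compressed versions of the helpers sketched above. */
static bool check_update_reserved_asid(uint64_t asid, uint64_t newasid)
{
    bool hit = false;
    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        if (reserved_asids[cpu] == asid) {
            hit = true;
            reserved_asids[cpu] = newasid;
        }
    return hit;
}

static void flush_context_model(void)
{
    memset(asid_map, 0, sizeof(asid_map));
    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        asid_map[reserved_asids[cpu] & ~ASID_MASK] = true;
}

static unsigned int find_next_zero(unsigned int idx)
{
    while (idx < NUM_USER_ASIDS && asid_map[idx])
        idx++;
    return idx;
}

/* Model of lines 192-234: allocate an ASID for an mm whose current
 * id is @asid (0 if it never had one). */
static uint64_t new_context_model(uint64_t asid)
{
    uint64_t generation = asid_generation;

    if (asid != 0) {
        uint64_t newasid = generation | (asid & ~ASID_MASK);

        /* Lines 196-202: the old ASID survived a rollover in a
         * reserved slot, so the mm may simply keep it. */
        if (check_update_reserved_asid(asid, newasid))
            return newasid;

        /* Lines 209-210: otherwise try to re-claim the same slot
         * in the fresh bitmap. */
        asid &= ~ASID_MASK;
        if (!asid_map[asid]) {
            asid_map[asid] = true;
            return newasid;
        }
    }

    /* Lines 223-228: scan from the rotor; if the map is full, bump
     * the generation, flush, and rescan from 1 (0 stays reserved). */
    asid = find_next_zero(cur_idx);
    if (asid == NUM_USER_ASIDS) {
        asid_generation += ASID_FIRST_VERSION;
        generation = asid_generation;
        flush_context_model();
        asid = find_next_zero(1);
    }

    /* Lines 231-234: claim the slot and tag it with the generation. */
    asid_map[asid] = true;
    cur_idx = asid;
    return asid | generation;
}

int main(void)
{
    uint64_t a = new_context_model(0); /* fresh mm gets ASID 1, gen 1 */
    printf("first id: %#llx\n", (unsigned long long)a);
    return 0;
}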
241 u64 asid;
252 asid = atomic64_read(&mm->context.id);
253 if (!((asid ^ atomic64_read(&asid_generation)) >> ASID_BITS)
254 && atomic64_xchg(&per_cpu(active_asids, cpu), asid))
259 asid = atomic64_read(&mm->context.id);
260 if ((asid ^ atomic64_read(&asid_generation)) >> ASID_BITS) {
261 asid = new_context(mm, cpu);
262 atomic64_set(&mm->context.id, asid);
270 atomic64_set(&per_cpu(active_asids, cpu), asid);
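check_and_switch_context() (lines 241-270) splits into a lock-free fast path and a locked slow path. Lines 253-254: if the mm's id is from the current generation and the xchg into this CPU's active_asids slot returns nonzero, no lock is needed; a returned zero means a concurrent rollover cleared the slot, so the generation must be re-validated under the lock (lines 259-262, where another CPU may already have installed a fresh id) before line 270 publishes the result as this CPU's active ASID. The staleness test itself is just an XOR and a shift; a tiny sketch, with the 8-bit split again illustrative:

#include <stdint.h>
#include <stdio.h>

#define ASID_BITS 8 /* assumed split: low bits ASID, rest generation */

/* The test from lines 253 and 260: XOR with the global generation and
 * shift the ASID bits away; any remaining bits mean the id is stale. */
static int asid_is_stale(uint64_t context_id, uint64_t generation)
{
    return ((context_id ^ generation) >> ASID_BITS) != 0;
}

int main(void)
{
    uint64_t gen1 = 1ull << ASID_BITS;
    uint64_t gen2 = 2ull << ASID_BITS;

    printf("%d\n", asid_is_stale(gen2 | 0x42, gen2)); /* 0: fast path ok      */
    printf("%d\n", asid_is_stale(gen1 | 0x42, gen2)); /* 1: needs new_context */
    return 0;
}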