Lines Matching refs:gts
109 gru_dbg(grudev, "gid %d, gts %p, gms %p, inuse 0x%x, cxt %d\n",
187 struct gru_thread_state *gts)
190 gts->ts_cbr_map =
191 gru_reserve_cb_resources(gru, gts->ts_cbr_au_count,
192 gts->ts_cbr_idx);
193 gts->ts_dsr_map =
194 gru_reserve_ds_resources(gru, gts->ts_dsr_au_count, NULL);
198 struct gru_thread_state *gts)
201 gru->gs_cbr_map |= gts->ts_cbr_map;
202 gru->gs_dsr_map |= gts->ts_dsr_map;
225 struct gru_thread_state *gts)
227 struct gru_mm_struct *gms = gts->ts_gms;
229 unsigned short ctxbitmap = (1 << gts->ts_ctxnum);
254 "gid %d, gts %p, gms %p, ctxnum %d, asid 0x%x, asidmap 0x%lx\n",
255 gru->gs_gid, gts, gms, gts->ts_ctxnum, asid,
261 struct gru_thread_state *gts)
263 struct gru_mm_struct *gms = gts->ts_gms;
268 ctxbitmap = (1 << gts->ts_ctxnum);
273 gru_dbg(grudev, "gid %d, gts %p, gms %p, ctxnum %d, asidmap 0x%lx\n",
274 gru->gs_gid, gts, gms, gts->ts_ctxnum, gms->ms_asidmap[0]);
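
The mm-tracker lines (225-274) show each loaded context owning one bit, ctxbitmap = 1 << ts_ctxnum, in the per-mm state whose asidmap the debug output prints. A trivial sketch of the load/unload bit toggling, with mm_tracker as a hypothetical stand-in for gru_mm_struct:

    struct mm_tracker { unsigned long ctxbitmap; };  /* stand-in for gru_mm_struct */

    /* Loading sets the context's bit; unloading clears it.  The asidmap
     * printed at lines 254/273 tracks the same per-mm state. */
    static void tracker_load(struct mm_tracker *t, int ctxnum)
    {
        t->ctxbitmap |= 1UL << ctxnum;
    }

    static void tracker_unload(struct mm_tracker *t, int ctxnum)
    {
        t->ctxbitmap &= ~(1UL << ctxnum);
    }
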
283 void gts_drop(struct gru_thread_state *gts)
285 if (gts && refcount_dec_and_test(&gts->ts_refcnt)) {
286 if (gts->ts_gms)
287 gru_drop_mmu_notifier(gts->ts_gms);
288 kfree(gts);
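
The gts_drop() lines above (283-288) show the standard drop-and-free pattern: the last reference releases the mmu-notifier state before freeing the struct. A minimal userspace sketch of the same pattern, with hypothetical names (tstate, notifier_put) standing in for the driver's types:

    #include <stdatomic.h>
    #include <stdlib.h>

    struct tstate {
        atomic_int refcnt;      /* stand-in for ts_refcnt */
        void *notifier;         /* stand-in for ts_gms */
    };

    static void notifier_put(void *n) { (void)n; }  /* stand-in for gru_drop_mmu_notifier */

    /* The last reference out releases dependent state, then the struct itself. */
    static void ts_drop(struct tstate *ts)
    {
        if (ts && atomic_fetch_sub(&ts->refcnt, 1) == 1) {
            if (ts->notifier)
                notifier_put(ts->notifier);
            free(ts);
        }
    }
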
299 struct gru_thread_state *gts;
301 list_for_each_entry(gts, &vdata->vd_head, ts_next)
302 if (gts->ts_tsid == tsid)
303 return gts;
314 struct gru_thread_state *gts;
320 gts = kmalloc(bytes, GFP_KERNEL);
321 if (!gts)
325 memset(gts, 0, sizeof(struct gru_thread_state)); /* zero out header */
326 refcount_set(&gts->ts_refcnt, 1);
327 mutex_init(&gts->ts_ctxlock);
328 gts->ts_cbr_au_count = cbr_au_count;
329 gts->ts_dsr_au_count = dsr_au_count;
330 gts->ts_tlb_preload_count = tlb_preload_count;
331 gts->ts_user_options = options;
332 gts->ts_user_blade_id = -1;
333 gts->ts_user_chiplet_id = -1;
334 gts->ts_tsid = tsid;
335 gts->ts_ctxnum = NULLCTX;
336 gts->ts_tlb_int_select = -1;
337 gts->ts_cch_req_slice = -1;
338 gts->ts_sizeavail = GRU_SIZEAVAIL(PAGE_SHIFT);
340 gts->ts_mm = current->mm;
341 gts->ts_vma = vma;
345 gts->ts_gms = gms;
348 gru_dbg(grudev, "alloc gts %p\n", gts);
349 return gts;
352 gts_drop(gts);
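
gru_alloc_gts() (lines 314-352) allocates the header plus a trailing save area but zeroes only the header, then initializes the bookkeeping fields with a reference count of 1. A compilable sketch of that allocation shape, under the assumption (implied by the "zero out header" comment at line 325) that the trailing area is fully rewritten before it is read:

    #include <stdlib.h>
    #include <string.h>

    struct ts_hdr {
        int refcnt;
        int tsid;
        /* trailing per-context save area follows the header */
    };

    /* Allocate header plus save area, but zero only the header. */
    static struct ts_hdr *ts_alloc(size_t extra, int tsid)
    {
        struct ts_hdr *ts = malloc(sizeof(*ts) + extra);

        if (!ts)
            return NULL;
        memset(ts, 0, sizeof(*ts));
        ts->refcnt = 1;
        ts->tsid = tsid;
        return ts;
    }
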
381 struct gru_thread_state *gts;
384 gts = gru_find_current_gts_nolock(vdata, tsid);
386 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts);
387 return gts;
392 * another thread to race to create a gts.
398 struct gru_thread_state *gts, *ngts;
400 gts = gru_alloc_gts(vma, vdata->vd_cbr_au_count,
404 if (IS_ERR(gts))
405 return gts;
410 gts_drop(gts);
411 gts = ngts;
414 list_add(&gts->ts_next, &vdata->vd_head);
417 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts);
418 return gts;
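
The comment at line 392 names the pattern used here: allocate a gts optimistically, re-check under the list lock, and if another thread created one first, drop the new copy and adopt the winner's (lines 404-414). A userspace sketch of the same race-resolving insertion, with hypothetical node/list names:

    #include <pthread.h>
    #include <stdlib.h>

    struct node { int tsid; struct node *next; };

    static pthread_mutex_t list_lock = PTHREAD_MUTEX_INITIALIZER;
    static struct node *head;

    static struct node *find_locked(int tsid)
    {
        for (struct node *n = head; n; n = n->next)
            if (n->tsid == tsid)
                return n;
        return NULL;
    }

    /* Allocate outside the lock, then resolve creation races under it:
     * if another thread inserted the same tsid first, free ours and use theirs. */
    static struct node *find_or_create(int tsid)
    {
        struct node *n = malloc(sizeof(*n)), *won;

        if (!n)
            return NULL;
        n->tsid = tsid;
        pthread_mutex_lock(&list_lock);
        won = find_locked(tsid);
        if (won) {
            free(n);
            n = won;
        } else {
            n->next = head;
            head = n;
        }
        pthread_mutex_unlock(&list_lock);
        return n;
    }
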
424 static void gru_free_gru_context(struct gru_thread_state *gts)
428 gru = gts->ts_gru;
429 gru_dbg(grudev, "gts %p, gid %d\n", gts, gru->gs_gid);
432 gru->gs_gts[gts->ts_ctxnum] = NULL;
433 free_gru_resources(gru, gts);
434 BUG_ON(test_bit(gts->ts_ctxnum, &gru->gs_context_map) == 0);
435 __clear_bit(gts->ts_ctxnum, &gru->gs_context_map);
436 gts->ts_ctxnum = NULLCTX;
437 gts->ts_gru = NULL;
438 gts->ts_blade = -1;
441 gts_drop(gts);
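
gru_free_gru_context() (lines 424-441) returns the context number to the per-GRU allocation bitmap, insisting via BUG_ON that the bit is still set, then severs the gts/gru linkage and drops the reference. A sketch of just the bitmap bookkeeping, with gru_like as a hypothetical stand-in:

    #include <assert.h>

    #define NULLCTX (-1)   /* hypothetical stand-in for the driver's NULLCTX */

    struct gru_like { unsigned long context_map; };

    /* Return a context number to the allocation bitmap; the bit must
     * still be set, mirroring the BUG_ON at line 434. */
    static int ctx_free(struct gru_like *g, int ctxnum)
    {
        assert(g->context_map & (1UL << ctxnum));
        g->context_map &= ~(1UL << ctxnum);
        return NULLCTX;    /* caller stores this in its ctxnum field */
    }
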
542 void gru_unload_context(struct gru_thread_state *gts, int savestate)
544 struct gru_state *gru = gts->ts_gru;
546 int ctxnum = gts->ts_ctxnum;
548 if (!is_kernel_context(gts))
549 zap_vma_ptes(gts->ts_vma, UGRUADDR(gts), GRU_GSEG_PAGESIZE);
552 gru_dbg(grudev, "gts %p, cbrmap 0x%lx, dsrmap 0x%lx\n",
553 gts, gts->ts_cbr_map, gts->ts_dsr_map);
558 if (!is_kernel_context(gts))
559 gru_unload_mm_tracker(gru, gts);
561 gru_unload_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr,
562 ctxnum, gts->ts_cbr_map,
563 gts->ts_dsr_map);
564 gts->ts_data_valid = 1;
571 gru_free_gru_context(gts);
578 void gru_load_context(struct gru_thread_state *gts)
580 struct gru_state *gru = gts->ts_gru;
582 int i, err, asid, ctxnum = gts->ts_ctxnum;
587 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL
588 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR);
589 cch->tlb_int_enable = (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR);
591 gts->ts_tlb_int_select = gru_cpu_fault_map_id();
592 cch->tlb_int_select = gts->ts_tlb_int_select;
594 if (gts->ts_cch_req_slice >= 0) {
596 cch->req_slice = gts->ts_cch_req_slice;
601 cch->dsr_allocation_map = gts->ts_dsr_map;
602 cch->cbr_allocation_map = gts->ts_cbr_map;
604 if (is_kernel_context(gts)) {
613 asid = gru_load_mm_tracker(gru, gts);
616 cch->sizeavail[i] = gts->ts_sizeavail;
623 "err %d: cch %p, gts %p, cbr 0x%lx, dsr 0x%lx\n",
624 err, cch, gts, gts->ts_cbr_map, gts->ts_dsr_map);
628 gru_load_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, ctxnum,
629 gts->ts_cbr_map, gts->ts_dsr_map, gts->ts_data_valid);
635 gru_dbg(grudev, "gid %d, gts %p, cbrmap 0x%lx, dsrmap 0x%lx, tie %d, tis %d\n",
636 gts->ts_gru->gs_gid, gts, gts->ts_cbr_map, gts->ts_dsr_map,
637 (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR), gts->ts_tlb_int_select);
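
Lines 587-592 derive the CCH's TLB-miss behavior from the user options: fault reporting is enabled for both the poll and interrupt flavors, but an interrupt target is selected only for GRU_OPT_MISS_FMM_INTR. A sketch of that option decoding, with illustrative constants and a cch_like stand-in (the field programmed at lines 587-588 is not named in this listing):

    /* Illustrative values; the real GRU_OPT_* constants live in the driver headers. */
    enum { OPT_MISS_FMM_POLL = 1, OPT_MISS_FMM_INTR = 2 };

    struct cch_like { int fault_bit_enable; int tlb_int_enable; int tlb_int_select; };

    /* Program TLB-miss handling from the context's user options. */
    static void cch_set_miss_mode(struct cch_like *c, int opts, int cpu_fault_id)
    {
        c->fault_bit_enable = (opts == OPT_MISS_FMM_POLL || opts == OPT_MISS_FMM_INTR);
        c->tlb_int_enable = (opts == OPT_MISS_FMM_INTR);
        if (c->tlb_int_enable)
            c->tlb_int_select = cpu_fault_id;
    }
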
645 int gru_update_cch(struct gru_thread_state *gts)
648 struct gru_state *gru = gts->ts_gru;
649 int i, ctxnum = gts->ts_ctxnum, ret = 0;
655 if (gru->gs_gts[gts->ts_ctxnum] != gts)
660 cch->sizeavail[i] = gts->ts_sizeavail;
661 gts->ts_tlb_int_select = gru_cpu_fault_map_id();
664 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL
665 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR);
682 static int gru_retarget_intr(struct gru_thread_state *gts)
684 if (gts->ts_tlb_int_select < 0
685 || gts->ts_tlb_int_select == gru_cpu_fault_map_id())
688 gru_dbg(grudev, "retarget from %d to %d\n", gts->ts_tlb_int_select,
690 return gru_update_cch(gts);
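
gru_retarget_intr() (lines 682-690) is a cheap no-op check: nothing happens unless an interrupt target was chosen and the faulting CPU has moved. A sketch of the guard, with the CCH rewrite left to the caller:

    /* Re-target the TLB interrupt only when the saved select differs
     * from the current CPU's fault-map id (lines 684-685). */
    static int maybe_retarget(int *select, int cur_id)
    {
        if (*select < 0 || *select == cur_id)
            return 0;       /* nothing to do */
        *select = cur_id;
        return 1;           /* caller rewrites the CCH, as at line 690 */
    }
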
700 struct gru_thread_state *gts)
705 blade_id = gts->ts_user_blade_id;
709 chiplet_id = gts->ts_user_chiplet_id;
719 int gru_check_context_placement(struct gru_thread_state *gts)
729 gru = gts->ts_gru;
731 * If gru or gts->ts_tgid_owner isn't initialized properly, return
736 if (!gru || gts->ts_tgid_owner != current->tgid)
739 if (!gru_check_chiplet_assignment(gru, gts)) {
742 } else if (gru_retarget_intr(gts)) {
758 static int is_gts_stealable(struct gru_thread_state *gts,
761 if (is_kernel_context(gts))
764 return mutex_trylock(&gts->ts_ctxlock);
767 static void gts_stolen(struct gru_thread_state *gts,
770 if (is_kernel_context(gts)) {
774 mutex_unlock(&gts->ts_ctxlock);
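
is_gts_stealable()/gts_stolen() (lines 758-774) bracket a steal with a non-blocking lock: trylock decides stealability, and the matching unlock runs after the steal completes. A pthread sketch of the user-context path (the kernel-context branch at line 770 takes a blade-wide lock not shown in this listing, so it is elided here):

    #include <pthread.h>

    struct ctx_like { int is_kernel; pthread_mutex_t lock; };

    /* A context may be stolen only if its lock can be taken without
     * blocking the stealer on a busy context. */
    static int ctx_stealable(struct ctx_like *c)
    {
        if (c->is_kernel)
            return 0;       /* kernel contexts handled separately */
        return pthread_mutex_trylock(&c->lock) == 0;
    }

    /* After the steal, release the lock taken by the trylock. */
    static void ctx_stolen(struct ctx_like *c)
    {
        if (!c->is_kernel)
            pthread_mutex_unlock(&c->lock);
    }
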
779 void gru_steal_context(struct gru_thread_state *gts)
787 blade_id = gts->ts_user_blade_id;
790 cbr = gts->ts_cbr_au_count;
791 dsr = gts->ts_dsr_au_count;
805 if (gru_check_chiplet_assignment(gru, gts)) {
836 gts->ustats.context_stolen++;
844 "stole gid %d, ctxnum %d from gts %p. Need cb %d, ds %d;"
865 struct gru_state *gru_assign_gru_context(struct gru_thread_state *gts)
869 int blade_id = gts->ts_user_blade_id;
877 if (!gru_check_chiplet_assignment(grux, gts))
879 if (check_gru_resources(grux, gts->ts_cbr_au_count,
880 gts->ts_dsr_au_count,
891 if (!check_gru_resources(gru, gts->ts_cbr_au_count,
892 gts->ts_dsr_au_count, GRU_NUM_CCH)) {
896 reserve_gru_resources(gru, gts);
897 gts->ts_gru = gru;
898 gts->ts_blade = gru->gs_blade_id;
899 gts->ts_ctxnum = gru_assign_context_number(gru);
900 refcount_inc(&gts->ts_refcnt);
901 gru->gs_gts[gts->ts_ctxnum] = gts;
906 "gseg %p, gts %p, gid %d, ctx %d, cbr %d, dsr %d\n",
907 gseg_virtual_address(gts->ts_gru, gts->ts_ctxnum), gts,
908 gts->ts_gru->gs_gid, gts->ts_ctxnum,
909 gts->ts_cbr_au_count, gts->ts_dsr_au_count);
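
gru_assign_gru_context() (lines 865-909) scans candidate GRUs for one with enough free CBR/DSR resources before reserving them and wiring up the gts. A simplified scan, omitting the blade/chiplet placement check at line 877:

    #include <stddef.h>

    struct unit { int free_cb, free_ds; };

    /* Take the first candidate GRU with enough free control-block and
     * data-segment resources (a simplification of lines 879-892). */
    static struct unit *pick_unit(struct unit *u, size_t n, int need_cb, int need_ds)
    {
        for (size_t i = 0; i < n; i++)
            if (u[i].free_cb >= need_cb && u[i].free_ds >= need_ds)
                return &u[i];
        return NULL;
    }
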
928 struct gru_thread_state *gts;
938 gts = gru_find_thread_state(vma, TSID(vaddr, vma));
939 if (!gts)
943 mutex_lock(&gts->ts_ctxlock);
946 if (gru_check_context_placement(gts)) {
948 mutex_unlock(&gts->ts_ctxlock);
949 gru_unload_context(gts, 1);
953 if (!gts->ts_gru) {
955 if (!gru_assign_gru_context(gts)) {
957 mutex_unlock(&gts->ts_ctxlock);
960 expires = gts->ts_steal_jiffies + GRU_STEAL_DELAY;
962 gru_steal_context(gts);
965 gru_load_context(gts);
966 paddr = gseg_physical_address(gts->ts_gru, gts->ts_ctxnum);
973 mutex_unlock(&gts->ts_ctxlock);
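
The fault path at the end (lines 928-973) locks the gts, validates placement, and either assigns a physical context or, failing that, steals one; the expires computation at line 960 suggests stealing is rate-limited by GRU_STEAL_DELAY. A sketch of that assign-or-steal fallback under that assumption, with stub helpers:

    #include <time.h>

    #define STEAL_DELAY 2   /* stand-in for GRU_STEAL_DELAY (jiffies in the driver) */

    struct fctx { time_t steal_stamp; int have_hw_ctx; };

    static int try_assign(struct fctx *f) { return f->have_hw_ctx; }        /* stub */
    static void do_steal(struct fctx *f)  { f->steal_stamp = time(NULL); }  /* stub */

    /* If no physical context can be assigned, steal one from another
     * thread, but only once the steal delay has expired, then let the
     * faulting thread retry (after lines 953-962). */
    static int assign_or_steal(struct fctx *f)
    {
        if (try_assign(f))
            return 1;                            /* context assigned; load it */
        if (time(NULL) >= f->steal_stamp + STEAL_DELAY)
            do_steal(f);
        return 0;                                /* caller retries the fault */
    }
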