Lines matching refs: dev_dax

17 static int check_vma(struct dev_dax *dev_dax, struct vm_area_struct *vma,
20 struct device *dev = &dev_dax->dev;
23 if (!dax_alive(dev_dax->dax_dev))
34 mask = dev_dax->align - 1;
54 __weak phys_addr_t dax_pgoff_to_phys(struct dev_dax *dev_dax, pgoff_t pgoff,
59 for (i = 0; i < dev_dax->nr_range; i++) {
60 struct dev_dax_range *dax_range = &dev_dax->ranges[i];
81 struct dev_dax *dev_dax = filp->private_data;
85 if (dev_dax->pgmap->vmemmap_shift)
103 static vm_fault_t __dev_dax_pte_fault(struct dev_dax *dev_dax,
106 struct device *dev = &dev_dax->dev;
111 if (check_vma(dev_dax, vmf->vma, __func__))
114 if (dev_dax->align > PAGE_SIZE) {
116 dev_dax->align, fault_size);
120 if (fault_size != dev_dax->align)
123 phys = dax_pgoff_to_phys(dev_dax, vmf->pgoff, PAGE_SIZE);
136 static vm_fault_t __dev_dax_pmd_fault(struct dev_dax *dev_dax,
140 struct device *dev = &dev_dax->dev;
146 if (check_vma(dev_dax, vmf->vma, __func__))
149 if (dev_dax->align > PMD_SIZE) {
151 dev_dax->align, fault_size);
155 if (fault_size < dev_dax->align)
157 else if (fault_size > dev_dax->align)
166 phys = dax_pgoff_to_phys(dev_dax, pgoff, PMD_SIZE);
180 static vm_fault_t __dev_dax_pud_fault(struct dev_dax *dev_dax,
184 struct device *dev = &dev_dax->dev;
191 if (check_vma(dev_dax, vmf->vma, __func__))
194 if (dev_dax->align > PUD_SIZE) {
196 dev_dax->align, fault_size);
200 if (fault_size < dev_dax->align)
202 else if (fault_size > dev_dax->align)
211 phys = dax_pgoff_to_phys(dev_dax, pgoff, PUD_SIZE);
224 static vm_fault_t __dev_dax_pud_fault(struct dev_dax *dev_dax,
236 struct dev_dax *dev_dax = filp->private_data;
238 dev_dbg(&dev_dax->dev, "%s: %s (%#lx - %#lx) order:%d\n", current->comm,
244 rc = __dev_dax_pte_fault(dev_dax, vmf);
246 rc = __dev_dax_pmd_fault(dev_dax, vmf);
248 rc = __dev_dax_pud_fault(dev_dax, vmf);
265 struct dev_dax *dev_dax = filp->private_data;
267 if (!IS_ALIGNED(addr, dev_dax->align))
275 struct dev_dax *dev_dax = filp->private_data;
277 return dev_dax->align;
289 struct dev_dax *dev_dax = filp->private_data;
292 dev_dbg(&dev_dax->dev, "trace\n");
299 rc = check_vma(dev_dax, vma, __func__);
315 struct dev_dax *dev_dax = filp ? filp->private_data : NULL;
317 if (!dev_dax || addr)
320 align = dev_dax->align;
350 struct dev_dax *dev_dax = dax_get_private(dax_dev);
352 dev_dbg(&dev_dax->dev, "trace\n");
359 filp->private_data = dev_dax;
367 struct dev_dax *dev_dax = filp->private_data;
369 dev_dbg(&dev_dax->dev, "trace\n");
388 static void dev_dax_kill(void *dev_dax)
390 kill_dev_dax(dev_dax);
393 static int dev_dax_probe(struct dev_dax *dev_dax)
395 struct dax_device *dax_dev = dev_dax->dax_dev;
396 struct device *dev = &dev_dax->dev;
403 if (static_dev_dax(dev_dax)) {
404 if (dev_dax->nr_range > 1) {
410 pgmap = dev_dax->pgmap;
412 if (dev_dax->pgmap) {
419 struct_size(pgmap, ranges, dev_dax->nr_range - 1),
424 pgmap->nr_range = dev_dax->nr_range;
425 dev_dax->pgmap = pgmap;
427 for (i = 0; i < dev_dax->nr_range; i++) {
428 struct range *range = &dev_dax->ranges[i].range;
433 for (i = 0; i < dev_dax->nr_range; i++) {
434 struct range *range = &dev_dax->ranges[i].range;
445 if (dev_dax->align > PAGE_SIZE)
447 order_base_2(dev_dax->align >> PAGE_SHIFT);
466 return devm_add_action_or_reset(dev, dev_dax_kill, dev_dax);