Lines matching refs: nvidia_smmu
34 struct nvidia_smmu {
41 static inline struct nvidia_smmu *to_nvidia_smmu(struct arm_smmu_device *smmu)
43 return container_of(smmu, struct nvidia_smmu, smmu);
49 struct nvidia_smmu *nvidia_smmu;
51 nvidia_smmu = container_of(smmu, struct nvidia_smmu, smmu);
52 return nvidia_smmu->bases[inst] + (page << smmu->pgshift);
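The first group of references (source lines 34-52) outlines the wrapper type and its accessors. Below is a minimal sketch of what those lines imply, assuming this is the Linux arm-smmu NVIDIA implementation (arm-smmu-nvidia.c); the MAX_SMMU_INSTANCES value, the exact field order, and the type of the mc member are assumptions beyond what the listing shows.

/*
 * Sketch only: field and macro names not visible in the listing
 * (MAX_SMMU_INSTANCES, bases, num_instances, mc) are assumptions.
 */
#define MAX_SMMU_INSTANCES 2

struct nvidia_smmu {
	struct arm_smmu_device smmu;             /* embedded core device, used by container_of() */
	void __iomem *bases[MAX_SMMU_INSTANCES]; /* MMIO base of each mirrored SMMU instance */
	unsigned int num_instances;
	struct tegra_mc *mc;                     /* Tegra memory controller handle */
};

static inline struct nvidia_smmu *to_nvidia_smmu(struct arm_smmu_device *smmu)
{
	/* Recover the wrapper from the embedded arm_smmu_device */
	return container_of(smmu, struct nvidia_smmu, smmu);
}

static inline void __iomem *nvidia_smmu_page(struct arm_smmu_device *smmu,
					     unsigned int inst, int page)
{
	struct nvidia_smmu *nvidia_smmu;

	nvidia_smmu = container_of(smmu, struct nvidia_smmu, smmu);
	/* Address of register page 'page' within SMMU instance 'inst' */
	return nvidia_smmu->bases[inst] + (page << smmu->pgshift);
}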
66 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
87 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
100 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
134 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
180 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
226 struct nvidia_smmu *nvidia;
252 struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
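The references at source lines 66-252 each start by recovering the wrapper with to_nvidia_smmu(), which points at the per-callback pattern of replaying a register access on every instance. A sketch of that pattern follows, assuming an arm_smmu_impl-style write hook; the name nvidia_smmu_write_reg and its exact prototype are not shown in the listing and are assumptions.

/*
 * Sketch of the mirroring pattern: one logical register write is
 * repeated on every SMMU instance tracked by the wrapper.
 */
static void nvidia_smmu_write_reg(struct arm_smmu_device *smmu,
				  int page, int offset, u32 val)
{
	struct nvidia_smmu *nvidia = to_nvidia_smmu(smmu);
	unsigned int i;

	for (i = 0; i < nvidia->num_instances; i++) {
		void __iomem *reg = nvidia_smmu_page(smmu, i, page) + offset;

		writel_relaxed(val, reg);
	}
}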
311 struct nvidia_smmu *nvidia_smmu;
315 nvidia_smmu = devm_krealloc(dev, smmu, sizeof(*nvidia_smmu), GFP_KERNEL);
316 if (!nvidia_smmu)
319 nvidia_smmu->mc = devm_tegra_memory_controller_get(dev);
320 if (IS_ERR(nvidia_smmu->mc))
321 return ERR_CAST(nvidia_smmu->mc);
324 nvidia_smmu->bases[0] = smmu->base;
325 nvidia_smmu->num_instances++;
332 nvidia_smmu->bases[i] = devm_ioremap_resource(dev, res);
333 if (IS_ERR(nvidia_smmu->bases[i]))
334 return ERR_CAST(nvidia_smmu->bases[i]);
336 nvidia_smmu->num_instances++;
339 if (nvidia_smmu->num_instances == 1)
340 nvidia_smmu->smmu.impl = &nvidia_smmu_single_impl;
342 nvidia_smmu->smmu.impl = &nvidia_smmu_impl;
344 return &nvidia_smmu->smmu;
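The last group (source lines 311-344) traces the init path: the core arm_smmu_device allocation is grown into the wrapper with devm_krealloc(), the Tegra memory controller is looked up, extra instance MMIO windows are mapped, and the impl ops are chosen from the instance count. A sketch under those assumptions is below; the function name nvidia_smmu_impl_init, the platform-resource loop, and the error codes are inferred rather than shown in the listing.

/*
 * Sketch of the init flow implied by lines 311-344.
 */
struct arm_smmu_device *nvidia_smmu_impl_init(struct arm_smmu_device *smmu)
{
	struct platform_device *pdev = to_platform_device(smmu->dev);
	struct device *dev = smmu->dev;
	struct nvidia_smmu *nvidia_smmu;
	unsigned int i;

	/* Grow the existing allocation so the wrapper embeds the core device */
	nvidia_smmu = devm_krealloc(dev, smmu, sizeof(*nvidia_smmu), GFP_KERNEL);
	if (!nvidia_smmu)
		return ERR_PTR(-ENOMEM);

	nvidia_smmu->mc = devm_tegra_memory_controller_get(dev);
	if (IS_ERR(nvidia_smmu->mc))
		return ERR_CAST(nvidia_smmu->mc);

	/* Instance 0 reuses the base already mapped by the core driver */
	nvidia_smmu->bases[0] = smmu->base;
	nvidia_smmu->num_instances++;

	/* Map any additional instance apertures described by the device */
	for (i = 1; i < MAX_SMMU_INSTANCES; i++) {
		struct resource *res;

		res = platform_get_resource(pdev, IORESOURCE_MEM, i);
		if (!res)
			break;

		nvidia_smmu->bases[i] = devm_ioremap_resource(dev, res);
		if (IS_ERR(nvidia_smmu->bases[i]))
			return ERR_CAST(nvidia_smmu->bases[i]);

		nvidia_smmu->num_instances++;
	}

	/* Single-instance parts can skip the mirroring hooks entirely */
	if (nvidia_smmu->num_instances == 1)
		nvidia_smmu->smmu.impl = &nvidia_smmu_single_impl;
	else
		nvidia_smmu->smmu.impl = &nvidia_smmu_impl;

	return &nvidia_smmu->smmu;
}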