Lines Matching full:domain

21 static int intel_nested_attach_dev(struct iommu_domain *domain,  in intel_nested_attach_dev()  argument
25 struct dmar_domain *dmar_domain = to_dmar_domain(domain); in intel_nested_attach_dev()
30 if (info->domain) in intel_nested_attach_dev()
39 * Stage-1 domain cannot work alone, it is nested on a s2_domain. in intel_nested_attach_dev()
43 ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev); in intel_nested_attach_dev()
45 dev_err_ratelimited(dev, "s2 domain is not compatible\n"); in intel_nested_attach_dev()
51 dev_err_ratelimited(dev, "Failed to attach domain to iommu\n"); in intel_nested_attach_dev()
64 info->domain = dmar_domain; in intel_nested_attach_dev()
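
These hits appear to come from the Intel VT-d nested (stage-1) domain code in the Linux kernel. The attach path they outline is: handle the case where the device already has a domain, verify that the stage-2 parent the nested domain sits on is usable by this device, attach the domain at the IOMMU, and finally record the domain in the device's per-device info. A sketch of how the listed lines plausibly fit together follows; the helpers and intermediate lines not present in the listing (dev_iommu_priv_get(), device_block_translation(), domain_attach_iommu(), the PASID and cache-tag setup) are assumptions, not quoted source.

static int intel_nested_attach_dev(struct iommu_domain *domain,
				   struct device *dev)
{
	struct device_domain_info *info = dev_iommu_priv_get(dev);	/* assumed */
	struct dmar_domain *dmar_domain = to_dmar_domain(domain);
	int ret;

	if (info->domain)			/* device already attached */
		device_block_translation(dev);	/* assumed helper */

	/*
	 * Stage-1 domain cannot work alone, it is nested on a s2_domain,
	 * so the s2_domain must be compatible with this device's IOMMU.
	 */
	ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev);
	if (ret) {
		dev_err_ratelimited(dev, "s2 domain is not compatible\n");
		return ret;
	}

	ret = domain_attach_iommu(dmar_domain, info->iommu);	/* assumed helper */
	if (ret) {
		dev_err_ratelimited(dev, "Failed to attach domain to iommu\n");
		return ret;
	}

	/* ... PASID table programming and cache-tag setup elided ... */

	info->domain = dmar_domain;
	return 0;
}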
78 static void intel_nested_domain_free(struct iommu_domain *domain) in intel_nested_domain_free() argument
80 struct dmar_domain *dmar_domain = to_dmar_domain(domain); in intel_nested_domain_free()
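
The free path only needs to recover the driver-private dmar_domain from the generic iommu_domain. to_dmar_domain() is the usual container_of() downcast over the embedded 'domain' member, the same member whose address is returned from the allocation path at the end of this listing. A minimal sketch, with the unlink from the parent's s1_domains list and its locking treated as assumptions:

static inline struct dmar_domain *to_dmar_domain(struct iommu_domain *dom)
{
	/* The embedded member is named 'domain', cf. "&domain->domain" below. */
	return container_of(dom, struct dmar_domain, domain);
}

static void intel_nested_domain_free(struct iommu_domain *domain)
{
	struct dmar_domain *dmar_domain = to_dmar_domain(domain);

	/* Assumption: drop the s2_link added onto the parent's s1_domains
	 * list at allocation time (appropriate locking omitted here). */
	list_del(&dmar_domain->s2_link);
	kfree(dmar_domain);
}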
90 static int intel_nested_cache_invalidate_user(struct iommu_domain *domain, in intel_nested_cache_invalidate_user() argument
93 struct dmar_domain *dmar_domain = to_dmar_domain(domain); in intel_nested_cache_invalidate_user()
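
The user cache-invalidation hook starts with the same downcast and then walks a user-provided array of invalidation requests against the stage-1 domain. Only the prototype and the downcast show up in the listing; the loop below is a hedged outline of the general shape (the second parameter's type, the per-entry copy and validation, and the flush step are all assumptions):

static int intel_nested_cache_invalidate_user(struct iommu_domain *domain,
					      struct iommu_user_data_array *array)
{
	struct dmar_domain *dmar_domain = to_dmar_domain(domain);
	u32 index, processed = 0;
	int ret = 0;

	for (index = 0; index < array->entry_num; index++) {
		/* Assumed per-entry work: copy one invalidation request from
		 * user space, validate its flags and address range, then flush
		 * the matching stage-1 caches for dmar_domain. */
		processed++;
	}

	array->entry_num = processed;	/* report how many entries were consumed */
	return ret;
}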
134 struct dmar_domain *domain, in domain_setup_nested() argument
139 return intel_pasid_setup_nested(iommu, dev, pasid, domain); in domain_setup_nested()
142 domain); in domain_setup_nested()
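
domain_setup_nested() reads as a small dispatch helper for PASID programming: either set up a fresh nested PASID-table entry or replace an existing one for the given device and PASID. The bare "domain);" hit is the tail of the second call's argument list. A hedged sketch, where the 'old' parameter and the replace helper are inferred from that continuation line rather than quoted:

static int domain_setup_nested(struct intel_iommu *iommu, struct device *dev,
			       struct dmar_domain *domain, ioasid_t pasid,
			       struct iommu_domain *old)	/* 'old' assumed */
{
	if (!old)	/* nothing installed yet for this PASID */
		return intel_pasid_setup_nested(iommu, dev, pasid, domain);

	/* Assumption: otherwise the existing entry is replaced in place; the
	 * replace helper may take further arguments in the real code. */
	return intel_pasid_replace_nested(iommu, dev, pasid,
					  domain);
}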
145 static int intel_nested_set_dev_pasid(struct iommu_domain *domain, in intel_nested_set_dev_pasid() argument
150 struct dmar_domain *dmar_domain = to_dmar_domain(domain); in intel_nested_set_dev_pasid()
161 ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev); in intel_nested_set_dev_pasid()
165 dev_pasid = domain_add_dev_pasid(domain, dev, pasid); in intel_nested_set_dev_pasid()
178 domain_remove_dev_pasid(domain, dev, pasid); in intel_nested_set_dev_pasid()
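
intel_nested_set_dev_pasid() mirrors the attach path but per PASID: check that the stage-2 parent is compatible with the device, register the (device, PASID) pair on the nested domain, program the PASID table entry, and unwind the registration on failure, which is what the domain_remove_dev_pasid() hit looks like. A sketch under those assumptions; the trailing prototype parameters and the error-handling structure are inferred:

static int intel_nested_set_dev_pasid(struct iommu_domain *domain,
				      struct device *dev, ioasid_t pasid,
				      struct iommu_domain *old)	/* params assumed */
{
	struct device_domain_info *info = dev_iommu_priv_get(dev);	/* assumed */
	struct dmar_domain *dmar_domain = to_dmar_domain(domain);
	struct dev_pasid_info *dev_pasid;
	int ret;

	/* Same stage-2 compatibility check as in the attach_dev path. */
	ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev);
	if (ret)
		return ret;

	/* Track the (device, pasid) pair on the nested domain. */
	dev_pasid = domain_add_dev_pasid(domain, dev, pasid);
	if (IS_ERR(dev_pasid))
		return PTR_ERR(dev_pasid);

	/* Program the PASID table entry via the dispatch helper above. */
	ret = domain_setup_nested(info->iommu, dev, dmar_domain, pasid, old);
	if (ret)
		domain_remove_dev_pasid(domain, dev, pasid);	/* unwind */

	return ret;
}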
198 struct dmar_domain *domain; in intel_iommu_domain_alloc_nested() local
204 /* Must be nested domain */ in intel_iommu_domain_alloc_nested()
216 domain = kzalloc(sizeof(*domain), GFP_KERNEL_ACCOUNT); in intel_iommu_domain_alloc_nested()
217 if (!domain) in intel_iommu_domain_alloc_nested()
220 domain->use_first_level = true; in intel_iommu_domain_alloc_nested()
221 domain->s2_domain = s2_domain; in intel_iommu_domain_alloc_nested()
222 domain->s1_cfg = vtd; in intel_iommu_domain_alloc_nested()
223 domain->domain.ops = &intel_nested_domain_ops; in intel_iommu_domain_alloc_nested()
224 domain->domain.type = IOMMU_DOMAIN_NESTED; in intel_iommu_domain_alloc_nested()
225 INIT_LIST_HEAD(&domain->devices); in intel_iommu_domain_alloc_nested()
226 INIT_LIST_HEAD(&domain->dev_pasids); in intel_iommu_domain_alloc_nested()
227 INIT_LIST_HEAD(&domain->cache_tags); in intel_iommu_domain_alloc_nested()
228 spin_lock_init(&domain->lock); in intel_iommu_domain_alloc_nested()
229 spin_lock_init(&domain->cache_lock); in intel_iommu_domain_alloc_nested()
230 xa_init(&domain->iommu_array); in intel_iommu_domain_alloc_nested()
233 list_add(&domain->s2_link, &s2_domain->s1_domains); in intel_iommu_domain_alloc_nested()
236 return &domain->domain; in intel_iommu_domain_alloc_nested()
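
The allocation path is the densest group of hits and can be read almost top to bottom: a dmar_domain is kzalloc'ed, flagged as using first-level (stage-1) page tables, tied to the stage-2 parent and the user-supplied stage-1 config, given the nested domain ops and the IOMMU_DOMAIN_NESTED type, its lists, locks and xarray are initialized, it is linked onto the parent's list of stage-1 children (presumably so stage-2 changes can be propagated to them), and finally the embedded iommu_domain is returned. A sketch assembling the listed lines; the parameter list, the validation behind the "Must be nested domain" comment, the copy of the user config into 'vtd', and the locking around the list_add are assumptions:

struct iommu_domain *
intel_iommu_domain_alloc_nested(struct device *dev, struct iommu_domain *parent,
				u32 flags,
				const struct iommu_user_data *user_data)	/* signature assumed */
{
	struct dmar_domain *s2_domain = to_dmar_domain(parent);
	struct iommu_hwpt_vtd_s1 vtd;			/* stage-1 config type assumed */
	struct dmar_domain *domain;

	/* Must be nested domain */
	/* ... checks on flags, parent type and user_data, plus copying the
	 *     guest stage-1 configuration into 'vtd', elided ... */

	domain = kzalloc(sizeof(*domain), GFP_KERNEL_ACCOUNT);
	if (!domain)
		return ERR_PTR(-ENOMEM);

	domain->use_first_level = true;			/* stage-1 page tables */
	domain->s2_domain = s2_domain;			/* parent for nesting */
	domain->s1_cfg = vtd;				/* guest stage-1 config */
	domain->domain.ops = &intel_nested_domain_ops;
	domain->domain.type = IOMMU_DOMAIN_NESTED;
	INIT_LIST_HEAD(&domain->devices);
	INIT_LIST_HEAD(&domain->dev_pasids);
	INIT_LIST_HEAD(&domain->cache_tags);
	spin_lock_init(&domain->lock);
	spin_lock_init(&domain->cache_lock);
	xa_init(&domain->iommu_array);

	/* Make the parent aware of its nested child (locking assumed). */
	list_add(&domain->s2_link, &s2_domain->s1_domains);

	return &domain->domain;
}

Note that what is returned is the embedded iommu_domain, which is why the to_dmar_domain() downcast sketched earlier can recover the dmar_domain with container_of().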