drm/i915: Always set the vma->pages
author    Chris Wilson <chris@chris-wilson.co.uk>
Mon, 15 Aug 2016 09:48:47 +0000 (10:48 +0100)
committer Chris Wilson <chris@chris-wilson.co.uk>
Mon, 15 Aug 2016 10:00:54 +0000 (11:00 +0100)
Previously, we only set the vma->pages pointer for GGTT entries.
However, if we always set it, we can use it to prettify code that
wants to access the backing store assigned to the VMA.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Link: http://patchwork.freedesktop.org/patch/msgid/1471254551-25805-8-git-send-email-chris@chris-wilson.co.uk
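
For context, a minimal sketch (not part of the patch) of the access pattern
this change enables. The helper name below is hypothetical; it only
illustrates that, once every bind path populates vma->pages, callers can read
a single field instead of branching on whether the VMA is a GGTT or ppGTT
binding:

/* Hypothetical helper, for illustration only: after this patch every
 * bind path fills in vma->pages, so code that needs the backing store
 * reads one field instead of special-casing GGTT views
 * (vma->ggtt_view.pages) versus ppGTT bindings (vma->obj->pages).
 */
static inline struct sg_table *i915_vma_backing_store(struct i915_vma *vma)
{
	return vma->pages;
}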
drivers/gpu/drm/i915/i915_gem.c
drivers/gpu/drm/i915/i915_gem_gtt.c
drivers/gpu/drm/i915/i915_gem_gtt.h

diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index f48c45080a6592eb7ae0863638426225b2c6c74b..45c45d3a6e312e36deb0b252b0ed541929804907 100644
@@ -2868,12 +2868,12 @@ int i915_vma_unbind(struct i915_vma *vma)
        if (i915_vma_is_ggtt(vma)) {
                if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
                        obj->map_and_fenceable = false;
-               } else if (vma->ggtt_view.pages) {
-                       sg_free_table(vma->ggtt_view.pages);
-                       kfree(vma->ggtt_view.pages);
+               } else if (vma->pages) {
+                       sg_free_table(vma->pages);
+                       kfree(vma->pages);
                }
-               vma->ggtt_view.pages = NULL;
        }
+       vma->pages = NULL;
 
        /* Since the unbound list is global, only move to that list if
         * no more VMAs exist. */
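
The hunk above encodes an ownership rule for the new field. The snippet below
is an illustrative restatement under the assumptions named in its comment, not
code from the patch (it omits the map_and_fenceable handling):

/* Illustrative only -- who owns vma->pages after this patch:
 *  - ppGTT VMAs and normal GGTT views: vma->pages aliases
 *    vma->obj->pages, so unbind merely clears the pointer;
 *  - rotated/partial GGTT views: vma->pages is a separately
 *    allocated sg_table, so unbind frees it first.
 */
if (i915_vma_is_ggtt(vma) &&
    vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL &&
    vma->pages) {
	sg_free_table(vma->pages);
	kfree(vma->pages);
}
vma->pages = NULL;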
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index d876501694c6d9c20dde7d6948dd88a058759986..9c178b0c40b531775be3e73ac96241758d201773 100644
@@ -170,11 +170,13 @@ static int ppgtt_bind_vma(struct i915_vma *vma,
 {
        u32 pte_flags = 0;
 
+       vma->pages = vma->obj->pages;
+
        /* Currently applicable only to VLV */
        if (vma->obj->gt_ro)
                pte_flags |= PTE_READ_ONLY;
 
-       vma->vm->insert_entries(vma->vm, vma->obj->pages, vma->node.start,
+       vma->vm->insert_entries(vma->vm, vma->pages, vma->node.start,
                                cache_level, pte_flags);
 
        return 0;
@@ -2618,8 +2620,7 @@ static int ggtt_bind_vma(struct i915_vma *vma,
        if (obj->gt_ro)
                pte_flags |= PTE_READ_ONLY;
 
-       vma->vm->insert_entries(vma->vm, vma->ggtt_view.pages,
-                               vma->node.start,
+       vma->vm->insert_entries(vma->vm, vma->pages, vma->node.start,
                                cache_level, pte_flags);
 
        /*
@@ -2651,8 +2652,7 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
 
        if (flags & I915_VMA_GLOBAL_BIND) {
                vma->vm->insert_entries(vma->vm,
-                                       vma->ggtt_view.pages,
-                                       vma->node.start,
+                                       vma->pages, vma->node.start,
                                        cache_level, pte_flags);
        }
 
@@ -2660,8 +2660,7 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
                struct i915_hw_ppgtt *appgtt =
                        to_i915(vma->vm->dev)->mm.aliasing_ppgtt;
                appgtt->base.insert_entries(&appgtt->base,
-                                           vma->ggtt_view.pages,
-                                           vma->node.start,
+                                           vma->pages, vma->node.start,
                                            cache_level, pte_flags);
        }
 
@@ -3557,28 +3556,27 @@ i915_get_ggtt_vma_pages(struct i915_vma *vma)
 {
        int ret = 0;
 
-       if (vma->ggtt_view.pages)
+       if (vma->pages)
                return 0;
 
        if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
-               vma->ggtt_view.pages = vma->obj->pages;
+               vma->pages = vma->obj->pages;
        else if (vma->ggtt_view.type == I915_GGTT_VIEW_ROTATED)
-               vma->ggtt_view.pages =
+               vma->pages =
                        intel_rotate_fb_obj_pages(&vma->ggtt_view.params.rotated, vma->obj);
        else if (vma->ggtt_view.type == I915_GGTT_VIEW_PARTIAL)
-               vma->ggtt_view.pages =
-                       intel_partial_pages(&vma->ggtt_view, vma->obj);
+               vma->pages = intel_partial_pages(&vma->ggtt_view, vma->obj);
        else
                WARN_ONCE(1, "GGTT view %u not implemented!\n",
                          vma->ggtt_view.type);
 
-       if (!vma->ggtt_view.pages) {
+       if (!vma->pages) {
                DRM_ERROR("Failed to get pages for GGTT view type %u!\n",
                          vma->ggtt_view.type);
                ret = -EINVAL;
-       } else if (IS_ERR(vma->ggtt_view.pages)) {
-               ret = PTR_ERR(vma->ggtt_view.pages);
-               vma->ggtt_view.pages = NULL;
+       } else if (IS_ERR(vma->pages)) {
+               ret = PTR_ERR(vma->pages);
+               vma->pages = NULL;
                DRM_ERROR("Failed to get pages for VMA view type %u (%d)!\n",
                          vma->ggtt_view.type, ret);
        }
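
As a hedged usage sketch (the actual call site lives elsewhere in
i915_gem_gtt.c and is not shown in this diff), a GGTT bind path is assumed to
populate vma->pages via i915_get_ggtt_vma_pages() before insert_entries()
dereferences it:

/* Hypothetical caller sketch, not code from this patch. On failure the
 * error is either -EINVAL (unknown view type) or the PTR_ERR() returned
 * by the rotated/partial remapper, as handled above.
 */
ret = i915_get_ggtt_vma_pages(vma);
if (ret)
	return ret;

vma->vm->insert_entries(vma->vm, vma->pages, vma->node.start,
			cache_level, pte_flags);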
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.h b/drivers/gpu/drm/i915/i915_gem_gtt.h
index 56e64a5355e89efec26700f172c033a979666818..b580e8a013ce6a2695b9805062d8396c9eade7bc 100644
@@ -155,8 +155,6 @@ struct i915_ggtt_view {
                } partial;
                struct intel_rotation_info rotated;
        } params;
-
-       struct sg_table *pages;
 };
 
 extern const struct i915_ggtt_view i915_ggtt_view_normal;
@@ -176,6 +174,7 @@ struct i915_vma {
        struct drm_mm_node node;
        struct drm_i915_gem_object *obj;
        struct i915_address_space *vm;
+       struct sg_table *pages;
        void __iomem *iomap;
        u64 size;