Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
---
 drivers/gpu/drm/i915/i915_debugfs.c   |    6 +-
 drivers/gpu/drm/i915/i915_drv.h       |    2 +-
 drivers/gpu/drm/i915/i915_gem.c       |   93 ++++++++++++++-------------------
 drivers/gpu/drm/i915/i915_gem_evict.c |    6 +-
 drivers/gpu/drm/i915/i915_gem_gtt.c   |   10 ++--
 5 files changed, 52 insertions(+), 65 deletions(-)

diff --git a/drivers/gpu/drm/i915/i915_debugfs.c b/drivers/gpu/drm/i915/i915_debugfs.c
index 8caa55f..af133ac 100644
--- a/drivers/gpu/drm/i915/i915_debugfs.c
+++ b/drivers/gpu/drm/i915/i915_debugfs.c
@@ -124,9 +124,9 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj)
                seq_printf(m, " (name: %d)", obj->base.name);
        if (obj->fence_reg != I915_FENCE_REG_NONE)
                seq_printf(m, " (fence: %d)", obj->fence_reg);
-       if (obj->gtt_space != NULL)
+       if (drm_mm_node_allocated(&obj->gtt_space))
                seq_printf(m, " (gtt offset: %08x, size: %08x)",
-                          obj->gtt_offset, (unsigned int)obj->gtt_space->size);
+                          obj->gtt_offset, (unsigned int)obj->gtt_space.size);
        if (obj->pin_mappable || obj->fault_mappable)
                seq_printf(m, " (mappable)");
        if (obj->ring != NULL)
@@ -180,7 +180,7 @@ static int i915_gem_object_list_info(struct seq_file *m, void *data)
                describe_obj(m, obj);
                seq_printf(m, "\n");
                total_obj_size += obj->base.size;
-               total_gtt_size += obj->gtt_space->size;
+               total_gtt_size += obj->gtt_space.size;
                count++;
        }
        mutex_unlock(&dev->struct_mutex);
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 8a4b247..bdb05c2 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -712,7 +712,7 @@ struct drm_i915_gem_object {
        struct drm_gem_object base;

        /** Current space allocated to this object in the GTT, if any. */
-       struct drm_mm_node *gtt_space;
+       struct drm_mm_node gtt_space;
        struct list_head gtt_list;

        /** This object's place on the active/flushing/inactive lists */
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 868d3a1..f8612be 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -87,10 +87,10 @@ static void i915_gem_info_add_gtt(struct drm_i915_private *dev_priv,
                                  struct drm_i915_gem_object *obj)
 {
        dev_priv->mm.gtt_count++;
-       dev_priv->mm.gtt_memory += obj->gtt_space->size;
+       dev_priv->mm.gtt_memory += obj->gtt_space.size;
        if (obj->gtt_offset < dev_priv->mm.gtt_mappable_end) {
                dev_priv->mm.mappable_gtt_used +=
-                       min_t(size_t, obj->gtt_space->size,
+                       min_t(size_t, obj->gtt_space.size,
                              dev_priv->mm.gtt_mappable_end - obj->gtt_offset);
        }
        list_add_tail(&obj->gtt_list, &dev_priv->mm.gtt_list);
@@ -100,10 +100,10 @@ static void i915_gem_info_remove_gtt(struct drm_i915_private *dev_priv,
                                     struct drm_i915_gem_object *obj)
 {
        dev_priv->mm.gtt_count--;
-       dev_priv->mm.gtt_memory -= obj->gtt_space->size;
+       dev_priv->mm.gtt_memory -= obj->gtt_space.size;
        if (obj->gtt_offset < dev_priv->mm.gtt_mappable_end) {
                dev_priv->mm.mappable_gtt_used -=
-                       min_t(size_t, obj->gtt_space->size,
+                       min_t(size_t, obj->gtt_space.size,
                              dev_priv->mm.gtt_mappable_end - obj->gtt_offset);
        }
        list_del_init(&obj->gtt_list);
@@ -124,13 +124,13 @@ i915_gem_info_update_mappable(struct drm_i915_private *dev_priv,
                        /* Combined state was already mappable. */
                        return;
                dev_priv->mm.gtt_mappable_count++;
-               dev_priv->mm.gtt_mappable_memory += obj->gtt_space->size;
+               dev_priv->mm.gtt_mappable_memory += obj->gtt_space.size;
        } else {
                if (obj->pin_mappable || obj->fault_mappable)
                        /* Combined state still mappable. */
                        return;
                dev_priv->mm.gtt_mappable_count--;
-               dev_priv->mm.gtt_mappable_memory -= obj->gtt_space->size;
+               dev_priv->mm.gtt_mappable_memory -= obj->gtt_space.size;
        }
 }

@@ -139,7 +139,7 @@ static void i915_gem_info_add_pin(struct drm_i915_private *dev_priv,
                                  bool mappable)
 {
        dev_priv->mm.pin_count++;
-       dev_priv->mm.pin_memory += obj->gtt_space->size;
+       dev_priv->mm.pin_memory += obj->gtt_space.size;
        if (mappable) {
                obj->pin_mappable = true;
                i915_gem_info_update_mappable(dev_priv, obj, true);
@@ -150,7 +150,7 @@ static void i915_gem_info_remove_pin(struct drm_i915_private *dev_priv,
                                     struct drm_i915_gem_object *obj)
 {
        dev_priv->mm.pin_count--;
-       dev_priv->mm.pin_memory -= obj->gtt_space->size;
+       dev_priv->mm.pin_memory -= obj->gtt_space.size;
        if (obj->pin_mappable) {
                obj->pin_mappable = false;
                i915_gem_info_update_mappable(dev_priv, obj, false);
@@ -212,7 +212,8 @@ static int i915_mutex_lock_interruptible(struct drm_device *dev)
 static inline bool
 i915_gem_object_is_inactive(struct drm_i915_gem_object *obj)
 {
-       return obj->gtt_space && !obj->active && obj->pin_count == 0;
+       return drm_mm_node_allocated(&obj->gtt_space)
+               && !obj->active && obj->pin_count == 0;
 }

 int i915_gem_do_init(struct drm_device *dev,
@@ -1059,7 +1060,7 @@ i915_gem_pwrite_ioctl(struct drm_device *dev, void *data,
        if (obj->phys_obj)
                ret = i915_gem_phys_pwrite(dev, obj, args, file);
        else if (obj->tiling_mode == I915_TILING_NONE &&
-                obj->gtt_space &&
+                drm_mm_node_allocated(&obj->gtt_space) &&
                 obj->base.write_domain != I915_GEM_DOMAIN_CPU) {
                ret = i915_gem_object_pin(obj, 0, true);
                if (ret)
@@ -1283,7 +1284,7 @@ int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
        mutex_lock(&dev->struct_mutex);
        BUG_ON(obj->pin_count && !obj->pin_mappable);

-       if (obj->gtt_space) {
+       if (drm_mm_node_allocated(&obj->gtt_space)) {
                if (!obj->map_and_fenceable) {
                        ret = i915_gem_object_unbind(obj);
                        if (ret)
@@ -1291,7 +1292,7 @@ int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
                }
        }

-       if (!obj->gtt_space) {
+       if (!drm_mm_node_allocated(&obj->gtt_space)) {
                ret = i915_gem_object_bind_to_gtt(obj, 0, true);
                if (ret)
                        goto unlock;
@@ -2193,7 +2194,7 @@ i915_gem_object_unbind(struct drm_i915_gem_object *obj)
        struct drm_i915_private *dev_priv = dev->dev_private;
        int ret = 0;

-       if (obj->gtt_space == NULL)
+       if (!drm_mm_node_allocated(&obj->gtt_space))
                return 0;

        if (obj->pin_count != 0) {
@@ -2235,8 +2236,7 @@ i915_gem_object_unbind(struct drm_i915_gem_object *obj)
        /* Avoid an unnecessary call to unbind on rebind. */
        obj->map_and_fenceable = true;

-       drm_mm_put_block(obj->gtt_space);
-       obj->gtt_space = NULL;
+       drm_mm_remove_node(&obj->gtt_space);
        obj->gtt_offset = 0;

        if (i915_gem_object_is_purgeable(obj))
@@ -2292,7 +2292,7 @@ static void sandybridge_write_fence_reg(struct drm_i915_gem_object *obj)
 {
        struct drm_device *dev = obj->base.dev;
        drm_i915_private_t *dev_priv = dev->dev_private;
-       u32 size = obj->gtt_space->size;
+       u32 size = obj->gtt_space.size;
        int regnum = obj->fence_reg;
        uint64_t val;

@@ -2313,7 +2313,7 @@ static void i965_write_fence_reg(struct drm_i915_gem_object *obj)
 {
        struct drm_device *dev = obj->base.dev;
        drm_i915_private_t *dev_priv = dev->dev_private;
-       u32 size = obj->gtt_space->size;
+       u32 size = obj->gtt_space.size;
        int regnum = obj->fence_reg;
        uint64_t val;

@@ -2332,7 +2332,7 @@ static void i915_write_fence_reg(struct drm_i915_gem_object *obj)
 {
        struct drm_device *dev = obj->base.dev;
        drm_i915_private_t *dev_priv = dev->dev_private;
-       u32 size = obj->gtt_space->size;
+       u32 size = obj->gtt_space.size;
        uint32_t fence_reg, val, pitch_val;
        int tile_width;

@@ -2340,7 +2340,7 @@ static void i915_write_fence_reg(struct drm_i915_gem_object *obj)
            (obj->gtt_offset & (size - 1))) {
                WARN(1, "%s: object 0x%08x [fenceable? %d] not 1M or size (0x%08x) aligned [gtt_space offset=%lx, size=%lx]\n",
                     __func__, obj->gtt_offset, obj->map_and_fenceable, size,
-                    obj->gtt_space->start, obj->gtt_space->size);
+                    obj->gtt_space.start, obj->gtt_space.size);
                return;
        }

@@ -2379,7 +2379,7 @@ static void i830_write_fence_reg(struct drm_i915_gem_object *obj)
 {
        struct drm_device *dev = obj->base.dev;
        drm_i915_private_t *dev_priv = dev->dev_private;
-       u32 size = obj->gtt_space->size;
+       u32 size = obj->gtt_space.size;
        int regnum = obj->fence_reg;
        uint32_t val;
        uint32_t pitch_val;
@@ -2642,7 +2642,6 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 {
        struct drm_device *dev = obj->base.dev;
        drm_i915_private_t *dev_priv = dev->dev_private;
-       struct drm_mm_node *free_space;
        gfp_t gfpmask = __GFP_NORETRY | __GFP_NOWARN;
        u32 size, fence_size, fence_alignment;
        bool mappable, fenceable;
@@ -2676,27 +2675,17 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,

  search_free:
        if (map_and_fenceable)
-               free_space =
-                       drm_mm_search_free_in_range(&dev_priv->mm.gtt_space,
+               ret =
+                       drm_mm_insert_node_in_range(&dev_priv->mm.gtt_space,
+                                                   &obj->gtt_space,
                                                    size, alignment, 0,
-                                                   dev_priv->mm.gtt_mappable_end,
-                                                   0);
+                                                   dev_priv->mm.gtt_mappable_end);
        else
-               free_space = drm_mm_search_free(&dev_priv->mm.gtt_space,
-                                               size, alignment, 0);
-
-       if (free_space != NULL) {
-               if (map_and_fenceable)
-                       obj->gtt_space =
-                               drm_mm_get_block_range_generic(free_space,
-                                                              size, alignment, 0,
-                                                              dev_priv->mm.gtt_mappable_end,
-                                                              0);
-               else
-                       obj->gtt_space =
-                               drm_mm_get_block(free_space, size, alignment);
-       }
-       if (obj->gtt_space == NULL) {
+               ret = drm_mm_insert_node(&dev_priv->mm.gtt_space,
+                                        &obj->gtt_space,
+                                        size, alignment);
+
+       if (ret != 0) {
                /* If the gtt is empty and we're still having trouble
                 * fitting our object in, we're out of memory.
                 */
@@ -2710,8 +2699,7 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,

        ret = i915_gem_object_get_pages_gtt(obj, gfpmask);
        if (ret) {
-               drm_mm_put_block(obj->gtt_space);
-               obj->gtt_space = NULL;
+               drm_mm_remove_node(&obj->gtt_space);

                if (ret == -ENOMEM) {
                        /* first try to clear up some space from the GTT */
@@ -2737,8 +2725,7 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
        ret = i915_gem_gtt_bind_object(obj);
        if (ret) {
                i915_gem_object_put_pages_gtt(obj);
-               drm_mm_put_block(obj->gtt_space);
-               obj->gtt_space = NULL;
+               drm_mm_remove_node(&obj->gtt_space);

                ret = i915_gem_evict_something(dev, size,
                                               alignment, map_and_fenceable);
@@ -2748,7 +2735,7 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
                goto search_free;
        }

-       obj->gtt_offset = obj->gtt_space->start;
+       obj->gtt_offset = obj->gtt_space.start;

        /* keep track of bounds object by adding it to the inactive list */
        list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
@@ -2764,8 +2751,8 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
        trace_i915_gem_object_bind(obj, obj->gtt_offset, map_and_fenceable);

        fenceable =
-               obj->gtt_space->size == fence_size &&
-               (obj->gtt_space->start & (fence_alignment -1)) == 0;
+               obj->gtt_space.size == fence_size &&
+               (obj->gtt_space.start & (fence_alignment -1)) == 0;

        mappable =
                obj->gtt_offset + obj->base.size <= dev_priv->mm.gtt_mappable_end;
@@ -2866,7 +2853,7 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, int write)
        int ret;

        /* Not valid to be called on unbound objects. */
-       if (obj->gtt_space == NULL)
+       if (!drm_mm_node_allocated(&obj->gtt_space))
                return -EINVAL;

        ret = i915_gem_object_flush_gpu_write_domain(obj, false);
@@ -2914,7 +2901,7 @@ i915_gem_object_set_to_display_plane(struct drm_i915_gem_object *obj,
        int ret;

        /* Not valid to be called on unbound objects. */
-       if (obj->gtt_space == NULL)
+       if (!drm_mm_node_allocated(&obj->gtt_space))
                return -EINVAL;

        ret = i915_gem_object_flush_gpu_write_domain(obj, true);
@@ -4084,7 +4071,7 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
        BUG_ON(map_and_fenceable && !map_and_fenceable);
        WARN_ON(i915_verify_lists(dev));

-       if (obj->gtt_space != NULL) {
+       if (drm_mm_node_allocated(&obj->gtt_space)) {
                if ((alignment && obj->gtt_offset & (alignment - 1)) ||
                    (map_and_fenceable && !obj->map_and_fenceable)) {
                        WARN(obj->pin_count,
@@ -4100,7 +4087,7 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
                }
        }

-       if (obj->gtt_space == NULL) {
+       if (!drm_mm_node_allocated(&obj->gtt_space)) {
                ret = i915_gem_object_bind_to_gtt(obj, alignment,
                                                  map_and_fenceable);
                if (ret)
@@ -4127,7 +4114,7 @@ i915_gem_object_unpin(struct drm_i915_gem_object *obj)

        WARN_ON(i915_verify_lists(dev));
        BUG_ON(obj->pin_count == 0);
-       BUG_ON(obj->gtt_space == NULL);
+       BUG_ON(!drm_mm_node_allocated(&obj->gtt_space));

        if (--obj->pin_count == 0) {
                if (!obj->active)
@@ -4319,7 +4306,7 @@ i915_gem_madvise_ioctl(struct drm_device *dev, void *data,

        /* if the object is no longer bound, discard its backing storage */
        if (i915_gem_object_is_purgeable(obj) &&
-           obj->gtt_space == NULL)
+           !drm_mm_node_allocated(&obj->gtt_space))
                i915_gem_object_truncate(obj);

        args->retained = obj->madv != __I915_MADV_PURGED;
diff --git a/drivers/gpu/drm/i915/i915_gem_evict.c b/drivers/gpu/drm/i915/i915_gem_evict.c
index 03e15d3..ea252a4 100644
--- a/drivers/gpu/drm/i915/i915_gem_evict.c
+++ b/drivers/gpu/drm/i915/i915_gem_evict.c
@@ -36,7 +36,7 @@ mark_free(struct drm_i915_gem_object *obj, struct list_head *unwind)
 {
        list_add(&obj->evict_list, unwind);
        drm_gem_object_reference(&obj->base);
-       return drm_mm_scan_add_block(obj->gtt_space);
+       return drm_mm_scan_add_block(&obj->gtt_space);
 }

 int
@@ -128,7 +128,7 @@ i915_gem_evict_something(struct drm_device *dev, int min_size,

        /* Nothing found, clean up and bail out! */
        list_for_each_entry(obj, &unwind_list, evict_list) {
-               ret = drm_mm_scan_remove_block(obj->gtt_space);
+               ret = drm_mm_scan_remove_block(&obj->gtt_space);
                BUG_ON(ret);
                drm_gem_object_unreference(&obj->base);
        }
@@ -147,7 +147,7 @@ found:
                obj = list_first_entry(&unwind_list,
                                       struct drm_i915_gem_object,
                                       evict_list);
-               if (drm_mm_scan_remove_block(obj->gtt_space)) {
+               if (drm_mm_scan_remove_block(&obj->gtt_space)) {
                        list_move(&obj->evict_list, &eviction_list);
                        continue;
                }
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index 71c2b0f..d4537b5 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.c
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -40,11 +40,11 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev)

                        intel_gtt_insert_sg_entries(obj->sg_list,
                                                    obj->num_sg,
-                                                   obj->gtt_space->start
+                                                   obj->gtt_space.start
                                                        >> PAGE_SHIFT,
                                                    obj->agp_type);
                } else
-                       intel_gtt_insert_pages(obj->gtt_space->start
+                       intel_gtt_insert_pages(obj->gtt_space.start
                                                   >> PAGE_SHIFT,
                                               obj->base.size >> PAGE_SHIFT,
                                               obj->pages,
@@ -71,10 +71,10 @@ int i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj)

                intel_gtt_insert_sg_entries(obj->sg_list,
                                            obj->num_sg,
-                                           obj->gtt_space->start >> PAGE_SHIFT,
+                                           obj->gtt_space.start >> PAGE_SHIFT,
                                            obj->agp_type);
        } else
-               intel_gtt_insert_pages(obj->gtt_space->start >> PAGE_SHIFT,
+               intel_gtt_insert_pages(obj->gtt_space.start >> PAGE_SHIFT,
                                       obj->base.size >> PAGE_SHIFT,
                                       obj->pages,
                                       obj->agp_type);
@@ -93,6 +93,6 @@ void i915_gem_gtt_unbind_object(struct drm_i915_gem_object *obj)
                obj->num_sg = 0;
        }

-       intel_gtt_clear_range(obj->gtt_space->start >> PAGE_SHIFT,
+       intel_gtt_clear_range(obj->gtt_space.start >> PAGE_SHIFT,
                              obj->base.size >> PAGE_SHIFT);
 }
-- 
1.7.1
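
For reference, the pattern this patch converts to, reduced to a minimal sketch: the drm_mm_node is embedded in the object instead of being handed out by drm_mm_search_free()/drm_mm_get_block(), so binding becomes drm_mm_insert_node() (or drm_mm_insert_node_in_range()) into the embedded node, unbinding becomes drm_mm_remove_node(), and drm_mm_node_allocated() replaces the NULL-pointer check. Only the drm_mm calls visible in the diff are assumed; the example_* names below are illustrative stand-ins, not part of the patch:

#include <drm/drm_mm.h>

struct example_obj {
        struct drm_mm_node node;        /* embedded: no separate allocation */
};

/* Bind: reserve GTT space directly into the embedded node. */
static int example_bind(struct drm_mm *mm, struct example_obj *obj,
                        unsigned long size, unsigned alignment)
{
        /* replaces drm_mm_search_free() + drm_mm_get_block() */
        return drm_mm_insert_node(mm, &obj->node, size, alignment);
}

/* Unbind: only valid while the node is actually allocated. */
static void example_unbind(struct example_obj *obj)
{
        if (drm_mm_node_allocated(&obj->node))
                drm_mm_remove_node(&obj->node); /* was drm_mm_put_block() */
}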
