From 35d850faa406c7663ed2b2d137dcca25b8dd4d8c Mon Sep 17 00:00:00 2001
From: Ben Widawsky <ben@bwidawsk.net>
Date: Wed, 14 Aug 2013 11:38:35 +0200
Subject: drm/i915: prepare bind_to_vm for preallocated vma

In the new execbuf code we want to track buffers using the vmas even
before they're all properly mapped, which means that bind_to_vm needs
to deal with buffers that have preallocated vmas which aren't yet
bound.

This patch implements that prep work and adjusts our WARN/BUG checks.
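
To illustrate the calling pattern this enables (a minimal sketch only:
the execbuf-style caller below is hypothetical, while the
i915_gem_obj_lookup_or_create_vma() helper is the one added by this
patch):

	/* Hypothetical caller: reserve a vma before the object is bound. */
	struct i915_vma *vma;

	vma = i915_gem_obj_lookup_or_create_vma(obj, vm);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/*
	 * vma now sits on obj->vma_list but may not be bound yet, so
	 * i915_gem_object_bind_to_vm() has to tolerate a preallocated,
	 * unbound vma instead of WARNing on a non-empty vma_list.
	 */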

Signed-off-by: Ben Widawsky <ben@bwidawsk.net>
[danvet: Split out from Ben's big execbuf patch. Also move one BUG
back to its original place to deflate the diff a notch.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>

(cherry picked from commit accfef2e5a8f713bfa0c06696b5e10754686dc72)
Signed-off-by: Darren Hart <dvhart@linux.intel.com>
---
 drivers/gpu/drm/i915/i915_drv.h |  3 +++
 drivers/gpu/drm/i915/i915_gem.c | 23 +++++++++++++++++------
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 3f1f65865d3c..2b503d58dcd3 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -1913,6 +1913,9 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
 				struct i915_address_space *vm);
 struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
 				     struct i915_address_space *vm);
+struct i915_vma *
+i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
+				  struct i915_address_space *vm);
 /* Some GGTT VM helpers */
 #define obj_to_ggtt(obj) \
 	(&((struct drm_i915_private *)(obj)->base.dev->dev_private)->gtt.base)
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 10e3c536f911..199107e734fb 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -3111,9 +3111,6 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 	struct i915_vma *vma;
 	int ret;
 
-	if (WARN_ON(!list_empty(&obj->vma_list)))
-		return -EBUSY;
-
 	fence_size = i915_gem_get_gtt_size(dev,
 					   obj->base.size,
 					   obj->tiling_mode);
@@ -3152,16 +3149,17 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 
 	i915_gem_object_pin_pages(obj);
 
-	/* FIXME: For now we only ever use 1 VMA per object */
 	BUG_ON(!i915_is_ggtt(vm));
-	WARN_ON(!list_empty(&obj->vma_list));
 
-	vma = i915_gem_vma_create(obj, vm);
+	vma = i915_gem_obj_lookup_or_create_vma(obj, vm);
 	if (IS_ERR(vma)) {
 		ret = PTR_ERR(vma);
 		goto err_unpin;
 	}
 
+	/* For now we only ever use 1 vma per object */
+	WARN_ON(!list_is_singular(&obj->vma_list));
+
 search_free:
 	ret = drm_mm_insert_node_in_range_generic(&vm->mm, &vma->node,
 						  size, alignment,
@@ -4870,3 +4868,16 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
 
 	return NULL;
 }
+
+struct i915_vma *
+i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
+				  struct i915_address_space *vm)
+{
+	struct i915_vma *vma;
+
+	vma = i915_gem_obj_to_vma(obj, vm);
+	if (!vma)
+		vma = i915_gem_vma_create(obj, vm);
+
+	return vma;
+}
-- 
1.8.5.rc3
