Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 05a20d09 authored by Chris Wilson
Browse files

drm/i915: Move map-and-fenceable tracking to the VMA



By moving map-and-fenceable tracking from the object to the VMA, we gain
fine-grained tracking and the ability to track individual fences on the VMA
(subsequent patch).

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Link: http://patchwork.freedesktop.org/patch/msgid/20160818161718.27187-16-chris@chris-wilson.co.uk
parent 9e53d9be
Loading
Loading
Loading
Loading
+0 −6
Original line number Diff line number Diff line
@@ -2192,12 +2192,6 @@ struct drm_i915_gem_object {
	 */
	unsigned int fence_dirty:1;

	/**
	 * Is the object at the current location in the gtt mappable and
	 * fenceable? Used to avoid costly recalculations.
	 */
	unsigned int map_and_fenceable:1;

	/**
	 * Whether the current gtt mapping needs to be mappable (and isn't just
	 * mappable by accident). Track pin and fault separate for a more
+17 −20
Original line number Diff line number Diff line
@@ -2899,8 +2899,7 @@ int i915_vma_unbind(struct i915_vma *vma)
	GEM_BUG_ON(obj->bind_count == 0);
	GEM_BUG_ON(!obj->pages);

	if (i915_vma_is_ggtt(vma) &&
	    vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
	if (i915_vma_is_map_and_fenceable(vma)) {
		i915_gem_object_finish_gtt(obj);

		/* release the fence reg _after_ flushing */
@@ -2909,6 +2908,7 @@ int i915_vma_unbind(struct i915_vma *vma)
			return ret;

		__i915_vma_iounmap(vma);
		vma->flags &= ~I915_VMA_CAN_FENCE;
	}

	if (likely(!vma->vm->closed)) {
@@ -2920,14 +2920,11 @@ int i915_vma_unbind(struct i915_vma *vma)
	drm_mm_remove_node(&vma->node);
	list_move_tail(&vma->vm_link, &vma->vm->unbound_list);

	if (i915_vma_is_ggtt(vma)) {
		if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
			obj->map_and_fenceable = false;
		} else if (vma->pages) {
	if (vma->pages != obj->pages) {
		GEM_BUG_ON(!vma->pages);
		sg_free_table(vma->pages);
		kfree(vma->pages);
	}
	}
	vma->pages = NULL;

	/* Since the unbound list is global, only move to that list if
@@ -3703,8 +3700,6 @@ i915_gem_ring_throttle(struct drm_device *dev, struct drm_file *file)
static bool
i915_vma_misplaced(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
{
	struct drm_i915_gem_object *obj = vma->obj;

	if (!drm_mm_node_allocated(&vma->node))
		return false;

@@ -3714,7 +3709,7 @@ i915_vma_misplaced(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
	if (alignment && vma->node.start & (alignment - 1))
		return true;

	if (flags & PIN_MAPPABLE && !obj->map_and_fenceable)
	if (flags & PIN_MAPPABLE && !i915_vma_is_map_and_fenceable(vma))
		return true;

	if (flags & PIN_OFFSET_BIAS &&
@@ -3736,10 +3731,10 @@ void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
	u32 fence_size, fence_alignment;

	fence_size = i915_gem_get_ggtt_size(dev_priv,
					    obj->base.size,
					    vma->size,
					    i915_gem_object_get_tiling(obj));
	fence_alignment = i915_gem_get_ggtt_alignment(dev_priv,
						      obj->base.size,
						      vma->size,
						      i915_gem_object_get_tiling(obj),
						      true);

@@ -3749,7 +3744,10 @@ void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
	mappable = (vma->node.start + fence_size <=
		    dev_priv->ggtt.mappable_end);

	obj->map_and_fenceable = mappable && fenceable;
	if (mappable && fenceable)
		vma->flags |= I915_VMA_CAN_FENCE;
	else
		vma->flags &= ~I915_VMA_CAN_FENCE;
}

int __i915_vma_do_pin(struct i915_vma *vma,
@@ -3809,12 +3807,11 @@ i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,

		WARN(i915_vma_is_pinned(vma),
		     "bo is already pinned in ggtt with incorrect alignment:"
		     " offset=%08x, req.alignment=%llx, req.map_and_fenceable=%d,"
		     " obj->map_and_fenceable=%d\n",
		     i915_ggtt_offset(vma),
		     alignment,
		     " offset=%08x, req.alignment=%llx,"
		     " req.map_and_fenceable=%d, vma->map_and_fenceable=%d\n",
		     i915_ggtt_offset(vma), alignment,
		     !!(flags & PIN_MAPPABLE),
		     obj->map_and_fenceable);
		     i915_vma_is_map_and_fenceable(vma));
		ret = i915_vma_unbind(vma);
		if (ret)
			return ERR_PTR(ret);
+2 −2
Original line number Diff line number Diff line
@@ -857,7 +857,6 @@ static bool
eb_vma_misplaced(struct i915_vma *vma)
{
	struct drm_i915_gem_exec_object2 *entry = vma->exec_entry;
	struct drm_i915_gem_object *obj = vma->obj;

	WARN_ON(entry->flags & __EXEC_OBJECT_NEEDS_MAP &&
		!i915_vma_is_ggtt(vma));
@@ -878,7 +877,8 @@ eb_vma_misplaced(struct i915_vma *vma)
		return true;

	/* avoid costly ping-pong once a batch bo ended up non-mappable */
	if (entry->flags & __EXEC_OBJECT_NEEDS_MAP && !obj->map_and_fenceable)
	if (entry->flags & __EXEC_OBJECT_NEEDS_MAP &&
	    !i915_vma_is_map_and_fenceable(vma))
		return !only_mappable_for_reloc(entry->flags);

	if ((entry->flags & EXEC_OBJECT_SUPPORTS_48B_ADDRESS) == 0 &&
+3 −4
Original line number Diff line number Diff line
@@ -130,7 +130,9 @@ static void i915_write_fence_reg(struct drm_device *dev, int reg,
		     !is_power_of_2(vma->node.size) ||
		     (vma->node.start & (vma->node.size - 1)),
		     "object 0x%08llx [fenceable? %d] not 1M or pot-size (0x%08llx) aligned\n",
		     vma->node.start, obj->map_and_fenceable, vma->node.size);
		     vma->node.start,
		     i915_vma_is_map_and_fenceable(vma),
		     vma->node.size);

		if (tiling == I915_TILING_Y && HAS_128_BYTE_Y_TILING(dev))
			tile_width = 128;
@@ -389,9 +391,6 @@ i915_gem_object_get_fence(struct drm_i915_gem_object *obj)
			return 0;
		}
	} else if (enable) {
		if (WARN_ON(!obj->map_and_fenceable))
			return -EINVAL;

		reg = i915_find_fence_reg(dev);
		if (IS_ERR(reg))
			return PTR_ERR(reg);
+1 −1
Original line number Diff line number Diff line
@@ -3671,7 +3671,7 @@ void __iomem *i915_vma_pin_iomap(struct i915_vma *vma)
	assert_rpm_wakelock_held(to_i915(vma->vm->dev));

	lockdep_assert_held(&vma->vm->dev->struct_mutex);
	if (WARN_ON(!vma->obj->map_and_fenceable))
	if (WARN_ON(!i915_vma_is_map_and_fenceable(vma)))
		return IO_ERR_PTR(-ENODEV);

	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
Loading