struct drm_mm vram;
/** Memory allocator for GTT */
struct drm_mm gtt_space;
+ /** End of mappable part of GTT */
+ unsigned long gtt_mappable_end;
struct io_mapping *gtt_mapping;
int gtt_mtrr;
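
The new gtt_mappable_end field records where the CPU-visible part of the GTT ends. The GTT can be larger than the aperture exposed through gtt_mapping, so any object that must stay reachable by the CPU has to be bound below this boundary.
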
void i915_gem_shrinker_exit(void);
/* i915_gem_evict.c */
-int i915_gem_evict_something(struct drm_device *dev, int min_size, unsigned alignment);
+int i915_gem_evict_something(struct drm_device *dev, int min_size,
+ unsigned alignment, bool mappable);
int i915_gem_evict_everything(struct drm_device *dev);
int i915_gem_evict_inactive(struct drm_device *dev);
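
The prototype change threads a mappable flag through i915_gem_evict_something(), so callers can demand that the hole opened up by eviction lie inside the CPU-visible aperture rather than anywhere in the GTT.
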
drm_mm_init(&dev_priv->mm.gtt_space, start, end - start);
dev_priv->mm.gtt_total = end - start;
+ dev_priv->mm.gtt_mappable_end = end;
return 0;
}
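
In the GTT setup path (apparently i915_gem_do_init()) the new field is simply set to the end of the managed range, so for now the whole GTT counts as mappable and behaviour is unchanged; presumably a follow-up passes in the real aperture bound on chipsets where the GTT extends past it.
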
struct drm_device *dev = obj->dev;
ret = i915_gem_evict_something(dev, obj->size,
- i915_gem_get_gtt_alignment(obj));
+ i915_gem_get_gtt_alignment(obj),
+ false);
if (ret)
return ret;
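
This call site only needs a hole somewhere in the GTT, so it passes mappable=false and lets eviction consider the full range.
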
/* If the gtt is empty and we're still having trouble
* fitting our object in, we're out of memory.
*/
- ret = i915_gem_evict_something(dev, obj->size, alignment);
+ ret = i915_gem_evict_something(dev, obj->size, alignment, true);
if (ret)
return ret;
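
Here, by contrast, mappable=true restricts both the free-space search and the eviction scan to the range below gtt_mappable_end.
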
if (ret == -ENOMEM) {
/* first try to clear up some space from the GTT */
ret = i915_gem_evict_something(dev, obj->size,
- alignment);
+ alignment, true);
if (ret) {
/* now try to shrink everyone else */
if (gfpmask) {
drm_mm_put_block(obj_priv->gtt_space);
obj_priv->gtt_space = NULL;
- ret = i915_gem_evict_something(dev, obj->size, alignment);
+ ret = i915_gem_evict_something(dev, obj->size, alignment, true);
if (ret)
return ret;
}
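
The remaining converted call sites also pass true, the conservative choice: with gtt_mappable_end currently initialised to the end of the GTT they behave exactly as before, but they will automatically honour a smaller aperture bound once one is set.
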
int
-i915_gem_evict_something(struct drm_device *dev, int min_size, unsigned alignment)
+i915_gem_evict_something(struct drm_device *dev, int min_size,
+ unsigned alignment, bool mappable)
{
drm_i915_private_t *dev_priv = dev->dev_private;
struct list_head eviction_list, unwind_list;
i915_gem_retire_requests(dev);
/* Re-check for free space after retiring requests */
- if (drm_mm_search_free(&dev_priv->mm.gtt_space,
- min_size, alignment, 0))
- return 0;
+ if (mappable) {
+ if (drm_mm_search_free_in_range(&dev_priv->mm.gtt_space,
+ min_size, alignment, 0,
+ dev_priv->mm.gtt_mappable_end,
+ 0))
+ return 0;
+ } else {
+ if (drm_mm_search_free(&dev_priv->mm.gtt_space,
+ min_size, alignment, 0))
+ return 0;
+ }
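
drm_mm_search_free_in_range() works like drm_mm_search_free() except that it only reports holes that can hold a min_size, suitably aligned block entirely inside [0, gtt_mappable_end); a hole straddling the aperture boundary therefore does not satisfy a mappable request.
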
/*
* The goal is to evict objects and amalgamate space in LRU order.
*/
INIT_LIST_HEAD(&unwind_list);
- drm_mm_init_scan(&dev_priv->mm.gtt_space, min_size, alignment);
+ if (mappable)
+ drm_mm_init_scan_with_range(&dev_priv->mm.gtt_space, min_size,
+ alignment, 0,
+ dev_priv->mm.gtt_mappable_end);
+ else
+ drm_mm_init_scan(&dev_priv->mm.gtt_space, min_size, alignment);
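
drm_mm_init_scan_with_range() arms the same LRU eviction scan as drm_mm_init_scan(), restricted to the given range. For reference, a minimal sketch of the scan protocol these helpers drive, assuming the drm_mm API of this era (illustrative only; obj, lru_list and the found label are placeholders):

	drm_mm_init_scan(&dev_priv->mm.gtt_space, min_size, alignment);
	list_for_each_entry(obj, lru_list, list) {
		/* Returns non-zero once the blocks fed to the scan add up
		 * to a large enough contiguous hole. */
		if (drm_mm_scan_add_block(obj->gtt_space))
			goto found;
	}
	/* Every block added to the scan must be removed again with
	 * drm_mm_scan_remove_block(); it returns non-zero for blocks that
	 * overlap the hole, i.e. the ones that actually need evicting. */
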
/* First see if there is a large enough contiguous idle region... */
list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, mm_list) {