/*
 * drivers/gpu/ion/ion_carveout_heap.c
 *
 * Copyright (C) 2011 Google, Inc.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
#include <linux/spinlock.h>
#include <linux/err.h>
#include <linux/genalloc.h>
#include <linux/io.h>
#include <linux/ion.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>
#include <linux/iommu.h>
#include <linux/dma-mapping.h>
#include <asm/mach/map.h>
#include <asm/cacheflush.h>
35 #define RESERVED_SIZE(total) ((total)/10)
36 struct ion_carveout_heap {
38 struct gen_pool *pool;
41 unsigned long allocated_bytes;
42 unsigned long vpu_allocated_bytes;
43 unsigned long max_allocated;
44 unsigned long total_size;
48 ion_phys_addr_t ion_carveout_allocate(struct ion_heap *heap,
53 struct ion_carveout_heap *carveout_heap =
54 container_of(heap, struct ion_carveout_heap, heap);
56 unsigned long free_size = carveout_heap->total_size - carveout_heap->allocated_bytes;
58 if((flags & (1<<ION_VPU_ID)) &&
59 (free_size < RESERVED_SIZE(carveout_heap->total_size))){
60 printk("%s: heap %s has not enough memory for vpu: vpu allocated(%luM)\n",
61 __func__, heap->name, carveout_heap->vpu_allocated_bytes/SZ_1M);
62 return ION_CARVEOUT_ALLOCATE_FAIL;
64 offset = gen_pool_alloc(carveout_heap->pool, size);
67 if ((carveout_heap->total_size -
68 carveout_heap->allocated_bytes) > size)
69 printk("%s: heap %s has enough memory (%luK) but"
70 " the allocation of size %lu pages still failed."
71 " Memory is probably fragmented.\n",
73 (carveout_heap->total_size - carveout_heap->allocated_bytes)/SZ_1K,
76 printk("%s: heap %s has not enough memory(%luK)"
77 "the alloction of size is %luK.\n",
79 (carveout_heap->total_size - carveout_heap->allocated_bytes)/SZ_1K,
81 return ION_CARVEOUT_ALLOCATE_FAIL;
84 if(flags & (1<<ION_VPU_ID))
85 carveout_heap->vpu_allocated_bytes += size;
86 carveout_heap->allocated_bytes += size;
88 if((offset + size - carveout_heap->base) > carveout_heap->max_allocated)
89 carveout_heap->max_allocated = offset + size - carveout_heap->base;
91 bitmap_set(carveout_heap->bits,
92 (offset - carveout_heap->base)/PAGE_SIZE , size/PAGE_SIZE);
96 void ion_carveout_free(struct ion_heap *heap, ion_phys_addr_t addr,
97 unsigned long size, unsigned long flags)
99 struct ion_carveout_heap *carveout_heap =
100 container_of(heap, struct ion_carveout_heap, heap);
102 if (addr == ION_CARVEOUT_ALLOCATE_FAIL)
104 gen_pool_free(carveout_heap->pool, addr, size);
105 if(flags & (1<<ION_VPU_ID))
106 carveout_heap->vpu_allocated_bytes -= size;
107 carveout_heap->allocated_bytes -= size;
108 bitmap_clear(carveout_heap->bits,
109 (addr - carveout_heap->base)/PAGE_SIZE, size/PAGE_SIZE);
112 static int ion_carveout_heap_phys(struct ion_heap *heap,
113 struct ion_buffer *buffer,
114 ion_phys_addr_t *addr, size_t *len)
116 *addr = buffer->priv_phys;
121 static int ion_carveout_heap_allocate(struct ion_heap *heap,
122 struct ion_buffer *buffer,
123 unsigned long size, unsigned long align,
126 buffer->priv_phys = ion_carveout_allocate(heap, size, align, flags);
127 return buffer->priv_phys == ION_CARVEOUT_ALLOCATE_FAIL ? -ENOMEM : 0;
130 static void ion_carveout_heap_free(struct ion_buffer *buffer)
132 struct ion_heap *heap = buffer->heap;
134 ion_carveout_free(heap, buffer->priv_phys, buffer->size, buffer->flags);
135 buffer->priv_phys = ION_CARVEOUT_ALLOCATE_FAIL;
138 struct scatterlist *ion_carveout_heap_map_dma(struct ion_heap *heap,
139 struct ion_buffer *buffer)
141 return ERR_PTR(-EINVAL);
/* Nothing to undo: map_dma never succeeds for this heap. */
void ion_carveout_heap_unmap_dma(struct ion_heap *heap,
				 struct ion_buffer *buffer)
{
}
150 void *ion_carveout_heap_map_kernel(struct ion_heap *heap,
151 struct ion_buffer *buffer)
153 return __arch_ioremap(buffer->priv_phys, buffer->size,
154 MT_MEMORY_NONCACHED);
157 void ion_carveout_heap_unmap_kernel(struct ion_heap *heap,
158 struct ion_buffer *buffer)
160 __arch_iounmap(buffer->vaddr);
161 buffer->vaddr = NULL;
165 int ion_carveout_heap_map_user(struct ion_heap *heap, struct ion_buffer *buffer,
166 struct vm_area_struct *vma, unsigned long flags)
169 if (ION_IS_CACHED(flags))
170 err = remap_pfn_range(vma, vma->vm_start,
171 __phys_to_pfn(buffer->priv_phys) + vma->vm_pgoff,
172 vma->vm_end - vma->vm_start,
175 err = remap_pfn_range(vma, vma->vm_start,
176 __phys_to_pfn(buffer->priv_phys) + vma->vm_pgoff,
177 vma->vm_end - vma->vm_start,
178 pgprot_noncached(vma->vm_page_prot));
182 int ion_carveout_cache_op(struct ion_heap *heap, struct ion_buffer *buffer,
183 void *virt, size_t size, unsigned int cmd)
185 unsigned long start, end;
187 start = (unsigned long)virt;
190 case ION_CACHE_FLUSH:
191 dmac_flush_range((void *)start, (void *)end);
192 outer_flush_range(buffer->priv_phys,buffer->priv_phys + size);
194 case ION_CACHE_CLEAN:
195 /* When cleaning, always clean the innermost (L1) cache first
196 * and then clean the outer cache(s).
198 dmac_clean_range((void *)start, (void *)end);
199 outer_clean_range(buffer->priv_phys,buffer->priv_phys + size);
201 case ION_CACHE_INVALID:
202 /* When invalidating, always invalidate the outermost cache first
203 * and the L1 cache last.
205 outer_inv_range(buffer->priv_phys,buffer->priv_phys + size);
206 dmac_inv_range((void *)start, (void *)end);
214 static int ion_carveout_print_debug(struct ion_heap *heap, struct seq_file *s)
217 struct ion_carveout_heap *carveout_heap =
218 container_of(heap, struct ion_carveout_heap, heap);
220 for(i = carveout_heap->bit_nr/8 - 1; i>= 0; i--){
221 seq_printf(s, "%.3uM> Bits[%.3d - %.3d]: %08lx %08lx %08lx %08lx %08lx %08lx %08lx %08lx\n",
223 carveout_heap->bits[i*8 + 7],
224 carveout_heap->bits[i*8 + 6],
225 carveout_heap->bits[i*8 + 5],
226 carveout_heap->bits[i*8 + 4],
227 carveout_heap->bits[i*8 + 3],
228 carveout_heap->bits[i*8 + 2],
229 carveout_heap->bits[i*8 + 1],
230 carveout_heap->bits[i*8]);
232 seq_printf(s, "VPU allocated: %luM\n",
233 carveout_heap->vpu_allocated_bytes/SZ_1M);
234 seq_printf(s, "Total allocated: %luM\n",
235 carveout_heap->allocated_bytes/SZ_1M);
236 seq_printf(s, "max_allocated: %luM\n",
237 carveout_heap->max_allocated/SZ_1M);
238 seq_printf(s, "Heap size: %luM, heap base: 0x%lx\n",
239 carveout_heap->total_size/SZ_1M, carveout_heap->base);
242 static struct ion_heap_ops carveout_heap_ops = {
243 .allocate = ion_carveout_heap_allocate,
244 .free = ion_carveout_heap_free,
245 .phys = ion_carveout_heap_phys,
246 .map_user = ion_carveout_heap_map_user,
247 .map_kernel = ion_carveout_heap_map_kernel,
248 .unmap_kernel = ion_carveout_heap_unmap_kernel,
249 .cache_op = ion_carveout_cache_op,
250 .print_debug = ion_carveout_print_debug,
253 struct ion_heap *ion_carveout_heap_create(struct ion_platform_heap *heap_data)
255 struct ion_carveout_heap *carveout_heap;
257 carveout_heap = kzalloc(sizeof(struct ion_carveout_heap), GFP_KERNEL);
259 return ERR_PTR(-ENOMEM);
261 carveout_heap->pool = gen_pool_create(12, -1);
262 if (!carveout_heap->pool) {
263 kfree(carveout_heap);
264 return ERR_PTR(-ENOMEM);
266 carveout_heap->base = heap_data->base;
267 gen_pool_add(carveout_heap->pool, carveout_heap->base, heap_data->size,
269 carveout_heap->heap.ops = &carveout_heap_ops;
270 carveout_heap->heap.type = ION_HEAP_TYPE_CARVEOUT;
271 carveout_heap->vpu_allocated_bytes = 0;
272 carveout_heap->allocated_bytes = 0;
273 carveout_heap->max_allocated = 0;
274 carveout_heap->total_size = heap_data->size;
275 carveout_heap->bit_nr = heap_data->size/(PAGE_SIZE * sizeof(unsigned long) * 8);
276 carveout_heap->bits =
277 (unsigned long *)kzalloc(carveout_heap->bit_nr * sizeof(unsigned long), GFP_KERNEL);
279 return &carveout_heap->heap;
282 void ion_carveout_heap_destroy(struct ion_heap *heap)
284 struct ion_carveout_heap *carveout_heap =
285 container_of(heap, struct ion_carveout_heap, heap);
287 gen_pool_destroy(carveout_heap->pool);
288 kfree(carveout_heap->bits);
289 kfree(carveout_heap);
290 carveout_heap = NULL;