{
}
+static void *ion_dma_buf_vmap(struct dma_buf *dmabuf)
+{
+	struct ion_buffer *buffer = dmabuf->priv;
+	void *vaddr = NULL;
+
+	/* Heaps without a map_kernel op cannot provide a kernel mapping. */
+	if (buffer->heap->ops->map_kernel) {
+		mutex_lock(&buffer->lock);
+		vaddr = ion_buffer_kmap_get(buffer);
+		mutex_unlock(&buffer->lock);
+	}
+
+	/* dma_buf_vmap() callers expect NULL on failure, not an ERR_PTR. */
+	if (IS_ERR(vaddr))
+		return NULL;
+
+	return vaddr;
+}
+
+static void ion_dma_buf_vunmap(struct dma_buf *dmabuf, void *ptr)
+{
+	struct ion_buffer *buffer = dmabuf->priv;
+
+	if (buffer->heap->ops->map_kernel) {
+		mutex_lock(&buffer->lock);
+		ion_buffer_kmap_put(buffer);
+		mutex_unlock(&buffer->lock);
+	}
+}
+
static int ion_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,
					enum dma_data_direction direction)
{
	.unmap_atomic = ion_dma_buf_kunmap,
	.map = ion_dma_buf_kmap,
	.unmap = ion_dma_buf_kunmap,
+	.vmap = ion_dma_buf_vmap,
+	.vunmap = ion_dma_buf_vunmap,
};
int ion_alloc(size_t len, unsigned int heap_id_mask, unsigned int flags)
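
For reference, a minimal sketch of how an importing driver would exercise the
new ops through the dma-buf core, assuming the pre-5.11 void * vmap API that
this patch targets; import_buffer() is a hypothetical helper, not part of the
patch:

#include <linux/dma-buf.h>

/* Hypothetical importer: map the buffer into the kernel, touch it, unmap. */
static int import_buffer(struct dma_buf *dmabuf)
{
	void *vaddr;

	/* Lands in ion_dma_buf_vmap(), which takes a kmap reference. */
	vaddr = dma_buf_vmap(dmabuf);
	if (!vaddr)
		return -ENOMEM;

	/* ... CPU access to the buffer contents via vaddr ... */

	/* Lands in ion_dma_buf_vunmap(), dropping the reference. */
	dma_buf_vunmap(dmabuf, vaddr);
	return 0;
}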