 	struct ion_carveout_heap *carveout_heap =
 		container_of(heap, struct ion_carveout_heap, heap);
-	data->size = carveout_heap->size;
+	data->size = (__u32)carveout_heap->size;
 	if (carveout_heap->secure)
 		data->heap_flags |= ION_HEAPDATA_FLAGS_ALLOW_PROTECTION;
 	if (carveout_heap->untouchable)
 		data->heap_flags |= ION_HEAPDATA_FLAGS_UNTOUCHABLE;
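The cast above makes the narrowing from the heap's native size type to the __u32 field explicit rather than implicit, which is what conversion warnings flag. On a 64-bit kernel it still truncates silently once the carveout reaches 4 GiB; a minimal userspace sketch of that failure mode (the values and variable names are illustrative, not from the driver):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Hypothetical 5 GiB carveout: too large for 32 bits. */
	uint64_t heap_size = 5ULL << 30;
	/* Explicit narrowing, same shape as the patch. */
	uint32_t reported = (uint32_t)heap_size;

	/* Prints 1073741824, not 5368709120: the high bits are lost. */
	printf("reported size: %u\n", (unsigned int)reported);
	return 0;
}

If carveouts that large can ever be configured, a range check before the assignment would be safer than the bare cast.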
 	ion_reserved_mem[reserved_mem_count].base = rmem->base;
 	ion_reserved_mem[reserved_mem_count].size = rmem->size;
 	ion_reserved_mem[reserved_mem_count].heapname = heapname;
-	ion_reserved_mem[reserved_mem_count].alloc_align = alloc_align;
+	ion_reserved_mem[reserved_mem_count].alloc_align = (unsigned int)alloc_align;
 	ion_reserved_mem[reserved_mem_count].protection_id = protection_id;
 	ion_reserved_mem[reserved_mem_count].secure = secure;
 	ion_reserved_mem[reserved_mem_count].untouchable = untch;
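The reserved-memory table narrows alloc_align the same way. Since the value typically arrives from the device tree as a 64-bit quantity, a guard that rejects out-of-range alignments instead of truncating them may be preferable; a sketch under that assumption (the helper name is hypothetical, not part of the driver):

#include <limits.h>
#include <stdint.h>

/* Sketch: validate before narrowing instead of truncating silently. */
int align_fits_uint(uint64_t alloc_align)
{
	/* True when the (unsigned int) cast loses nothing. */
	return alloc_align <= UINT_MAX;
}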
 			    unsigned long flags)
 {
 	struct ion_hpa_heap *hpa_heap = to_hpa_heap(heap);
-	unsigned int count = ION_HPA_PAGE_COUNT((unsigned int)len, hpa_heap);
+	unsigned int count =
+		(unsigned int)ION_HPA_PAGE_COUNT((unsigned int)len, hpa_heap);
 	bool zero = !(flags & ION_FLAG_NOZEROED);
 	bool cacheflush = !(flags & ION_FLAG_CACHED) ||
 			  ((flags & ION_FLAG_SYNC_FORCE) != 0);
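Here the cast wraps the whole macro result: even when ION_HPA_PAGE_COUNT's operands are unsigned int, its internal arithmetic can promote to a wider type, and the outer cast documents the narrowing back to count's type. For reference, a page-count macro of this shape usually rounds the requested length up to the heap's allocation granule; a sketch under that assumption (the shift value is hypothetical and the real ION_HPA_PAGE_COUNT may well differ):

#include <stdint.h>

/* Hypothetical granule: 64 KiB, i.e. order-4 pages of 4 KiB each. */
#define HPA_PAGE_SHIFT	16

/* Round len up to whole granules; mirrors the shape of ION_HPA_PAGE_COUNT. */
static inline unsigned int hpa_page_count(uint64_t len)
{
	return (unsigned int)((len + (1ULL << HPA_PAGE_SHIFT) - 1) >> HPA_PAGE_SHIFT);
}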