 	list_for_each_entry(m, &vmcore_list, list) {
 		if (*fpos < m->offset + m->size) {
-			tsz = min_t(size_t, m->offset + m->size - *fpos, buflen);
+			tsz = (size_t)min_t(unsigned long long,
+					    m->offset + m->size - *fpos,
+					    buflen);
 			start = m->paddr + *fpos - m->offset;
 			tmp = read_from_oldmem(buffer, tsz, &start, userbuf);
 			if (tmp < 0)
 		if (start < m->offset + m->size) {
 			u64 paddr = 0;
-			tsz = min_t(size_t, m->offset + m->size - start, size);
+			tsz = (size_t)min_t(unsigned long long,
+					    m->offset + m->size - start, size);
 			paddr = m->paddr + start - m->offset;
 			if (vmcore_remap_oldmem_pfn(vma, vma->vm_start + len,
 						    paddr >> PAGE_SHIFT, tsz,
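
Both hunks (one in the /proc/vmcore read path, one in its mmap path) make the same change: do the min() comparison in unsigned long long and only then cast the bounded result to size_t. The sketch below is a standalone illustration of why the comparison type matters; it uses a simplified stand-in for the kernel's min_t() macro and made-up values, not the actual vmcore code.

/* Standalone sketch: why min_t(size_t, ...) is unsafe here on 32-bit.
 *
 * On a 32-bit kernel size_t is 32 bits, while the remaining length of a
 * vmcore region (m->offset + m->size - *fpos) is a 64-bit quantity.
 * min_t(size_t, ...) casts both operands to size_t *before* comparing,
 * so a remaining length above 4 GiB is silently truncated and the
 * "minimum" can come out wrong.  Comparing in unsigned long long and
 * casting the already-bounded result avoids the truncation.
 *
 * min_t below is a simplified stand-in for the kernel macro, and the
 * numbers are invented for illustration.
 */
#include <stdio.h>
#include <stdint.h>

/* Simplified stand-in for the kernel's min_t(): cast both operands to
 * 'type', then return the smaller one. */
#define min_t(type, x, y) ((type)(x) < (type)(y) ? (type)(x) : (type)(y))

int main(void)
{
	unsigned long long remaining = 0x100001000ULL; /* ~4 GiB + 4 KiB left   */
	uint32_t buflen = 0x2000;                      /* caller asks for 8 KiB */

	/* Buggy pattern: compare as a 32-bit type (size_t on 32-bit kernels).
	 * 'remaining' truncates to 0x1000, so tsz comes out as 4 KiB. */
	uint32_t bad = min_t(uint32_t, remaining, buflen);

	/* Fixed pattern: compare in 64 bits, then cast the result.  The result
	 * is at most buflen, so the final cast to a 32-bit type is safe. */
	uint32_t good = (uint32_t)min_t(unsigned long long, remaining, buflen);

	printf("bad  tsz = %#x\n", bad);   /* 0x1000 (wrong)   */
	printf("good tsz = %#x\n", good);  /* 0x2000 (correct) */
	return 0;
}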