 static void __init cma_activate_area(struct cma *cma)
 {
-        unsigned long pfn, end_pfn;
+        unsigned long pfn, end_pfn, early_pfn[CMA_MAX_RANGES];
         int allocrange, r;
         struct cma_memrange *cmr;
         unsigned long bitmap_count, count;
 
         for (allocrange = 0; allocrange < cma->nranges; allocrange++) {
                 cmr = &cma->ranges[allocrange];
+                early_pfn[allocrange] = cmr->early_pfn;
                 cmr->bitmap = bitmap_zalloc(cma_bitmap_maxno(cma, cmr),
                                             GFP_KERNEL);
                 if (!cmr->bitmap)
@@ ... @@ static void __init cma_activate_area(struct cma *cma)
         for (r = 0; r < cma->nranges; r++) {
                 cmr = &cma->ranges[r];
-                if (cmr->early_pfn != cmr->base_pfn) {
-                        count = cmr->early_pfn - cmr->base_pfn;
+                if (early_pfn[r] != cmr->base_pfn) {
+                        count = early_pfn[r] - cmr->base_pfn;
                         bitmap_count = cma_bitmap_pages_to_bits(cma, count);
                         bitmap_set(cmr->bitmap, 0, bitmap_count);
                 }
 
-                for (pfn = cmr->early_pfn; pfn < cmr->base_pfn + cmr->count;
+                for (pfn = early_pfn[r]; pfn < cmr->base_pfn + cmr->count;
                      pfn += pageblock_nr_pages)
                         init_cma_reserved_pageblock(pfn_to_page(pfn));
         }
@@ ... @@ static void __init cma_activate_area(struct cma *cma)
                 for (r = 0; r < allocrange; r++) {
                         cmr = &cma->ranges[r];
                         end_pfn = cmr->base_pfn + cmr->count;
-                        for (pfn = cmr->early_pfn; pfn < end_pfn; pfn++)
+                        for (pfn = early_pfn[r]; pfn < end_pfn; pfn++)
                                 free_reserved_page(pfn_to_page(pfn));
                 }
         }
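
Note on why the temporary array is needed: the patch treats cmr->early_pfn as unusable once cmr->bitmap has been assigned, which is why every later reader switches to the saved early_pfn[r]. That only makes sense if the two fields occupy the same storage in struct cma_memrange. The sketch below shows the assumed layout; it uses only the field names visible in the diff, and the authoritative definition (in mm/cma.h) may differ in detail:

        /* Rough sketch of the per-range descriptor, not the verbatim kernel definition. */
        struct cma_memrange {
                unsigned long base_pfn;         /* first pfn of this range */
                unsigned long count;            /* number of pages in this range */
                union {
                        /* meaningful until activation: first pfn not handed out to early users */
                        unsigned long early_pfn;
                        /* meaningful once the range is activated: allocation bitmap */
                        unsigned long *bitmap;
                };
        };

With the boundary pfns parked in the on-stack early_pfn[CMA_MAX_RANGES] array, all bitmaps can be allocated up front while the early-reserved head of each range is still accounted for afterwards: its bits are set in the bitmap (through cma_bitmap_pages_to_bits(), since each bitmap bit tracks a group of pages rather than a single page), the remainder of the range is initialized as CMA pageblocks, and on activation failure the pages that were never handed out early are returned to the buddy allocator with free_reserved_page().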