mm/page_alloc: remove unnecessary parentheses around &zone->free_area[...] — just removing what isn't necessary for human comprehension.

Signed-off-by: Joe Perches <[email protected]>
---
 mm/page_alloc.c | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mm/page_alloc.c b/mm/page_alloc.c
index b6605b077053..efc3184aa6bc 100644
--- a/mm/page_alloc.c
+++ b/mm/page_alloc.c
@@ -1806,7 +1806,7 @@ struct page *__rmqueue_smallest(struct zone *zone, unsigned int order,
 
        /* Find a page of the appropriate size in the preferred list */
        for (current_order = order; current_order < MAX_ORDER; ++current_order) {
-               area = &(zone->free_area[current_order]);
+               area = &zone->free_area[current_order];
                page = list_first_entry_or_null(&area->free_list[migratetype],
                                                struct page, lru);
                if (!page)
@@ -2158,7 +2158,7 @@ static bool unreserve_highatomic_pageblock(const struct alloc_context *ac,
 
                spin_lock_irqsave(&zone->lock, flags);
                for (order = 0; order < MAX_ORDER; order++) {
-                       struct free_area *area = &(zone->free_area[order]);
+                       struct free_area *area = &zone->free_area[order];
 
                        page = list_first_entry_or_null(
                                &area->free_list[MIGRATE_HIGHATOMIC],
@@ -2228,7 +2228,7 @@ __rmqueue_fallback(struct zone *zone, unsigned int order, int start_migratetype)
        for (current_order = MAX_ORDER - 1;
             current_order >= order && current_order <= MAX_ORDER - 1;
             --current_order) {
-               area = &(zone->free_area[current_order]);
+               area = &zone->free_area[current_order];
                fallback_mt = find_suitable_fallback(area, current_order,
                                                     start_migratetype, false,
                                                     &can_steal);
-- 
2.10.0.rc2.1.g053435c

Reply via email to