Extend the movable_node option to movable_node=nn[KMG]@ss[KMG]. In the current code, KASLR may choose a memory region in hot-pluggable nodes. With this extension we can specify regions in immovable nodes, and store those regions in immovable_mem.
Multiple regions can be specified, comma delimited. Considering memory
usage, only up to 4 regions are supported. 4 regions cover at least 2
nodes, which is enough for the kernel to be extracted.

Signed-off-by: Chao Fan <fanc.f...@cn.fujitsu.com>
---
 arch/x86/boot/compressed/kaslr.c | 63 +++++++++++++++++++++++++++++++++++++++-
 1 file changed, 62 insertions(+), 1 deletion(-)

diff --git a/arch/x86/boot/compressed/kaslr.c b/arch/x86/boot/compressed/kaslr.c
index 17818ba6906f..3c1f5204693b 100644
--- a/arch/x86/boot/compressed/kaslr.c
+++ b/arch/x86/boot/compressed/kaslr.c
@@ -107,6 +107,12 @@ enum mem_avoid_index {
 
 static struct mem_vector mem_avoid[MEM_AVOID_MAX];
 
+/* Only supporting at most 4 immovable memory regions with kaslr */
+#define MAX_IMMOVABLE_MEM	4
+
+static struct mem_vector immovable_mem[MAX_IMMOVABLE_MEM];
+static int num_immovable_region;
+
 static bool mem_overlaps(struct mem_vector *one, struct mem_vector *two)
 {
 	/* Item one is entirely before item two. */
@@ -167,6 +173,28 @@ parse_memmap(char *p, unsigned long long *start, unsigned long long *size)
 	return -EINVAL;
 }
 
+static int parse_immovable_mem(char *p,
+			       unsigned long long *start,
+			       unsigned long long *size)
+{
+	char *oldp;
+
+	if (!p)
+		return -EINVAL;
+
+	oldp = p;
+	*size = memparse(p, &p);
+	if (p == oldp)
+		return -EINVAL;
+
+	if (*p == '@') {
+		*start = memparse(p + 1, &p);
+		return 0;
+	}
+
+	return -EINVAL;
+}
+
 static void mem_avoid_memmap(char *str)
 {
 	static int i;
@@ -206,6 +234,36 @@ static void mem_avoid_memmap(char *str)
 	memmap_too_large = true;
 }
 
+#ifdef CONFIG_MEMORY_HOTPLUG
+static void mem_mark_immovable(char *str)
+{
+	int i = 0;
+
+	while (str && (i < MAX_IMMOVABLE_MEM)) {
+		int rc;
+		unsigned long long start, size;
+		char *k = strchr(str, ',');
+
+		if (k)
+			*k++ = 0;
+
+		rc = parse_immovable_mem(str, &start, &size);
+		if (rc < 0)
+			break;
+		str = k;
+
+		immovable_mem[i].start = start;
+		immovable_mem[i].size = size;
+		i++;
+	}
+	num_immovable_region = i;
+}
+#else
+static inline void mem_mark_immovable(char *str)
+{
+}
+#endif
+
 static int handle_mem_memmap(void)
 {
 	char *args = (char *)get_cmd_line_ptr();
@@ -214,7 +272,8 @@ static int handle_mem_memmap(void)
 	char *param, *val;
 	u64 mem_size;
 
-	if (!strstr(args, "memmap=") && !strstr(args, "mem="))
+	if (!strstr(args, "memmap=") && !strstr(args, "mem=") &&
+	    !strstr(args, "movable_node="))
 		return 0;
 
 	tmp_cmdline = malloc(len + 1);
@@ -239,6 +298,8 @@ static int handle_mem_memmap(void)
 
 		if (!strcmp(param, "memmap")) {
 			mem_avoid_memmap(val);
+		} else if (!strcmp(param, "movable_node")) {
+			mem_mark_immovable(val);
 		} else if (!strcmp(param, "mem")) {
 			char *p = val;
 
-- 
2.13.6