@@ -40,39 +40,43 @@ static pgd_t kasan_pg_dir[PTRS_PER_PGD] __initdata __aligned(PAGE_SIZE);
/*
 * __pte_none(early, pte) - test whether a shadow PTE is "empty".
 *
 * During early boot (early == true) an empty PTE is a genuinely clear one
 * (pte_none()).  After early init, every unused shadow slot points at the
 * shared kasan_early_shadow_page instead, so "empty" means the PTE's PFN
 * equals that page's physical address.
 *
 * NOTE: function-like macro — no space between the name and '(' is required
 * for the parameter list to be recognized; both arguments are evaluated once.
 */
#define __pte_none(early, pte) (early ? pte_none(pte) : \
	((pte_val(pte) & _PFN_MASK) == (unsigned long)__pa(kasan_early_shadow_page)))
4242
43- void * kasan_mem_to_shadow (const void * addr )
43+ static void * mem_to_shadow (const void * addr )
4444{
45- if (!kasan_enabled ()) {
45+ unsigned long offset = 0 ;
46+ unsigned long maddr = (unsigned long )addr ;
47+ unsigned long xrange = (maddr >> XRANGE_SHIFT ) & 0xffff ;
48+
49+ if (maddr >= FIXADDR_START )
4650 return (void * )(kasan_early_shadow_page );
47- } else {
48- unsigned long maddr = (unsigned long )addr ;
49- unsigned long xrange = (maddr >> XRANGE_SHIFT ) & 0xffff ;
50- unsigned long offset = 0 ;
51-
52- if (maddr >= FIXADDR_START )
53- return (void * )(kasan_early_shadow_page );
54-
55- maddr &= XRANGE_SHADOW_MASK ;
56- switch (xrange ) {
57- case XKPRANGE_CC_SEG :
58- offset = XKPRANGE_CC_SHADOW_OFFSET ;
59- break ;
60- case XKPRANGE_UC_SEG :
61- offset = XKPRANGE_UC_SHADOW_OFFSET ;
62- break ;
63- case XKPRANGE_WC_SEG :
64- offset = XKPRANGE_WC_SHADOW_OFFSET ;
65- break ;
66- case XKVRANGE_VC_SEG :
67- offset = XKVRANGE_VC_SHADOW_OFFSET ;
68- break ;
69- default :
70- WARN_ON (1 );
71- return NULL ;
72- }
7351
74- return (void * )((maddr >> KASAN_SHADOW_SCALE_SHIFT ) + offset );
52+ maddr &= XRANGE_SHADOW_MASK ;
53+ switch (xrange ) {
54+ case XKPRANGE_CC_SEG :
55+ offset = XKPRANGE_CC_SHADOW_OFFSET ;
56+ break ;
57+ case XKPRANGE_UC_SEG :
58+ offset = XKPRANGE_UC_SHADOW_OFFSET ;
59+ break ;
60+ case XKPRANGE_WC_SEG :
61+ offset = XKPRANGE_WC_SHADOW_OFFSET ;
62+ break ;
63+ case XKVRANGE_VC_SEG :
64+ offset = XKVRANGE_VC_SHADOW_OFFSET ;
65+ break ;
66+ default :
67+ WARN_ON (1 );
68+ return NULL ;
7569 }
70+
71+ return (void * )((maddr >> KASAN_SHADOW_SCALE_SHIFT ) + offset );
72+ }
73+
74+ void * kasan_mem_to_shadow (const void * addr )
75+ {
76+ if (kasan_enabled ())
77+ return mem_to_shadow (addr );
78+ else
79+ return (void * )(kasan_early_shadow_page );
7680}
7781
7882const void * kasan_shadow_to_mem (const void * shadow_addr )
@@ -293,11 +297,8 @@ void __init kasan_init(void)
293297 /* Maps everything to a single page of zeroes */
294298 kasan_pgd_populate (KASAN_SHADOW_START , KASAN_SHADOW_END , NUMA_NO_NODE , true);
295299
296- kasan_populate_early_shadow (kasan_mem_to_shadow ((void * )VMALLOC_START ),
297- kasan_mem_to_shadow ((void * )KFENCE_AREA_END ));
298-
299- /* Enable KASAN here before kasan_mem_to_shadow(). */
300- kasan_init_generic ();
300+ kasan_populate_early_shadow (mem_to_shadow ((void * )VMALLOC_START ),
301+ mem_to_shadow ((void * )KFENCE_AREA_END ));
301302
302303 /* Populate the linear mapping */
303304 for_each_mem_range (i , & pa_start , & pa_end ) {
@@ -307,13 +308,13 @@ void __init kasan_init(void)
307308 if (start >= end )
308309 break ;
309310
310- kasan_map_populate ((unsigned long )kasan_mem_to_shadow (start ),
311- (unsigned long )kasan_mem_to_shadow (end ), NUMA_NO_NODE );
311+ kasan_map_populate ((unsigned long )mem_to_shadow (start ),
312+ (unsigned long )mem_to_shadow (end ), NUMA_NO_NODE );
312313 }
313314
314315 /* Populate modules mapping */
315- kasan_map_populate ((unsigned long )kasan_mem_to_shadow ((void * )MODULES_VADDR ),
316- (unsigned long )kasan_mem_to_shadow ((void * )MODULES_END ), NUMA_NO_NODE );
316+ kasan_map_populate ((unsigned long )mem_to_shadow ((void * )MODULES_VADDR ),
317+ (unsigned long )mem_to_shadow ((void * )MODULES_END ), NUMA_NO_NODE );
317318 /*
318319 * KAsan may reuse the contents of kasan_early_shadow_pte directly, so we
319320 * should make sure that it maps the zero page read-only.
@@ -328,4 +329,5 @@ void __init kasan_init(void)
328329
329330 /* At this point kasan is fully initialized. Enable error messages */
330331 init_task .kasan_depth = 0 ;
332+ kasan_init_generic ();
331333}
0 commit comments