// SPDX-License-Identifier: GPL-2.0
/*
 * This file contains core generic KASAN code.
 *
 * Copyright (c) 2014 Samsung Electronics Co., Ltd.
 * Author: Andrey Ryabinin <ryabinin.a.a@gmail.com>
 *
 * Some code borrowed from https://github.com/xairy/kasan-prototype by
 *        Andrey Konovalov <andreyknvl@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#define DISABLE_BRANCH_PROFILING

#include <linux/export.h>
#include <linux/interrupt.h>
#include <linux/init.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/kmemleak.h>
#include <linux/linkage.h>
#include <linux/memblock.h>
#include <linux/memory.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/printk.h>
#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/slab.h>
#include <linux/stacktrace.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/vmalloc.h>
#include <linux/bug.h>

#include "kasan.h"
#include "../slab.h"

/*
 * All functions below are always inlined so the compiler can perform
 * better optimizations in each of __asan_loadX/__asan_storeX depending
 * on the memory access size X.
 */

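/*
 * Each shadow byte tracks one KASAN_SHADOW_SCALE_SIZE (8-byte) granule:
 * 0 means the whole granule is accessible, 1..7 means only the first N
 * bytes are accessible, and a negative value marks the granule as
 * poisoned. For a non-zero shadow value, an access is bad once the
 * accessed offset within the granule reaches that value.
 */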
static __always_inline bool memory_is_poisoned_1(unsigned long addr)
{
	s8 shadow_value = *(s8 *)kasan_mem_to_shadow((void *)addr);

	if (unlikely(shadow_value)) {
		s8 last_accessible_byte = addr & KASAN_SHADOW_MASK;
		return unlikely(last_accessible_byte >= shadow_value);
	}

	return false;
}

static __always_inline bool memory_is_poisoned_2_4_8(unsigned long addr,
						unsigned long size)
{
	u8 *shadow_addr = (u8 *)kasan_mem_to_shadow((void *)addr);

	/*
	 * The access crosses an 8-byte (shadow granule) boundary. Such an
	 * access maps onto two shadow bytes, so we need to check them both.
	 */
	if (unlikely(((addr + size - 1) & KASAN_SHADOW_MASK) < size - 1))
		return *shadow_addr || memory_is_poisoned_1(addr + size - 1);

	return memory_is_poisoned_1(addr + size - 1);
}

static __always_inline bool memory_is_poisoned_16(unsigned long addr)
{
	u16 *shadow_addr = (u16 *)kasan_mem_to_shadow((void *)addr);

	/* An unaligned 16-byte access maps onto three shadow bytes. */
	if (unlikely(!IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE)))
		return *shadow_addr || memory_is_poisoned_1(addr + 15);

	return *shadow_addr;
}

static __always_inline unsigned long bytes_is_nonzero(const u8 *start,
					size_t size)
{
	while (size) {
		if (unlikely(*start))
			return (unsigned long)start;
		start++;
		size--;
	}

	return 0;
}

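/*
 * Scan a shadow range for the first non-zero byte: the unaligned prefix
 * and the tail are checked byte by byte, the aligned middle in 8-byte
 * words. Returns the address of the offending byte, or 0 if the range
 * is all zeroes.
 */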
static __always_inline unsigned long memory_is_nonzero(const void *start,
						const void *end)
{
	unsigned int words;
	unsigned long ret;
	unsigned int prefix = (unsigned long)start % 8;

	if (end - start <= 16)
		return bytes_is_nonzero(start, end - start);

	if (prefix) {
		prefix = 8 - prefix;
		ret = bytes_is_nonzero(start, prefix);
		if (unlikely(ret))
			return ret;
		start += prefix;
	}

	words = (end - start) / 8;
	while (words) {
		if (unlikely(*(u64 *)start))
			return bytes_is_nonzero(start, 8);
		start += 8;
		words--;
	}

	return bytes_is_nonzero(start, (end - start) % 8);
}

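/*
 * For arbitrary sizes: a non-zero shadow byte found by memory_is_nonzero()
 * means a bad access, unless it belongs to the access's last (possibly
 * partial) granule and that granule's shadow value still covers the
 * final byte.
 */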
static __always_inline bool memory_is_poisoned_n(unsigned long addr,
						size_t size)
{
	unsigned long ret;

	ret = memory_is_nonzero(kasan_mem_to_shadow((void *)addr),
			kasan_mem_to_shadow((void *)addr + size - 1) + 1);

	if (unlikely(ret)) {
		unsigned long last_byte = addr + size - 1;
		s8 *last_shadow = (s8 *)kasan_mem_to_shadow((void *)last_byte);

		if (unlikely(ret != (unsigned long)last_shadow ||
			((long)(last_byte & KASAN_SHADOW_MASK) >= *last_shadow)))
			return true;
	}
	return false;
}

static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size)
{
	if (__builtin_constant_p(size)) {
		switch (size) {
		case 1:
			return memory_is_poisoned_1(addr);
		case 2:
		case 4:
		case 8:
			return memory_is_poisoned_2_4_8(addr, size);
		case 16:
			return memory_is_poisoned_16(addr);
		default:
			BUILD_BUG();
		}
	}

	return memory_is_poisoned_n(addr, size);
}

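/*
 * Central check used by the outline __asan_load*()/__asan_store*() hooks
 * and by check_memory_region(): zero-size accesses are always accepted,
 * wrapping accesses and addresses below the shadow-covered range are
 * reported, everything else is decided by the shadow memory.
 */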
static __always_inline bool check_memory_region_inline(unsigned long addr,
						size_t size, bool write,
						unsigned long ret_ip)
{
	if (unlikely(size == 0))
		return true;

	if (unlikely(addr + size < addr))
		return !kasan_report(addr, size, write, ret_ip);

	if (unlikely((void *)addr <
		kasan_shadow_to_mem((void *)KASAN_SHADOW_START))) {
		return !kasan_report(addr, size, write, ret_ip);
	}

	if (likely(!memory_is_poisoned(addr, size)))
		return true;

	return !kasan_report(addr, size, write, ret_ip);
}

bool check_memory_region(unsigned long addr, size_t size, bool write,
				unsigned long ret_ip)
{
	return check_memory_region_inline(addr, size, write, ret_ip);
}

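/*
 * Shrinking or destroying a cache has to flush that cache's objects out
 * of the KASAN quarantine first, so the quarantine does not keep stale
 * references into slabs that are about to be released.
 */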
void kasan_cache_shrink(struct kmem_cache *cache)
{
	quarantine_remove_cache(cache);
}

void kasan_cache_shutdown(struct kmem_cache *cache)
{
	if (!__kmem_cache_empty(cache))
		quarantine_remove_cache(cache);
}

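/*
 * Globals instrumentation: the compiler emits constructor calls passing
 * an array of struct kasan_global descriptors. Each global is unpoisoned
 * and its trailing redzone is poisoned with KASAN_GLOBAL_REDZONE.
 */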
static void register_global(struct kasan_global *global)
{
	size_t aligned_size = round_up(global->size, KASAN_SHADOW_SCALE_SIZE);

	kasan_unpoison_shadow(global->beg, global->size);

	kasan_poison_shadow(global->beg + aligned_size,
		global->size_with_redzone - aligned_size,
		KASAN_GLOBAL_REDZONE);
}

void __asan_register_globals(struct kasan_global *globals, size_t size)
{
	int i;

	for (i = 0; i < size; i++)
		register_global(&globals[i]);
}
EXPORT_SYMBOL(__asan_register_globals);

void __asan_unregister_globals(struct kasan_global *globals, size_t size)
{
}
EXPORT_SYMBOL(__asan_unregister_globals);

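/*
 * Outline instrumentation entry points: instead of inlining the shadow
 * check, the compiler may emit calls to these fixed-size hooks (the
 * kernel is built to use the _noabort variants, which are plain aliases
 * of the regular ones here).
 */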
#define DEFINE_ASAN_LOAD_STORE(size)					\
	void __asan_load##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, false, _RET_IP_);\
	}								\
	EXPORT_SYMBOL(__asan_load##size);				\
	__alias(__asan_load##size)					\
	void __asan_load##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_load##size##_noabort);			\
	void __asan_store##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, true, _RET_IP_);	\
	}								\
	EXPORT_SYMBOL(__asan_store##size);				\
	__alias(__asan_store##size)					\
	void __asan_store##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_store##size##_noabort)

DEFINE_ASAN_LOAD_STORE(1);
DEFINE_ASAN_LOAD_STORE(2);
DEFINE_ASAN_LOAD_STORE(4);
DEFINE_ASAN_LOAD_STORE(8);
DEFINE_ASAN_LOAD_STORE(16);

void __asan_loadN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, false, _RET_IP_);
}
EXPORT_SYMBOL(__asan_loadN);

__alias(__asan_loadN)
void __asan_loadN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_loadN_noabort);

void __asan_storeN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, true, _RET_IP_);
}
EXPORT_SYMBOL(__asan_storeN);

__alias(__asan_storeN)
void __asan_storeN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_storeN_noabort);

/* Emitted by the compiler before noreturn calls; nothing to unpoison here. */
void __asan_handle_no_return(void) {}
EXPORT_SYMBOL(__asan_handle_no_return);

/* Emitted by compiler to poison alloca()ed objects. */
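/*
 * The instrumented alloca() layout is:
 *   [left redzone][object of 'size' bytes][padding][right redzone]
 * with 'addr' pointing at the object and both redzones
 * KASAN_ALLOCA_REDZONE_SIZE bytes wide.
 */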
void __asan_alloca_poison(unsigned long addr, size_t size)
{
	size_t rounded_up_size = round_up(size, KASAN_SHADOW_SCALE_SIZE);
	size_t padding_size = round_up(size, KASAN_ALLOCA_REDZONE_SIZE) -
			rounded_up_size;
	size_t rounded_down_size = round_down(size, KASAN_SHADOW_SCALE_SIZE);

	const void *left_redzone = (const void *)(addr -
			KASAN_ALLOCA_REDZONE_SIZE);
	const void *right_redzone = (const void *)(addr + rounded_up_size);

	WARN_ON(!IS_ALIGNED(addr, KASAN_ALLOCA_REDZONE_SIZE));

	kasan_unpoison_shadow((const void *)(addr + rounded_down_size),
			      size - rounded_down_size);
	kasan_poison_shadow(left_redzone, KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_LEFT);
	kasan_poison_shadow(right_redzone,
			padding_size + KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_RIGHT);
}
EXPORT_SYMBOL(__asan_alloca_poison);

/* Emitted by compiler to unpoison alloca()ed areas when the stack unwinds. */
void __asan_allocas_unpoison(const void *stack_top, const void *stack_bottom)
{
	if (unlikely(!stack_top || stack_top > stack_bottom))
		return;

	kasan_unpoison_shadow(stack_top, stack_bottom - stack_top);
}
EXPORT_SYMBOL(__asan_allocas_unpoison);

/* Emitted by the compiler to [un]poison local variables. */
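/*
 * The shadow byte patterns follow the compiler's ASAN stack ABI:
 * 00 re-enables access, f1/f2/f3 are the stack frame's left/middle/right
 * redzones, f5 marks use-after-return and f8 use-after-scope regions.
 */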
#define DEFINE_ASAN_SET_SHADOW(byte) \
	void __asan_set_shadow_##byte(const void *addr, size_t size)	\
	{								\
		__memset((void *)addr, 0x##byte, size);			\
	}								\
	EXPORT_SYMBOL(__asan_set_shadow_##byte)

DEFINE_ASAN_SET_SHADOW(00);
DEFINE_ASAN_SET_SHADOW(f1);
DEFINE_ASAN_SET_SHADOW(f2);
DEFINE_ASAN_SET_SHADOW(f3);
DEFINE_ASAN_SET_SHADOW(f5);
DEFINE_ASAN_SET_SHADOW(f8);