#ifndef _LINUX_KASAN_H
#define _LINUX_KASAN_H

#include <linux/sched.h>
#include <linux/types.h>

struct kmem_cache;
struct page;
struct vm_struct;

#ifdef CONFIG_KASAN

#define KASAN_SHADOW_SCALE_SHIFT 3

#include <asm/kasan.h>
#include <asm/pgtable.h>

extern unsigned char kasan_zero_page[PAGE_SIZE];
extern pte_t kasan_zero_pte[PTRS_PER_PTE];
extern pmd_t kasan_zero_pmd[PTRS_PER_PMD];
extern pud_t kasan_zero_pud[PTRS_PER_PUD];

void kasan_populate_zero_shadow(const void *shadow_start,
				const void *shadow_end);

26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
static inline void *kasan_mem_to_shadow(const void *addr)
{
	return (void *)((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
		+ KASAN_SHADOW_OFFSET;
}

/* Enable reporting bugs after kasan_disable_current() */
static inline void kasan_enable_current(void)
{
	/*
	 * kasan_depth is a per-task counter consulted by the KASAN report
	 * path; this call must balance a prior kasan_disable_current().
	 * NOTE(review): the increment direction here is only correct if
	 * the report code suppresses reports whenever kasan_depth != 0
	 * and the paired disable decrements — confirm against
	 * mm/kasan/report.c before changing either function.
	 */
	current->kasan_depth++;
}

/* Disable reporting bugs for current task */
static inline void kasan_disable_current(void)
{
	/*
	 * Paired with kasan_enable_current(): moves kasan_depth off the
	 * "reports enabled" value so instrumented code running on behalf
	 * of KASAN itself does not recursively emit reports.
	 * NOTE(review): the decrement direction is tied to the depth
	 * check in mm/kasan/report.c — verify before changing.
	 */
	current->kasan_depth--;
}

void kasan_unpoison_shadow(const void *address, size_t size);

void kasan_unpoison_task_stack(struct task_struct *task);

void kasan_alloc_pages(struct page *page, unsigned int order);
void kasan_free_pages(struct page *page, unsigned int order);

void kasan_cache_create(struct kmem_cache *cache, size_t *size,
			unsigned long *flags);

void kasan_poison_slab(struct page *page);
void kasan_unpoison_object_data(struct kmem_cache *cache, void *object);
void kasan_poison_object_data(struct kmem_cache *cache, void *object);

void kasan_kmalloc_large(const void *ptr, size_t size);
void kasan_kfree_large(const void *ptr);
void kasan_kfree(void *ptr);
void kasan_kmalloc(struct kmem_cache *s, const void *object, size_t size);
void kasan_krealloc(const void *object, size_t new_size);

void kasan_slab_alloc(struct kmem_cache *s, void *object);
void kasan_slab_free(struct kmem_cache *s, void *object);

/*
 * Per-cache layout information: byte offsets at which KASAN keeps its
 * own metadata inside each slab object (presumably relative to the
 * object start — confirm against mm/kasan).
 */
struct kasan_cache {
	int alloc_meta_offset;	/* allocation-tracking metadata */
	int free_meta_offset;	/* free-tracking metadata */
};

int kasan_module_alloc(void *addr, size_t size);
void kasan_free_shadow(const struct vm_struct *vm);

#else /* CONFIG_KASAN */

/*
 * CONFIG_KASAN=n: each hook below is an empty static inline so call
 * sites compile unchanged and generate no code.
 */
static inline void kasan_unpoison_shadow(const void *address, size_t size)
{
}

static inline void kasan_unpoison_task_stack(struct task_struct *task)
{
}

/* No-ops: report enable/disable state does not exist without KASAN. */
static inline void kasan_enable_current(void)
{
}

static inline void kasan_disable_current(void)
{
}

/* No-ops: page shadow is never poisoned/unpoisoned without KASAN. */
static inline void kasan_alloc_pages(struct page *page, unsigned int order)
{
}

static inline void kasan_free_pages(struct page *page, unsigned int order)
{
}

/* No-op: no per-cache KASAN metadata layout to compute. */
static inline void kasan_cache_create(struct kmem_cache *cache, size_t *size,
				      unsigned long *flags)
{
}

/* No-ops: slab objects carry no shadow state without KASAN. */
static inline void kasan_poison_slab(struct page *page)
{
}

static inline void kasan_unpoison_object_data(struct kmem_cache *cache,
					      void *object)
{
}

static inline void kasan_poison_object_data(struct kmem_cache *cache,
					    void *object)
{
}

/*
 * No-op stubs. kasan_kmalloc_large() takes const void * to match the
 * CONFIG_KASAN prototype — the pointer is only read, and the mismatched
 * non-const signature here was inconsistent with the real declaration.
 */
static inline void kasan_kmalloc_large(const void *ptr, size_t size) {}
static inline void kasan_kfree_large(const void *ptr) {}
static inline void kasan_kfree(void *ptr) {}
/* No-ops: object (un)poisoning on kmalloc/krealloc/slab paths. */
static inline void kasan_kmalloc(struct kmem_cache *s, const void *object,
				 size_t size)
{
}

static inline void kasan_krealloc(const void *object, size_t new_size)
{
}

static inline void kasan_slab_alloc(struct kmem_cache *s, void *object)
{
}

static inline void kasan_slab_free(struct kmem_cache *s, void *object)
{
}

/* Always succeeds: there is no module shadow to allocate or free. */
static inline int kasan_module_alloc(void *addr, size_t size)
{
	return 0;
}

static inline void kasan_free_shadow(const struct vm_struct *vm)
{
}

#endif /* CONFIG_KASAN */

#endif /* _LINUX_KASAN_H */