#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
18 19 20 21
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
22
_copy_to_user(void __user *to, const void *from, unsigned len);
23
__must_check unsigned long
24
_copy_from_user(void *to, const void __user *from, unsigned len);
25 26 27
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);

28 29 30 31 32 33 34
static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);
	int ret = -EFAULT;

35
	might_fault();
36 37 38 39 40 41 42 43 44
	if (likely(sz == -1 || sz >= n))
		ret = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return ret;
}

45 46 47 48 49 50 51
static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
52

53 54
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
55
{
56
	int ret = 0;
57

58
	might_fault();
L
Linus Torvalds 已提交
59
	if (!__builtin_constant_p(size))
60 61 62 63
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
L
Linus Torvalds 已提交
64
		return ret;
65 66
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
L
Linus Torvalds 已提交
67
		return ret;
68 69 70 71 72
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
L
Linus Torvalds 已提交
73 74
		return ret;
	case 10:
75
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
76
			       ret, "q", "", "=r", 10);
77 78 79 80 81 82
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
L
Linus Torvalds 已提交
83
	case 16:
84 85 86 87 88 89 90 91
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
L
Linus Torvalds 已提交
92
	default:
93
		return copy_user_generic(dst, (__force void *)src, size);
L
Linus Torvalds 已提交
94
	}
95
}
L
Linus Torvalds 已提交
96

97 98
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
99
{
100
	int ret = 0;
101

102
	might_fault();
L
Linus Torvalds 已提交
103
	if (!__builtin_constant_p(size))
104 105 106 107
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
L
Linus Torvalds 已提交
108
		return ret;
109 110
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
L
Linus Torvalds 已提交
111
		return ret;
112 113 114 115
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
116
			      ret, "q", "", "er", 8);
L
Linus Torvalds 已提交
117 118
		return ret;
	case 10:
119
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
120
			       ret, "q", "", "er", 10);
121 122
		if (unlikely(ret))
			return ret;
L
Linus Torvalds 已提交
123
		asm("":::"memory");
124 125 126
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
L
Linus Torvalds 已提交
127
	case 16:
128
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
129
			       ret, "q", "", "er", 16);
130 131
		if (unlikely(ret))
			return ret;
L
Linus Torvalds 已提交
132
		asm("":::"memory");
133
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
134
			       ret, "q", "", "er", 8);
135
		return ret;
L
Linus Torvalds 已提交
136
	default:
137
		return copy_user_generic((__force void *)dst, src, size);
L
Linus Torvalds 已提交
138
	}
139
}
L
Linus Torvalds 已提交
140

141 142
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
143
{
144
	int ret = 0;
145

146
	might_fault();
L
Linus Torvalds 已提交
147
	if (!__builtin_constant_p(size))
148 149 150 151
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
L
Linus Torvalds 已提交
152
		u8 tmp;
153 154
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
L
Linus Torvalds 已提交
155
		if (likely(!ret))
156 157
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
L
Linus Torvalds 已提交
158 159
		return ret;
	}
160
	case 2: {
L
Linus Torvalds 已提交
161
		u16 tmp;
162 163
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
L
Linus Torvalds 已提交
164
		if (likely(!ret))
165 166
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
L
Linus Torvalds 已提交
167 168 169
		return ret;
	}

170
	case 4: {
L
Linus Torvalds 已提交
171
		u32 tmp;
172 173
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
L
Linus Torvalds 已提交
174
		if (likely(!ret))
175 176
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
L
Linus Torvalds 已提交
177 178
		return ret;
	}
179
	case 8: {
L
Linus Torvalds 已提交
180
		u64 tmp;
181 182
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
L
Linus Torvalds 已提交
183
		if (likely(!ret))
184
			__put_user_asm(tmp, (u64 __user *)dst,
185
				       ret, "q", "", "er", 8);
L
Linus Torvalds 已提交
186 187 188
		return ret;
	}
	default:
189 190
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
L
Linus Torvalds 已提交
191
	}
192
}
L
Linus Torvalds 已提交
193

/*
 * User-space string and memory-clearing helpers, implemented
 * out of line elsewhere in the tree.
 */
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

204 205 206 207 208
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}
209 210 211 212 213 214

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
L
Linus Torvalds 已提交
215

216 217
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);
218

219 220
static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
221 222
{
	might_sleep();
223
	return __copy_user_nocache(dst, src, size, 1);
224 225
}

226 227 228
static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
229
{
230
	return __copy_user_nocache(dst, src, size, 0);
231 232
}

/*
 * Fault-recovery tail handler for the copy routines; implemented out
 * of line.  Returns the number of bytes left uncopied.
 */
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */