/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 *   This program is free software; you can redistribute it and/or
 *   modify it under the terms of the GNU General Public License
 *   as published by the Free Software Foundation, version 2.
 *
 *   This program is distributed in the hope that it will be useful, but
 *   WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 *   NON INFRINGEMENT.  See the GNU General Public License for
 *   more details.
 *
 * Do memcpy(), but trap and return "n" when a load or store faults.
 *
 * Note: this idiom only works when memcpy() compiles to a leaf function.
 * If "sp" is updated during memcpy, the "jrp lr" will be incorrect.
 *
 * Also note that we are capturing "n" from the containing scope here.
 */
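/*
 * For illustration (caller and buffer names here are hypothetical,
 * not from this file):
 *
 *	unsigned long left = __copy_to_user_inatomic(udst, ksrc, n);
 *
 * If a store faults partway through, "left" is the number of bytes
 * that were not copied; it is zero on full success.
 */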

/*
 * Store "v" through pointer "p" with the given store instruction.
 * If the store faults, the fixup code at label 2 returns the
 * captured byte count "n" directly to memcpy()'s caller.
 */
#define _ST(p, inst, v)						\
	({							\
		asm("1: " #inst " %0, %1;"			\
		    ".pushsection .coldtext.memcpy,\"ax\";"	\
		    "2: { move r0, %2; jrp lr };"		\
		    ".section __ex_table,\"a\";"		\
		    ".quad 1b, 2b;"				\
		    ".popsection"				\
		    : "=m" (*(p)) : "r" (v), "r" (n));		\
	})
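
/*
 * Sketch of what one _ST() expansion lays out (illustrative, not the
 * exact assembler output):
 *
 *	.text:			1:  st1  dst, val	(may fault)
 *	.coldtext.memcpy:	2:  { move r0, n; jrp lr }
 *	__ex_table:		    .quad 1b, 2b
 *
 * On a fault at 1b the trap handler looks up 1b in __ex_table and
 * resumes at 2b, which returns "n" to memcpy()'s caller via "lr".
 * _LD() below is symmetric, with the faulting instruction a load.
 */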

/*
 * Load through pointer "p" with the given load instruction and yield
 * the loaded value.  A faulting load likewise returns "n" via the
 * fixup path.
 */
#define _LD(p, inst)						\
	({							\
		unsigned long __v;				\
		asm("1: " #inst " %0, %1;"			\
		    ".pushsection .coldtext.memcpy,\"ax\";"	\
		    "2: { move r0, %2; jrp lr };"		\
		    ".section __ex_table,\"a\";"		\
		    ".quad 1b, 2b;"				\
		    ".popsection"				\
		    : "=r" (__v) : "m" (*(p)), "r" (n));	\
		__v;						\
	})
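
/*
 * Illustrative expansion site (hypothetical; "n" must be in scope,
 * as noted above):
 *
 *	*(unsigned char *)dst = _LD((unsigned char __user *)src, ld1u);
 *
 * On success this yields the loaded byte; on a fault it returns "n"
 * from the enclosing copy routine instead.
 */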

/*
 * Generate __copy_to_user_inatomic(): only the stores touch user
 * memory and can fault, so loads use the plain LD macros provided
 * by memcpy_64.c.
 */
#define USERCOPY_FUNC __copy_to_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1 LD
#define LD2 LD
#define LD4 LD
#define LD8 LD
#include "memcpy_64.c"

/*
 * Generate __copy_from_user_inatomic(): only the loads touch user
 * memory and can fault, so stores use the plain ST macros.
 */
#define USERCOPY_FUNC __copy_from_user_inatomic
#define ST1 ST
#define ST2 ST
#define ST4 ST
#define ST8 ST
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"

/*
 * Generate __copy_in_user_inatomic(): both source and destination
 * are user memory, so both loads and stores can fault.
 */
#define USERCOPY_FUNC __copy_in_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"
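
/*
 * A minimal sketch of the triple-inclusion pattern above (a reduced,
 * hypothetical form of memcpy_64.c, not its actual contents):
 *
 *	#ifdef USERCOPY_FUNC
 *	unsigned long USERCOPY_FUNC(void *dst, const void *src,
 *				    unsigned long n)
 *	#else
 *	void *memcpy(void *dst, const void *src, unsigned long n)
 *	#endif
 *	{
 *		... body written in terms of ST1()/LD1() etc. ...
 *	}
 *
 * Each #include therefore emits one routine specialized by the macro
 * definitions in force at that point; memcpy_64.c is expected to
 * #undef those macros afterwards so the next block can redefine them.
 */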

/*
 * Like __copy_from_user_inatomic(), but zero any uncopied tail of
 * the kernel buffer so it is fully initialized even after a fault.
 */
unsigned long __copy_from_user_zeroing(void *to, const void __user *from,
				       unsigned long n)
{
	unsigned long rc = __copy_from_user_inatomic(to, from, n);
	if (unlikely(rc))
		memset(to + n - rc, 0, rc);
	return rc;
}
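
/*
 * Illustrative caller (names are hypothetical): after a short copy
 * the destination buffer is still fully defined:
 *
 *	if (__copy_from_user_zeroing(kbuf, ubuf, len) != 0)
 *		return -EFAULT;
 *
 * Even on the failure path, the uncopied tail of kbuf contains
 * zeroes rather than stale kernel data.
 */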