author     Al Viro <viro@zeniv.linux.org.uk>  2017-03-19 16:11:13 -0400
committer  Al Viro <viro@zeniv.linux.org.uk>  2017-03-28 18:23:38 -0400
commit     8cd920f26785ce42ec6fb807d40de67fc2eb41ce (patch)
tree       8fed33da7a042733084eda32eb4d4c6e1663a61a /arch/m32r
parent     35f8acd5c660eb9dbf7a8379724821abb823a3d8 (diff)
download   linux-exynos-8cd920f26785ce42ec6fb807d40de67fc2eb41ce.tar.gz
linux-exynos-8cd920f26785ce42ec6fb807d40de67fc2eb41ce.tar.bz2
linux-exynos-8cd920f26785ce42ec6fb807d40de67fc2eb41ce.zip
m32r: get rid of zeroing
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
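
What the patch does: the __copy_user_zeroing asm macro, whose fixup code
zero-filled the destination buffer after a fault, is deleted;
__generic_copy_from_user now calls the plain __copy_user primitive, which
reports how many bytes were left uncopied, and zeroes the tail itself with
memset(). Below is a minimal, self-contained C model of that scheme; the
copy_user_partial() helper is a hypothetical stand-in for the real m32r
__copy_user asm (it fakes a fault after a fixed number of bytes), not code
from this patch:

	#include <stdio.h>
	#include <string.h>

	/* Hypothetical stand-in for the m32r __copy_user asm: copies until
	 * a simulated fault and returns the number of bytes NOT copied. */
	static unsigned long copy_user_partial(void *to, const void *from,
					       unsigned long n)
	{
		const unsigned long fault_after = 5;	/* fake fault point */
		unsigned long ok = n < fault_after ? n : fault_after;

		memcpy(to, from, ok);
		return n - ok;
	}

	/* Model of the new __generic_copy_from_user(): zero the uncopied
	 * tail in C instead of in the asm fixup, so no stale destination
	 * bytes are exposed after a short copy. */
	static unsigned long generic_copy_from_user(void *to, const void *from,
						    unsigned long n)
	{
		unsigned long ret = copy_user_partial(to, from, n);

		if (ret)
			memset((char *)to + n - ret, 0, ret);
		return ret;
	}

	int main(void)
	{
		char dst[16] = { 0 };
		unsigned long left = generic_copy_from_user(dst, "hello, world", 12);

		printf("bytes not copied: %lu, dst = \"%s\"\n", left, dst);
		return 0;
	}
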
Diffstat (limited to 'arch/m32r')
-rw-r--r--  arch/m32r/include/asm/uaccess.h   66
-rw-r--r--  arch/m32r/lib/usercopy.c           9
2 files changed, 6 insertions(+), 69 deletions(-)
diff --git a/arch/m32r/include/asm/uaccess.h b/arch/m32r/include/asm/uaccess.h
index e47dcc05a0f2..d5c5e68fa2fb 100644
--- a/arch/m32r/include/asm/uaccess.h
+++ b/arch/m32r/include/asm/uaccess.h
@@ -460,77 +460,13 @@ do { \
: "r14", "memory"); \
} while (0)
-#define __copy_user_zeroing(to, from, size) \
-do { \
- unsigned long __dst, __src, __c; \
- __asm__ __volatile__ ( \
- " mv r14, %0\n" \
- " or r14, %1\n" \
- " beq %0, %1, 9f\n" \
- " beqz %2, 9f\n" \
- " and3 r14, r14, #3\n" \
- " bnez r14, 2f\n" \
- " and3 %2, %2, #3\n" \
- " beqz %3, 2f\n" \
- " addi %0, #-4 ; word_copy \n" \
- " .fillinsn\n" \
- "0: ld r14, @%1+\n" \
- " addi %3, #-1\n" \
- " .fillinsn\n" \
- "1: st r14, @+%0\n" \
- " bnez %3, 0b\n" \
- " beqz %2, 9f\n" \
- " addi %0, #4\n" \
- " .fillinsn\n" \
- "2: ldb r14, @%1 ; byte_copy \n" \
- " .fillinsn\n" \
- "3: stb r14, @%0\n" \
- " addi %1, #1\n" \
- " addi %2, #-1\n" \
- " addi %0, #1\n" \
- " bnez %2, 2b\n" \
- " .fillinsn\n" \
- "9:\n" \
- ".section .fixup,\"ax\"\n" \
- " .balign 4\n" \
- "5: addi %3, #1\n" \
- " addi %1, #-4\n" \
- " .fillinsn\n" \
- "6: slli %3, #2\n" \
- " add %2, %3\n" \
- " addi %0, #4\n" \
- " .fillinsn\n" \
- "7: ldi r14, #0 ; store zero \n" \
- " .fillinsn\n" \
- "8: addi %2, #-1\n" \
- " stb r14, @%0 ; ACE? \n" \
- " addi %0, #1\n" \
- " bnez %2, 8b\n" \
- " seth r14, #high(9b)\n" \
- " or3 r14, r14, #low(9b)\n" \
- " jmp r14\n" \
- ".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .balign 4\n" \
- " .long 0b,6b\n" \
- " .long 1b,5b\n" \
- " .long 2b,7b\n" \
- " .long 3b,7b\n" \
- ".previous\n" \
- : "=&r" (__dst), "=&r" (__src), "=&r" (size), \
- "=&r" (__c) \
- : "0" (to), "1" (from), "2" (size), "3" (size / 4) \
- : "r14", "memory"); \
-} while (0)
-
-
/* We let the __ versions of copy_from/to_user inline, because they're often
* used in fast paths and have only a small space overhead.
*/
static inline unsigned long __generic_copy_from_user_nocheck(void *to,
const void __user *from, unsigned long n)
{
- __copy_user_zeroing(to, from, n);
+ __copy_user(to, from, n);
return n;
}
diff --git a/arch/m32r/lib/usercopy.c b/arch/m32r/lib/usercopy.c
index fd03f2731f20..6aacf5ba0a58 100644
--- a/arch/m32r/lib/usercopy.c
+++ b/arch/m32r/lib/usercopy.c
@@ -23,12 +23,13 @@ __generic_copy_to_user(void __user *to, const void *from, unsigned long n)
unsigned long
__generic_copy_from_user(void *to, const void __user *from, unsigned long n)
{
+ unsigned long ret = n;
prefetchw(to);
if (access_ok(VERIFY_READ, from, n))
- __copy_user_zeroing(to,from,n);
- else
- memset(to, 0, n);
- return n;
+ ret = __copy_user(to,from,n);
+ if (unlikely(ret))
+ memset(to + n - ret, 0, ret);
+ return ret;
}
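
For context, the return convention callers rely on is unchanged by this
cleanup: a nonzero return still means that many trailing bytes were not
copied, and the destination tail has been zeroed. A typical caller looks
like the sketch below (my_args and uarg are illustrative names, not from
this patch):

	struct my_args karg;

	if (copy_from_user(&karg, uarg, sizeof(karg)))
		return -EFAULT;	/* short copy; karg's tail was zeroed */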