Skip to content

Commit 931de11

Browse files
Christoph Hellwig authored and palmer-dabbelt committed
asm-generic: improve the nommu {get,put}_user handling
Instead of reusing raw_copy_{from,to}_user, implement separate handlers using {get,put}_unaligned. This ensures unaligned access is handled correctly, and avoids the need for the small constant-size optimization in raw_copy_{from,to}_user. Signed-off-by: Christoph Hellwig <hch@lst.de> Acked-by: Arnd Bergmann <arnd@arndb.de> Signed-off-by: Palmer Dabbelt <palmerdabbelt@google.com>
1 parent 24ce66c commit 931de11

1 file changed

Lines changed: 51 additions & 40 deletions

File tree

include/asm-generic/uaccess.h

Lines changed: 51 additions & 40 deletions
Original file line number | Diff line number | Diff line change
@@ -10,56 +10,67 @@
1010
#include <linux/string.h>
1111

1212
#ifdef CONFIG_UACCESS_MEMCPY
13-
static inline __must_check unsigned long
14-
raw_copy_from_user(void *to, const void __user * from, unsigned long n)
13+
#include <asm/unaligned.h>
14+
15+
/*
 * __get_user_fn - read one fixed-size value from user space (nommu,
 * CONFIG_UACCESS_MEMCPY case, where user pointers are directly dereferencable).
 *
 * @size: width of the access in bytes; must be a compile-time constant
 *        (1, 2, 4 or 8) — enforced by BUILD_BUG_ON()/BUILD_BUG() below
 * @from: user-space source pointer
 * @to:   kernel destination buffer
 *
 * Each case uses get_unaligned() rather than a plain dereference so that an
 * unaligned user pointer is handled correctly on architectures without
 * hardware unaligned-access support (the point of this commit).
 *
 * Always returns 0; this implementation has no failure path.
 */
static inline int __get_user_fn(size_t size, const void __user *from, void *to)
{
	BUILD_BUG_ON(!__builtin_constant_p(size));

	switch (size) {
	case 1:
		*(u8 *)to = get_unaligned((u8 __force *)from);
		return 0;
	case 2:
		*(u16 *)to = get_unaligned((u16 __force *)from);
		return 0;
	case 4:
		*(u32 *)to = get_unaligned((u32 __force *)from);
		return 0;
	case 8:
		*(u64 *)to = get_unaligned((u64 __force *)from);
		return 0;
	default:
		/* Unsupported width: fail the build, not the caller. */
		BUILD_BUG();
		return 0;
	}
}
/* Tell the generic get_user() machinery that a specialized helper exists. */
#define __get_user_fn(sz, u, k)	__get_user_fn(sz, u, k)
39+
40+
/*
 * __put_user_fn - write one fixed-size value to user space (nommu,
 * CONFIG_UACCESS_MEMCPY case) — mirror of __get_user_fn() above.
 *
 * @size: width of the access in bytes; must be a compile-time constant
 *        (1, 2, 4 or 8) — enforced by BUILD_BUG_ON()/BUILD_BUG() below
 * @to:   user-space destination pointer
 * @from: kernel source buffer
 *
 * put_unaligned() is used for each width so that an unaligned user pointer
 * is handled correctly regardless of the architecture's alignment rules.
 *
 * Always returns 0; this implementation has no failure path.
 */
static inline int __put_user_fn(size_t size, void __user *to, void *from)
{
	BUILD_BUG_ON(!__builtin_constant_p(size));

	switch (size) {
	case 1:
		put_unaligned(*(u8 *)from, (u8 __force *)to);
		return 0;
	case 2:
		put_unaligned(*(u16 *)from, (u16 __force *)to);
		return 0;
	case 4:
		put_unaligned(*(u32 *)from, (u32 __force *)to);
		return 0;
	case 8:
		put_unaligned(*(u64 *)from, (u64 __force *)to);
		return 0;
	default:
		/* Unsupported width: fail the build, not the caller. */
		BUILD_BUG();
		return 0;
	}
}
/* Tell the generic put_user() machinery that a specialized helper exists. */
#define __put_user_fn(sz, u, k)	__put_user_fn(sz, u, k)
3463

64+
/*
 * raw_copy_from_user - bulk copy from user space (nommu case).
 *
 * With CONFIG_UACCESS_MEMCPY user pointers are plain memory, so this is a
 * straight memcpy().  The constant-size special cases that used to live here
 * are now handled by __get_user_fn() instead.
 *
 * Returns 0, the conventional "number of bytes not copied" — this variant
 * always succeeds.
 */
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user * from, unsigned long n)
{
	memcpy(to, (const void __force *)from, n);
	return 0;
}
3870

3971
/*
 * raw_copy_to_user - bulk copy to user space (nommu case).
 *
 * With CONFIG_UACCESS_MEMCPY user pointers are plain memory, so this is a
 * straight memcpy().  The constant-size switch that used to live here was
 * removed by this commit; those accesses now go through __put_user_fn().
 *
 * Returns 0, the conventional "number of bytes not copied" — this variant
 * always succeeds.
 */
static inline __must_check unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	memcpy((void __force *)to, from, n);
	return 0;
}

0 commit comments

Comments (0)