/*
 *  Copyright (C) 2011 Texas Instruments Incorporated
 *  Author: Mark Salter <msalter@redhat.com>
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License version 2 as
 *  published by the Free Software Foundation.
 */
#ifndef _ASM_C6X_UACCESS_H
#define _ASM_C6X_UACCESS_H

#include <linux/types.h>
#include <linux/compiler.h>
#include <linux/string.h>

/*
 * C6X supports unaligned 32- and 64-bit loads and stores.
 */
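/*
 * For small constant sizes, the raw copy helpers below turn into a
 * single non-aligned load/store pair; anything else falls back to
 * memcpy(), which is safe here because C6X has no MMU and user
 * addresses are directly accessible.
 */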
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
        u32 tmp32;
        u64 tmp64;

        if (__builtin_constant_p(n)) {
                switch (n) {
                case 1:
                        *(u8 *)to = *(u8 __force *)from;
                        return 0;
                case 4:
                        /*
                         * Non-aligned word copy; the "nop 4" fills the
                         * load's four delay slots.
                         */
                        asm volatile ("ldnw .d1t1 *%2,%0\n"
                                      "nop  4\n"
                                      "stnw .d1t1 %0,*%1\n"
                                      : "=&a"(tmp32)
                                      : "A"(to), "a"(from)
                                      : "memory");
                        return 0;
                case 8:
                        /* Non-aligned doubleword copy. */
                        asm volatile ("ldndw .d1t1 *%2,%0\n"
                                      "nop   4\n"
                                      "stndw .d1t1 %0,*%1\n"
                                      : "=&a"(tmp64)
                                      : "a"(to), "a"(from)
                                      : "memory");
                        return 0;
                default:
                        break;
                }
        }

        memcpy(to, (const void __force *)from, n);
        return 0;
}

static inline __must_check unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
        u32 tmp32;
        u64 tmp64;

        if (__builtin_constant_p(n)) {
                switch (n) {
                case 1:
                        *(u8 __force *)to = *(u8 *)from;
                        return 0;
                case 4:
                        /* Same non-aligned word copy as in raw_copy_from_user(). */
                        asm volatile ("ldnw .d1t1 *%2,%0\n"
                                      "nop  4\n"
                                      "stnw .d1t1 %0,*%1\n"
                                      : "=&a"(tmp32)
                                      : "a"(to), "a"(from)
                                      : "memory");
                        return 0;
                case 8:
                        /* Same non-aligned doubleword copy as above. */
                        asm volatile ("ldndw .d1t1 *%2,%0\n"
                                      "nop   4\n"
                                      "stndw .d1t1 %0,*%1\n"
                                      : "=&a"(tmp64)
                                      : "a"(to), "a"(from)
                                      : "memory");
                        return 0;
                default:
                        break;
                }
        }

        memcpy((void __force *)to, from, n);
        return 0;
}
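
/*
 * Have linux/uaccess.h generate inline _copy_{from,to}_user() wrappers
 * around the raw helpers above instead of using the out-of-line
 * versions from lib/usercopy.c.
 */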
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

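/*
 * With CONFIG_ACCESS_CHECK, user pointers are validated by the
 * out-of-line _access_ok(); otherwise asm-generic/uaccess.h falls back
 * to its default __access_ok(), which accepts any address.
 */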
extern int _access_ok(unsigned long addr, unsigned long size);
#ifdef CONFIG_ACCESS_CHECK
#define __access_ok _access_ok
#endif

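/*
 * The generic uaccess layer supplies access_ok() and the
 * get_user()/put_user() family on top of the definitions above.
 */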
#include <asm-generic/uaccess.h>

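/*
 * Example (hypothetical caller, not part of this header): the raw
 * helpers above back the usual copy_from_user()/copy_to_user() calls
 * in driver code, e.g.:
 *
 *	struct foo_args args;		// "foo_args" is a made-up type
 *
 *	if (copy_from_user(&args, user_ptr, sizeof(args)))
 *		return -EFAULT;		// nonzero return = bytes not copied
 */
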
#endif /* _ASM_C6X_UACCESS_H */