2 * Authors: Hans-Peter Nilsson (hp@axis.com)
5 #ifndef _CRIS_ARCH_UACCESS_H
6 #define _CRIS_ARCH_UACCESS_H
9 * We don't tell gcc that we are accessing memory, but this is OK
10 * because we do not write to any memory gcc knows about, so there
11 * are no aliasing issues.
13 * Note that PC at a fault is the address *at* the faulting
14 * instruction for CRISv32.
/*
 * __put_user_asm: store one value of width `op` (.b/.w/.d) to a user-space
 * address.  On a faulting store the .fixup/__ex_table machinery sets `err`
 * to -EFAULT (the "g" (-EFAULT) input below); "0" (err) ties the error
 * accumulator to the first output so prior errors are preserved.
 * NOTE(review): this extract is line-sampled -- the store instruction,
 * the .fixup body and the __ex_table entry lines are missing here; do not
 * edit without the complete original arch-v32 uaccess.h in hand.
 */
16 #define __put_user_asm(x, addr, err, op) \
17 __asm__ __volatile__( \
20 " .section .fixup,\"ax\"\n" \
25 " .section __ex_table,\"a\"\n" \
29 : "r" (x), "r" (addr), "g" (-EFAULT), "0" (err))
/*
 * __put_user_asm_64: store a 64-bit value as two 32-bit moves -- %M2
 * (low word, post-incrementing the address) then %H2 (high word).  The
 * dummy output variable exists only so the address register can be a
 * read-write ("=b"/"1") operand; either faulting move lands in the
 * shared fixup that sets `err` to -EFAULT.
 * NOTE(review): the fixup body, __ex_table entries and the closing
 * "} while (0)" of this do-while wrapper are missing from this extract.
 */
31 #define __put_user_asm_64(x, addr, err) do { \
32 int dummy_for_put_user_asm_64_; \
33 __asm__ __volatile__( \
34 "2: move.d %M2,[%1+]\n" \
35 "4: move.d %H2,[%1]\n" \
37 " .section .fixup,\"ax\"\n" \
41 " .section __ex_table,\"a\"\n" \
45 : "=r" (err), "=b" (dummy_for_put_user_asm_64_) \
46 : "r" (x), "1" (addr), "g" (-EFAULT), \
50 /* See comment before __put_user_asm. */
/*
 * __get_user_asm: load one value of width `op` from a user-space address
 * into `x`; a faulting load sets `err` to -EFAULT through the fixup/
 * exception-table path (and, per the original, zeroes `x` -- the fixup
 * body is among the lines missing from this extract, so confirm against
 * the full file before relying on that).
 */
52 #define __get_user_asm(x, addr, err, op) \
53 __asm__ __volatile__( \
56 " .section .fixup,\"ax\"\n" \
61 " .section __ex_table,\"a\"\n" \
64 : "=r" (err), "=r" (x) \
65 : "r" (addr), "g" (-EFAULT), "0" (err))
/*
 * __get_user_asm_64: 64-bit counterpart of __get_user_asm -- two 32-bit
 * loads into the low (%M1) and high (%H1) halves of `x`.  The dummy
 * output exists so the source address can be a read-write ("=b"/"2")
 * operand that the post-increment may modify.
 * NOTE(review): fixup body, __ex_table entries and the "} while (0)"
 * closer are missing from this line-sampled extract.
 */
67 #define __get_user_asm_64(x, addr, err) do { \
68 int dummy_for_get_user_asm_64_; \
69 __asm__ __volatile__( \
70 "2: move.d [%2+],%M1\n" \
71 "4: move.d [%2],%H1\n" \
73 " .section .fixup,\"ax\"\n" \
78 " .section __ex_table,\"a\"\n" \
82 : "=r" (err), "=r" (x), \
83 "=b" (dummy_for_get_user_asm_64_) \
84 : "2" (addr), "g" (-EFAULT), "0" (err));\
88 * Copy a null terminated string from userspace.
91 * -EFAULT for an exception
92 * count if we hit the buffer limit
93 * bytes copied if we hit a null byte
94 * (without the null byte)
/*
 * NOTE(review): the return type, local declarations, loop branches,
 * fixup body and exception-table entries of this function are missing
 * from this line-sampled extract; what remains below is the skeleton of
 * the asm byte-copy loop.  Reconstruct from the original file before
 * making any change here.
 */
97 __do_strncpy_from_user(char *dst, const char *src, long count)
105 * Currently, in 2.4.0-test9, most ports use a simple byte-copy loop.
108 * This code is deduced from:
113 * while ((*dst++ = (tmp2 = *src++)) != 0
117 * res = count - tmp1;
/* Byte-copy loop: load from user (src, %2) into $acr, store to dst (%1).
   Only the user-side loads need exception-table coverage. */
122 __asm__ __volatile__ (
124 "5: move.b [%2+],$acr\n"
126 " move.b $acr,[%1+]\n"
130 " move.b [%2+],$acr\n"
135 " .section .fixup,\"ax\"\n"
140 /* The address for a fault at the first move is trivial.
141 The address for a fault at the second move is that of
142 the preceding branch insn, since the move insn is in
143 its delay-slot. That address is also a branch
144 target. Just so you don't get confused... */
146 " .section __ex_table,\"a\"\n"
150 : "=r" (res), "=b" (dst), "=b" (src), "=r" (count)
151 : "3" (count), "1" (dst), "2" (src), "g" (-EFAULT)
157 /* A few copy asms to build up the more complex ones from.
159 Note again, a post-increment is performed regardless of whether a bus
160 fault occurred in that instruction, and PC for a faulted insn is the
161 address for the insn, or for the preceding branch when in a delay-slot. */
/*
 * __asm_copy_user_cont: common frame for all the size-specific copy
 * macros below.  COPY is the instruction sequence, FIXUP the .fixup
 * body, TENTRY the __ex_table entries; `to`, `from` and `ret` are
 * read-write operands so the macros can be chained ("_cont" variants).
 * NOTE(review): the lines that splice COPY/FIXUP/TENTRY into the
 * template, and the clobber list, are missing from this extract.
 */
163 #define __asm_copy_user_cont(to, from, ret, COPY, FIXUP, TENTRY) \
164 __asm__ __volatile__ ( \
167 " .section .fixup,\"ax\"\n" \
170 " .section __ex_table,\"a\"\n" \
173 : "=b" (to), "=b" (from), "=r" (ret) \
174 : "0" (to), "1" (from), "2" (ret) \
/*
 * copy_from_user building blocks, sizes 1..24 bytes.  Each "_Nx_cont"
 * macro extends a smaller one with an extra move of the widest size that
 * fits, and each fixup clears the corresponding destination bytes so a
 * faulting copy_from_user zero-fills the tail (the clear.[bwd] lines
 * below).  Only user-side loads get exception-table entries.
 * NOTE(review): this is a line-sampled extract -- the numeric label
 * lines, fixup preambles and __ex_table entry lines of every macro are
 * missing; the original file is required to modify any of these.
 */
177 #define __asm_copy_from_user_1(to, from, ret) \
178 __asm_copy_user_cont(to, from, ret, \
179 "2: move.b [%1+],$acr\n" \
180 " move.b $acr,[%0+]\n", \
183 " clear.b [%0+]\n", \
186 #define __asm_copy_from_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
187 __asm_copy_user_cont(to, from, ret, \
189 "2: move.w [%1+],$acr\n" \
190 " move.w $acr,[%0+]\n", \
194 " clear.w [%0+]\n", \
198 #define __asm_copy_from_user_2(to, from, ret) \
199 __asm_copy_from_user_2x_cont(to, from, ret, "", "", "")
201 #define __asm_copy_from_user_3(to, from, ret) \
202 __asm_copy_from_user_2x_cont(to, from, ret, \
203 "4: move.b [%1+],$acr\n" \
204 " move.b $acr,[%0+]\n", \
206 " clear.b [%0+]\n", \
209 #define __asm_copy_from_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
210 __asm_copy_user_cont(to, from, ret, \
212 "2: move.d [%1+],$acr\n" \
213 " move.d $acr,[%0+]\n", \
217 " clear.d [%0+]\n", \
221 #define __asm_copy_from_user_4(to, from, ret) \
222 __asm_copy_from_user_4x_cont(to, from, ret, "", "", "")
224 #define __asm_copy_from_user_5(to, from, ret) \
225 __asm_copy_from_user_4x_cont(to, from, ret, \
226 "4: move.b [%1+],$acr\n" \
227 " move.b $acr,[%0+]\n", \
229 " clear.b [%0+]\n", \
232 #define __asm_copy_from_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
233 __asm_copy_from_user_4x_cont(to, from, ret, \
235 "4: move.w [%1+],$acr\n" \
236 " move.w $acr,[%0+]\n", \
239 " clear.w [%0+]\n", \
243 #define __asm_copy_from_user_6(to, from, ret) \
244 __asm_copy_from_user_6x_cont(to, from, ret, "", "", "")
246 #define __asm_copy_from_user_7(to, from, ret) \
247 __asm_copy_from_user_6x_cont(to, from, ret, \
248 "6: move.b [%1+],$acr\n" \
249 " move.b $acr,[%0+]\n", \
251 " clear.b [%0+]\n", \
254 #define __asm_copy_from_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
255 __asm_copy_from_user_4x_cont(to, from, ret, \
257 "4: move.d [%1+],$acr\n" \
258 " move.d $acr,[%0+]\n", \
261 " clear.d [%0+]\n", \
265 #define __asm_copy_from_user_8(to, from, ret) \
266 __asm_copy_from_user_8x_cont(to, from, ret, "", "", "")
268 #define __asm_copy_from_user_9(to, from, ret) \
269 __asm_copy_from_user_8x_cont(to, from, ret, \
270 "6: move.b [%1+],$acr\n" \
271 " move.b $acr,[%0+]\n", \
273 " clear.b [%0+]\n", \
276 #define __asm_copy_from_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
277 __asm_copy_from_user_8x_cont(to, from, ret, \
279 "6: move.w [%1+],$acr\n" \
280 " move.w $acr,[%0+]\n", \
283 " clear.w [%0+]\n", \
287 #define __asm_copy_from_user_10(to, from, ret) \
288 __asm_copy_from_user_10x_cont(to, from, ret, "", "", "")
290 #define __asm_copy_from_user_11(to, from, ret) \
291 __asm_copy_from_user_10x_cont(to, from, ret, \
292 "8: move.b [%1+],$acr\n" \
293 " move.b $acr,[%0+]\n", \
295 " clear.b [%0+]\n", \
298 #define __asm_copy_from_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
299 __asm_copy_from_user_8x_cont(to, from, ret, \
301 "6: move.d [%1+],$acr\n" \
302 " move.d $acr,[%0+]\n", \
305 " clear.d [%0+]\n", \
309 #define __asm_copy_from_user_12(to, from, ret) \
310 __asm_copy_from_user_12x_cont(to, from, ret, "", "", "")
312 #define __asm_copy_from_user_13(to, from, ret) \
313 __asm_copy_from_user_12x_cont(to, from, ret, \
314 "8: move.b [%1+],$acr\n" \
315 " move.b $acr,[%0+]\n", \
317 " clear.b [%0+]\n", \
320 #define __asm_copy_from_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
321 __asm_copy_from_user_12x_cont(to, from, ret, \
323 "8: move.w [%1+],$acr\n" \
324 " move.w $acr,[%0+]\n", \
327 " clear.w [%0+]\n", \
331 #define __asm_copy_from_user_14(to, from, ret) \
332 __asm_copy_from_user_14x_cont(to, from, ret, "", "", "")
334 #define __asm_copy_from_user_15(to, from, ret) \
335 __asm_copy_from_user_14x_cont(to, from, ret, \
336 "10: move.b [%1+],$acr\n" \
337 " move.b $acr,[%0+]\n", \
339 " clear.b [%0+]\n", \
342 #define __asm_copy_from_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
343 __asm_copy_from_user_12x_cont(to, from, ret, \
345 "8: move.d [%1+],$acr\n" \
346 " move.d $acr,[%0+]\n", \
349 " clear.d [%0+]\n", \
353 #define __asm_copy_from_user_16(to, from, ret) \
354 __asm_copy_from_user_16x_cont(to, from, ret, "", "", "")
356 #define __asm_copy_from_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
357 __asm_copy_from_user_16x_cont(to, from, ret, \
359 "10: move.d [%1+],$acr\n" \
360 " move.d $acr,[%0+]\n", \
363 " clear.d [%0+]\n", \
367 #define __asm_copy_from_user_20(to, from, ret) \
368 __asm_copy_from_user_20x_cont(to, from, ret, "", "", "")
370 #define __asm_copy_from_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
371 __asm_copy_from_user_20x_cont(to, from, ret, \
373 "12: move.d [%1+],$acr\n" \
374 " move.d $acr,[%0+]\n", \
377 " clear.d [%0+]\n", \
381 #define __asm_copy_from_user_24(to, from, ret) \
382 __asm_copy_from_user_24x_cont(to, from, ret, "", "", "")
384 /* And now, the to-user ones. */
/*
 * copy_to_user building blocks, sizes 1..24 bytes, mirroring the
 * from-user family above.  Here the numeric labels sit on the *store*
 * instructions (the user-side access), and -- unlike from-user -- no
 * destination clearing is done on a fault.
 * NOTE(review): line-sampled extract; fixup bodies and __ex_table entry
 * lines of every macro are missing.
 */
386 #define __asm_copy_to_user_1(to, from, ret) \
387 __asm_copy_user_cont(to, from, ret, \
388 " move.b [%1+],$acr\n" \
389 "2: move.b $acr,[%0+]\n", \
394 #define __asm_copy_to_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
395 __asm_copy_user_cont(to, from, ret, \
397 " move.w [%1+],$acr\n" \
398 "2: move.w $acr,[%0+]\n", \
405 #define __asm_copy_to_user_2(to, from, ret) \
406 __asm_copy_to_user_2x_cont(to, from, ret, "", "", "")
408 #define __asm_copy_to_user_3(to, from, ret) \
409 __asm_copy_to_user_2x_cont(to, from, ret, \
410 " move.b [%1+],$acr\n" \
411 "4: move.b $acr,[%0+]\n", \
415 #define __asm_copy_to_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
416 __asm_copy_user_cont(to, from, ret, \
418 " move.d [%1+],$acr\n" \
419 "2: move.d $acr,[%0+]\n", \
426 #define __asm_copy_to_user_4(to, from, ret) \
427 __asm_copy_to_user_4x_cont(to, from, ret, "", "", "")
429 #define __asm_copy_to_user_5(to, from, ret) \
430 __asm_copy_to_user_4x_cont(to, from, ret, \
431 " move.b [%1+],$acr\n" \
432 "4: move.b $acr,[%0+]\n", \
436 #define __asm_copy_to_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
437 __asm_copy_to_user_4x_cont(to, from, ret, \
439 " move.w [%1+],$acr\n" \
440 "4: move.w $acr,[%0+]\n", \
446 #define __asm_copy_to_user_6(to, from, ret) \
447 __asm_copy_to_user_6x_cont(to, from, ret, "", "", "")
449 #define __asm_copy_to_user_7(to, from, ret) \
450 __asm_copy_to_user_6x_cont(to, from, ret, \
451 " move.b [%1+],$acr\n" \
452 "6: move.b $acr,[%0+]\n", \
456 #define __asm_copy_to_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
457 __asm_copy_to_user_4x_cont(to, from, ret, \
459 " move.d [%1+],$acr\n" \
460 "4: move.d $acr,[%0+]\n", \
466 #define __asm_copy_to_user_8(to, from, ret) \
467 __asm_copy_to_user_8x_cont(to, from, ret, "", "", "")
469 #define __asm_copy_to_user_9(to, from, ret) \
470 __asm_copy_to_user_8x_cont(to, from, ret, \
471 " move.b [%1+],$acr\n" \
472 "6: move.b $acr,[%0+]\n", \
476 #define __asm_copy_to_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
477 __asm_copy_to_user_8x_cont(to, from, ret, \
479 " move.w [%1+],$acr\n" \
480 "6: move.w $acr,[%0+]\n", \
486 #define __asm_copy_to_user_10(to, from, ret) \
487 __asm_copy_to_user_10x_cont(to, from, ret, "", "", "")
489 #define __asm_copy_to_user_11(to, from, ret) \
490 __asm_copy_to_user_10x_cont(to, from, ret, \
491 " move.b [%1+],$acr\n" \
492 "8: move.b $acr,[%0+]\n", \
496 #define __asm_copy_to_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
497 __asm_copy_to_user_8x_cont(to, from, ret, \
499 " move.d [%1+],$acr\n" \
500 "6: move.d $acr,[%0+]\n", \
506 #define __asm_copy_to_user_12(to, from, ret) \
507 __asm_copy_to_user_12x_cont(to, from, ret, "", "", "")
509 #define __asm_copy_to_user_13(to, from, ret) \
510 __asm_copy_to_user_12x_cont(to, from, ret, \
511 " move.b [%1+],$acr\n" \
512 "8: move.b $acr,[%0+]\n", \
516 #define __asm_copy_to_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
517 __asm_copy_to_user_12x_cont(to, from, ret, \
519 " move.w [%1+],$acr\n" \
520 "8: move.w $acr,[%0+]\n", \
526 #define __asm_copy_to_user_14(to, from, ret) \
527 __asm_copy_to_user_14x_cont(to, from, ret, "", "", "")
529 #define __asm_copy_to_user_15(to, from, ret) \
530 __asm_copy_to_user_14x_cont(to, from, ret, \
531 " move.b [%1+],$acr\n" \
532 "10: move.b $acr,[%0+]\n", \
536 #define __asm_copy_to_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
537 __asm_copy_to_user_12x_cont(to, from, ret, \
539 " move.d [%1+],$acr\n" \
540 "8: move.d $acr,[%0+]\n", \
546 #define __asm_copy_to_user_16(to, from, ret) \
547 __asm_copy_to_user_16x_cont(to, from, ret, "", "", "")
549 #define __asm_copy_to_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
550 __asm_copy_to_user_16x_cont(to, from, ret, \
552 " move.d [%1+],$acr\n" \
553 "10: move.d $acr,[%0+]\n", \
559 #define __asm_copy_to_user_20(to, from, ret) \
560 __asm_copy_to_user_20x_cont(to, from, ret, "", "", "")
562 #define __asm_copy_to_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
563 __asm_copy_to_user_20x_cont(to, from, ret, \
565 " move.d [%1+],$acr\n" \
566 "12: move.d $acr,[%0+]\n", \
572 #define __asm_copy_to_user_24(to, from, ret) \
573 __asm_copy_to_user_24x_cont(to, from, ret, "", "", "")
575 /* Define a few clearing asms with exception handlers. */
577 /* This frame-asm is like the __asm_copy_user_cont one, but has one less
/*
 * __asm_clear: frame for the clear_user (zero-fill) macros below --
 * same shape as __asm_copy_user_cont but with no `from` operand.
 * NOTE(review): the tail of the comment above and the lines splicing
 * CLEAR/FIXUP/TENTRY into the template are missing from this extract.
 */
580 #define __asm_clear(to, ret, CLEAR, FIXUP, TENTRY) \
581 __asm__ __volatile__ ( \
584 " .section .fixup,\"ax\"\n" \
587 " .section __ex_table,\"a\"\n" \
590 : "=b" (to), "=r" (ret) \
591 : "0" (to), "1" (ret) \
/*
 * clear_user building blocks, sizes 1..24 bytes, built on __asm_clear
 * the same way the copy families are built on __asm_copy_user_cont.
 * All stores are user-side, so every clear.[bwd] carries a label for
 * the exception table.
 * NOTE(review): line-sampled extract; fixup/__ex_table lines missing.
 */
594 #define __asm_clear_1(to, ret) \
595 __asm_clear(to, ret, \
596 "2: clear.b [%0+]\n", \
601 #define __asm_clear_2(to, ret) \
602 __asm_clear(to, ret, \
603 "2: clear.w [%0+]\n", \
608 #define __asm_clear_3(to, ret) \
609 __asm_clear(to, ret, \
610 "2: clear.w [%0+]\n" \
611 "3: clear.b [%0+]\n", \
618 #define __asm_clear_4x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
619 __asm_clear(to, ret, \
621 "2: clear.d [%0+]\n", \
628 #define __asm_clear_4(to, ret) \
629 __asm_clear_4x_cont(to, ret, "", "", "")
631 #define __asm_clear_8x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
632 __asm_clear_4x_cont(to, ret, \
634 "4: clear.d [%0+]\n", \
640 #define __asm_clear_8(to, ret) \
641 __asm_clear_8x_cont(to, ret, "", "", "")
643 #define __asm_clear_12x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
644 __asm_clear_8x_cont(to, ret, \
646 "6: clear.d [%0+]\n", \
652 #define __asm_clear_12(to, ret) \
653 __asm_clear_12x_cont(to, ret, "", "", "")
655 #define __asm_clear_16x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
656 __asm_clear_12x_cont(to, ret, \
658 "8: clear.d [%0+]\n", \
664 #define __asm_clear_16(to, ret) \
665 __asm_clear_16x_cont(to, ret, "", "", "")
667 #define __asm_clear_20x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
668 __asm_clear_16x_cont(to, ret, \
670 "10: clear.d [%0+]\n", \
676 #define __asm_clear_20(to, ret) \
677 __asm_clear_20x_cont(to, ret, "", "", "")
679 #define __asm_clear_24x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
680 __asm_clear_20x_cont(to, ret, \
682 "12: clear.d [%0+]\n", \
688 #define __asm_clear_24(to, ret) \
689 __asm_clear_24x_cont(to, ret, "", "", "")
692 * Return the size of a string (including the ending 0)
694 * Return length of string in userspace including terminating 0
695 * or 0 for error. Return a value greater than N if too long.
/*
 * NOTE(review): this definition is incomplete in this extract -- the
 * return type, the access_ok early-return body, the asm loop body,
 * fixup, exception-table entries and the function tail continue past
 * the end of the visible text.  Do not modify from this view alone.
 */
699 strnlen_user(const char *s, long n)
703 if (!access_ok(VERIFY_READ, s, 0))
707 * This code is deduced from:
710 * while (tmp1-- > 0 && *s++)
718 __asm__ __volatile__ (
732 " .section .fixup,\"ax\"\n"
738 " .section __ex_table,\"a\"\n"
741 : "=r" (res), "=r" (tmp1)