Commit de09be34 authored by Al Viro
Browse files

cris: get rid of zeroing



... the rest of it

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent c8313947
Loading
Loading
Loading
Loading
+6 −18
Original line number Diff line number Diff line
@@ -217,19 +217,17 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
    {
      __asm_copy_from_user_1 (dst, src, retn);
      n--;
      if (retn)
         goto exception;
    }

    if (((unsigned long) src & 2) && n >= 2)
    {
      __asm_copy_from_user_2 (dst, src, retn);
      n -= 2;
      if (retn)
         goto exception;
    }

    /* We only need one check after the unalignment-adjustments, because
       if both adjustments were done, either both or neither reference
       had an exception.  */
    if (retn != 0)
      goto copy_exception_bytes;
  }

  /* Decide which copying method to use. */
@@ -328,7 +326,7 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
    n -= 4;

    if (retn)
      goto copy_exception_bytes;
      goto exception;
  }

  /* If we get here, there were no memory read faults.  */
@@ -356,17 +354,7 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
     bytes.  */
  return retn;

copy_exception_bytes:
  /* We already have "retn" bytes cleared, and need to clear the
     remaining "n" bytes.  A non-optimized simple byte-for-byte in-line
     memset is preferred here, since this isn't speed-critical code and
     we'd rather have this a leaf-function than calling memset.  */
  {
    char *endp;
    for (endp = dst + n; dst < endp; dst++)
      *dst = 0;
  }

exception:
  return retn + n;
}
EXPORT_SYMBOL(__copy_user_zeroing);
+6 −17
Original line number Diff line number Diff line
@@ -184,19 +184,18 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
    {
      __asm_copy_from_user_1 (dst, src, retn);
      n--;
      if (retn != 0)
        goto exception;
    }

    if (((unsigned long) src & 2) && n >= 2)
    {
      __asm_copy_from_user_2 (dst, src, retn);
      n -= 2;
      if (retn != 0)
        goto exception;
    }

    /* We only need one check after the unalignment-adjustments, because
       if both adjustments were done, either both or neither reference
       had an exception.  */
    if (retn != 0)
      goto copy_exception_bytes;
  }

  /* Movem is dirt cheap.  The overheap is low enough to always use the
@@ -279,7 +278,7 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
    n -= 4;

    if (retn)
      goto copy_exception_bytes;
      goto exception;
  }

  /* If we get here, there were no memory read faults.  */
@@ -307,17 +306,7 @@ unsigned long __copy_user_zeroing(void *pdst, const void __user *psrc,
     bytes.  */
  return retn;

copy_exception_bytes:
  /* We already have "retn" bytes cleared, and need to clear the
     remaining "n" bytes.  A non-optimized simple byte-for-byte in-line
     memset is preferred here, since this isn't speed-critical code and
     we'd rather have this a leaf-function than calling memset.  */
  {
    char *endp;
    for (endp = dst + n; dst < endp; dst++)
      *dst = 0;
  }

exception:
  return retn + n;
}
EXPORT_SYMBOL(__copy_user_zeroing);
+4 −8
Original line number Diff line number Diff line
@@ -172,16 +172,14 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
	__asm_copy_user_cont(to, from, ret,	\
		"	move.b [%1+],$r9\n"	\
		"2:	move.b $r9,[%0+]\n",	\
		"3:	addq 1,%2\n"		\
		"	clear.b [%0+]\n",	\
		"3:	addq 1,%2\n",		\
		"	.dword 2b,3b\n")

#define __asm_copy_from_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret,		\
		"	move.w [%1+],$r9\n"		\
		"2:	move.w $r9,[%0+]\n" COPY,	\
		"3:	addq 2,%2\n"			\
		"	clear.w [%0+]\n" FIXUP,		\
		"3:	addq 2,%2\n" FIXUP,		\
		"	.dword 2b,3b\n" TENTRY)

#define __asm_copy_from_user_2(to, from, ret) \
@@ -191,16 +189,14 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
	__asm_copy_from_user_2x_cont(to, from, ret,	\
		"	move.b [%1+],$r9\n"		\
		"4:	move.b $r9,[%0+]\n",		\
		"5:	addq 1,%2\n"			\
		"	clear.b [%0+]\n",		\
		"5:	addq 1,%2\n",			\
		"	.dword 4b,5b\n")

#define __asm_copy_from_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
	__asm_copy_user_cont(to, from, ret,		\
		"	move.d [%1+],$r9\n"		\
		"2:	move.d $r9,[%0+]\n" COPY,	\
		"3:	addq 4,%2\n"			\
		"	clear.d [%0+]\n" FIXUP,		\
		"3:	addq 4,%2\n" FIXUP,		\
		"	.dword 2b,3b\n" TENTRY)

#define __asm_copy_from_user_4(to, from, ret) \
+4 −8
Original line number Diff line number Diff line
@@ -178,8 +178,7 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
		"2:	move.b [%1+],$acr\n"	\
		"	move.b $acr,[%0+]\n",	\
		"3:	addq 1,%2\n"		\
		"	jump 1b\n"		\
		"	clear.b [%0+]\n",	\
		"	jump 1b\n",		\
		"	.dword 2b,3b\n")

#define __asm_copy_from_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
@@ -189,8 +188,7 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
		"	move.w $acr,[%0+]\n",		\
			FIXUP				\
		"3:	addq 2,%2\n"			\
		"	jump 1b\n"			\
		"	clear.w [%0+]\n",		\
		"	jump 1b\n",			\
			TENTRY				\
		"	.dword 2b,3b\n")

@@ -201,8 +199,7 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
	__asm_copy_from_user_2x_cont(to, from, ret,	\
		"4:	move.b [%1+],$acr\n"		\
		"	move.b $acr,[%0+]\n",		\
		"5:	addq 1,%2\n"			\
		"	clear.b [%0+]\n",		\
		"5:	addq 1,%2\n",			\
		"	.dword 4b,5b\n")

#define __asm_copy_from_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
@@ -212,8 +209,7 @@ __do_strncpy_from_user(char *dst, const char *src, long count)
		"	move.d $acr,[%0+]\n",		\
			FIXUP				\
		"3:	addq 4,%2\n"			\
		"	jump 1b\n"			\
		"	clear.d [%0+]\n",		\
		"	jump 1b\n",			\
			TENTRY				\
		"	.dword 2b,3b\n")