Unverified Commit 04091d6c authored by Nylon Chen's avatar Nylon Chen Committed by Palmer Dabbelt
Browse files

riscv: provide memmove implementation



memmove is used by kernel features such as KASAN, so provide an
architecture-specific implementation for RISC-V.

Signed-off-by: default avatarNick Hu <nickhu@andestech.com>
Signed-off-by: default avatarNick Hu <nick650823@gmail.com>
Signed-off-by: default avatarNylon Chen <nylon7@andestech.com>
Signed-off-by: default avatarPalmer Dabbelt <palmerdabbelt@google.com>
parent b5b11a8a
Loading
Loading
Loading
Loading
+4 −4
Original line number Diff line number Diff line
@@ -12,16 +12,16 @@
/* Tell lib/string.c this arch provides its own optimized memset(). */
#define __HAVE_ARCH_MEMSET
extern asmlinkage void *memset(void *, int, size_t);
extern asmlinkage void *__memset(void *, int, size_t);

/* Arch-provided memcpy(); __memcpy() is the raw, uninstrumented variant. */
#define __HAVE_ARCH_MEMCPY
extern asmlinkage void *memcpy(void *, const void *, size_t);
extern asmlinkage void *__memcpy(void *, const void *, size_t);

/* Arch-provided memmove(); __memmove() is the raw, uninstrumented variant. */
#define __HAVE_ARCH_MEMMOVE
extern asmlinkage void *memmove(void *, const void *, size_t);
extern asmlinkage void *__memmove(void *, const void *, size_t);
/*
 * For files that must not be checked by KASAN: redirect the mem*()
 * calls to the raw __mem*() implementations, which bypass the
 * KASAN-instrumented wrappers.
 */
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#define memmove(dst, src, len) __memmove(dst, src, len)
#endif
#endif /* _ASM_RISCV_STRING_H */
+2 −0
Original line number Diff line number Diff line
@@ -11,5 +11,7 @@
 */
/*
 * Export both the KASAN-instrumented entry points (memset/memcpy/memmove)
 * and the raw __mem*() variants so modules built with or without KASAN
 * instrumentation can link against the matching implementation.
 */
EXPORT_SYMBOL(memset);
EXPORT_SYMBOL(memcpy);
EXPORT_SYMBOL(memmove);
EXPORT_SYMBOL(__memset);
EXPORT_SYMBOL(__memcpy);
EXPORT_SYMBOL(__memmove);
+1 −0
Original line number Diff line number Diff line
@@ -2,5 +2,6 @@
# Always-built arch library routines (assembly implementations).
lib-y			+= delay.o
lib-y			+= memcpy.o
lib-y			+= memset.o
# memmove is required so KASAN (and other callers) get a real memmove.
lib-y			+= memmove.o
# User-space access helpers only make sense with an MMU.
lib-$(CONFIG_MMU)	+= uaccess.o
# 128-bit shift helpers are only needed on 64-bit builds.
lib-$(CONFIG_64BIT)	+= tishift.o
+64 −0
Original line number Diff line number Diff line
/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/linkage.h>
#include <asm/asm.h>

/*
 * void *__memmove(void *dest, const void *src, size_t n)
 *
 * Overlap-safe copy of n bytes from src to dest, returning dest.
 * RISC-V calling convention: a0 = dest, a1 = src, a2 = n.
 *
 * Strategy: if dest < src copy forwards, otherwise copy backwards
 * from the end so an overlapping region is never clobbered before it
 * is read.  Word (lw/sw) copies are used only when both pointers are
 * 4-byte aligned; otherwise every byte is copied individually, since
 * misaligned loads/stores may trap or be emulated slowly on RISC-V.
 *
 * Clobbers: t0-t4 plus t3 reuse; a0-a2 are restored/consumed per ABI.
 */
ENTRY(__memmove)
WEAK(memmove)
        move    t0, a0                  /* t0 = dest, preserved for return */
        move    t1, a1                  /* t1 = src, restored at exit */

        beq     a0, a1, exit_memcpy     /* src == dest: nothing to do */
        beqz    a2, exit_memcpy         /* n == 0: nothing to do */

        /*
         * Pointers are unsigned addresses: use sltu, not slt.  A signed
         * compare picks the wrong direction when dest and src straddle
         * the sign boundary, corrupting overlapping copies.
         */
        sltu    t3, a0, a1
        beqz    t3, do_reverse          /* dest >= src: copy backwards */

        /* Forward copy (dest < src).  t4 = +1 byte stride. */
        li      t4, 1

        /*
         * Word copy is only safe when dest and src are both 4-byte
         * aligned; otherwise fall through to byte_copy with a2 still
         * holding the full count.
         */
        or      t3, a0, a1
        andi    t3, t3, 0x3
        bnez    t3, byte_copy

        srli    t2, a2, 0x2             /* t2 = number of whole words */
        andi    a2, a2, 0x3             /* a2 = trailing byte count */
        beqz    t2, byte_copy           /* n < 4: bytes only (a2 != 0 here) */

word_copy:
        lw      t3, 0(a1)
        addi    t2, t2, -1
        addi    a1, a1, 4
        sw      t3, 0(a0)
        addi    a0, a0, 4
        bnez    t2, word_copy
        beqz    a2, exit_memcpy         /* no trailing bytes left */
        j       byte_copy

do_reverse:
        /* dest > src with possible overlap: copy from the end down. */
        add     a0, a0, a2              /* a0/a1 = one past the end */
        add     a1, a1, a2
        li      t4, -1                  /* -1 byte stride for byte_copy */

        /* Reverse word copy needs the end pointers word-aligned. */
        or      t3, a0, a1
        andi    t3, t3, 0x3
        bnez    t3, reverse_byte_copy   /* a2 still holds the full count */

        srli    t2, a2, 0x2             /* t2 = number of whole words */
        andi    a2, a2, 0x3             /* a2 = leading byte count */
        beqz    t2, reverse_byte_copy   /* n < 4: bytes only (a2 != 0 here) */

reverse_word_copy:
        addi    a1, a1, -4
        addi    t2, t2, -1
        lw      t3, 0(a1)
        addi    a0, a0, -4
        sw      t3, 0(a0)
        bnez    t2, reverse_word_copy
        beqz    a2, exit_memcpy         /* no leading bytes left */

reverse_byte_copy:
        addi    a0, a0, -1              /* step back onto the last byte */
        addi    a1, a1, -1

byte_copy:
        /* Copy a2 bytes, stepping by t4 (+1 forward, -1 reverse). */
        lb      t3, 0(a1)
        addi    a2, a2, -1
        sb      t3, 0(a0)
        add     a1, a1, t4
        add     a0, a0, t4
        bnez    a2, byte_copy

exit_memcpy:
        move    a0, t0                  /* return the original dest */
        move    a1, t1
        ret
END(__memmove)