Revert most of ad's movs/stos change. Instead, do something a lot simpler:

declare svs_quad_copy(), used by SVS only, with no need for instrumentation,
because SVS is disabled when sanitizers are on.
This commit is contained in:
maxv 2020-07-19 07:35:08 +00:00
parent 58d7a688ba
commit 95a0a18880
5 changed files with 15 additions and 50 deletions

View File

@ -1,4 +1,4 @@
/* $NetBSD: cpufunc.S,v 1.63 2020/06/24 18:09:37 maxv Exp $ */ /* $NetBSD: cpufunc.S,v 1.64 2020/07/19 07:35:08 maxv Exp $ */
/* /*
* Copyright (c) 1998, 2007, 2008, 2020 The NetBSD Foundation, Inc. * Copyright (c) 1998, 2007, 2008, 2020 The NetBSD Foundation, Inc.
@ -446,10 +446,13 @@ ENTRY(outl)
ret ret
END(outl) END(outl)
ENTRY(x86_movs) /*
* Used by SVS only, to make an atomic but fast copy. Doesn't have
* sanitizer instrumentation, but sanitizers disable SVS, so no problem.
*/
ENTRY(svs_quad_copy)
movq %rdx,%rcx movq %rdx,%rcx
KMSAN_REP_STOS(8)
rep rep
movsq movsq
ret ret
END(x86_movs) END(svs_quad_copy)

View File

@ -1,4 +1,4 @@
/* $NetBSD: frameasm.h,v 1.51 2020/06/21 16:53:37 bouyer Exp $ */ /* $NetBSD: frameasm.h,v 1.52 2020/07/19 07:35:08 maxv Exp $ */
#ifndef _AMD64_MACHINE_FRAMEASM_H #ifndef _AMD64_MACHINE_FRAMEASM_H
#define _AMD64_MACHINE_FRAMEASM_H #define _AMD64_MACHINE_FRAMEASM_H
@ -276,33 +276,11 @@
popq %rdx ; \ popq %rdx ; \
popq %rcx ; \ popq %rcx ; \
popq %rax popq %rax
#define KMSAN_REP_STOS(scale) \
pushq %rax ; \
pushq %rcx ; \
pushq %rdx ; \
pushq %rsi ; \
pushq %rdi ; \
pushq %r8 ; \
pushq %r9 ; \
pushq %r10 ; \
pushq %r11 ; \
leaq (,%rcx,scale),%rsi ; \
callq _C_LABEL(__msan_instrument_asm_store); \
popq %r11 ; \
popq %r10 ; \
popq %r9 ; \
popq %r8 ; \
popq %rdi ; \
popq %rsi ; \
popq %rdx ; \
popq %rcx ; \
popq %rax
#else #else
#define KMSAN_ENTER /* nothing */ #define KMSAN_ENTER /* nothing */
#define KMSAN_LEAVE /* nothing */ #define KMSAN_LEAVE /* nothing */
#define KMSAN_INIT_ARG(sz) /* nothing */ #define KMSAN_INIT_ARG(sz) /* nothing */
#define KMSAN_INIT_RET(sz) /* nothing */ #define KMSAN_INIT_RET(sz) /* nothing */
#define KMSAN_REP_STOS(scale) /* nothing */
#endif #endif
#ifdef KCOV #ifdef KCOV

View File

@ -1,4 +1,4 @@
/* $NetBSD: cpufunc.S,v 1.48 2020/06/24 18:09:37 maxv Exp $ */ /* $NetBSD: cpufunc.S,v 1.49 2020/07/19 07:35:08 maxv Exp $ */
/*- /*-
* Copyright (c) 1998, 2007, 2020 The NetBSD Foundation, Inc. * Copyright (c) 1998, 2007, 2020 The NetBSD Foundation, Inc.
@ -38,7 +38,7 @@
#include <sys/errno.h> #include <sys/errno.h>
#include <machine/asm.h> #include <machine/asm.h>
__KERNEL_RCSID(0, "$NetBSD: cpufunc.S,v 1.48 2020/06/24 18:09:37 maxv Exp $"); __KERNEL_RCSID(0, "$NetBSD: cpufunc.S,v 1.49 2020/07/19 07:35:08 maxv Exp $");
#include "opt_xen.h" #include "opt_xen.h"
@ -353,19 +353,3 @@ ENTRY(outl)
outl %eax, %dx outl %eax, %dx
ret ret
END(outl) END(outl)
ENTRY(x86_movs)
pushl %ebp
movl %esp,%ebp
pushl %edi
pushl %esi
movl 8(%ebp),%edi
movl 12(%ebp),%esi
movl 16(%ebp),%ecx
rep
movsl
popl %esi
popl %edi
leave
ret
END(x86_movs)

View File

@ -1,4 +1,4 @@
/* $NetBSD: pmap.h,v 1.124 2020/07/14 00:45:53 yamaguchi Exp $ */ /* $NetBSD: pmap.h,v 1.125 2020/07/19 07:35:08 maxv Exp $ */
/* /*
* Copyright (c) 1997 Charles D. Cranor and Washington University. * Copyright (c) 1997 Charles D. Cranor and Washington University.
@ -612,7 +612,7 @@ extern vaddr_t pmap_direct_end;
#endif /* __HAVE_DIRECT_MAP */ #endif /* __HAVE_DIRECT_MAP */
void x86_movs(void *, void *, long); void svs_quad_copy(void *, void *, long);
#endif /* _KERNEL */ #endif /* _KERNEL */

View File

@ -1,4 +1,4 @@
/* $NetBSD: svs.c,v 1.38 2020/07/14 00:45:53 yamaguchi Exp $ */ /* $NetBSD: svs.c,v 1.39 2020/07/19 07:35:08 maxv Exp $ */
/* /*
* Copyright (c) 2018-2020 The NetBSD Foundation, Inc. * Copyright (c) 2018-2020 The NetBSD Foundation, Inc.
@ -30,7 +30,7 @@
*/ */
#include <sys/cdefs.h> #include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: svs.c,v 1.38 2020/07/14 00:45:53 yamaguchi Exp $"); __KERNEL_RCSID(0, "$NetBSD: svs.c,v 1.39 2020/07/19 07:35:08 maxv Exp $");
#include "opt_svs.h" #include "opt_svs.h"
#include "opt_user_ldt.h" #include "opt_user_ldt.h"
@ -704,7 +704,7 @@ svs_pdir_switch(struct pmap *pmap)
/* Copy user slots. */ /* Copy user slots. */
mutex_enter(&ci->ci_svs_mtx); mutex_enter(&ci->ci_svs_mtx);
x86_movs(ci->ci_svs_updir, pmap->pm_pdir, PDIR_SLOT_USERLIM); svs_quad_copy(ci->ci_svs_updir, pmap->pm_pdir, PDIR_SLOT_USERLIM);
mutex_exit(&ci->ci_svs_mtx); mutex_exit(&ci->ci_svs_mtx);
if (svs_pcid) { if (svs_pcid) {