From 11c0c6c2f50e2fc096441bc97852c049b4fa18d3 Mon Sep 17 00:00:00 2001
From: Martin Whitaker
Date: Thu, 23 Dec 2021 09:46:01 +0000
Subject: [PATCH] Use atomic memory read/write functions in tests.

This ensures compiler optimisations won't interfere with the tests.
---
 system/memrw64.h       | 60 ++++++++++++++++++++++++++++++++++++++++++
 tests/addr_walk1.c     | 10 +++----
 tests/bit_fade.c       |  6 ++---
 tests/block_move.c     | 40 +++++++++++++++-------------
 tests/modulo_n.c       |  8 +++---
 tests/mov_inv_fixed.c  | 12 ++++-----
 tests/mov_inv_random.c |  8 +++---
 tests/mov_inv_walk1.c  | 12 ++++-----
 tests/own_addr.c       |  6 ++---
 tests/test_helper.h    | 15 ++++++++++-
 10 files changed, 126 insertions(+), 51 deletions(-)
 create mode 100644 system/memrw64.h

diff --git a/system/memrw64.h b/system/memrw64.h
new file mode 100644
index 0000000..cc507bb
--- /dev/null
+++ b/system/memrw64.h
@@ -0,0 +1,60 @@
+// SPDX-License-Identifier: GPL-2.0
+#ifndef MEMRW64_H
+#define MEMRW64_H
+/*
+ * Provides some 64-bit memory access functions. These stop the compiler
+ * optimizing accesses which need to be ordered and atomic. Mostly used
+ * for accessing memory-mapped hardware registers.
+ *
+ * Copyright (C) 2021 Martin Whitaker.
+ */
+
+#include <stdint.h>
+
+/*
+ * Reads and returns the value stored in the 64-bit memory location pointed
+ * to by ptr.
+ */
+static inline uint64_t read64(const volatile uint64_t *ptr)
+{
+    uint64_t val;
+    __asm__ __volatile__(
+        "movq %1, %0"
+        : "=r" (val)
+        : "m" (*ptr)
+        : "memory"
+    );
+    return val;
+}
+
+/*
+ * Writes val to the 64-bit memory location pointed to by ptr.
+ */
+static inline void write64(const volatile uint64_t *ptr, uint64_t val)
+{
+    __asm__ __volatile__(
+        "movq %1, %0"
+        :
+        : "m" (*ptr),
+          "r" (val)
+        : "memory"
+    );
+}
+
+/*
+ * Writes val to the 64-bit memory location pointed to by ptr. Reads it
+ * back (and discards it) to ensure the write is complete.
+ */
+static inline void flush64(const volatile uint64_t *ptr, uint64_t val)
+{
+    __asm__ __volatile__(
+        "movq %1, %0\n"
+        "movq %0, %1"
+        :
+        : "m" (*ptr),
+          "r" (val)
+        : "memory"
+    );
+}
+
+#endif // MEMRW64_H
diff --git a/tests/addr_walk1.c b/tests/addr_walk1.c
index ee4bdc8..06a8ef9 100644
--- a/tests/addr_walk1.c
+++ b/tests/addr_walk1.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -56,7 +56,7 @@ int test_addr_walk1(int my_vcpu)
                 break;
             }
             testword_t expect = invert ^ (testword_t)p1;
-            *p1 = expect;
+            write_word(p1, expect);

             // Walking one on our second address.
             uintptr_t mask2 = sizeof(testword_t);
@@ -69,12 +69,12 @@ int test_addr_walk1(int my_vcpu)
                 if (p2 > (testword_t *)pe) {
                     break;
                 }
-                *p2 = ~invert ^ (testword_t)p2;
+                write_word(p2, ~invert ^ (testword_t)p2);

-                testword_t actual = *p1;
+                testword_t actual = read_word(p1);
                 if (unlikely(actual != expect)) {
                     addr_error(p1, p2, expect, actual);
-                    *p1 = expect; // recover from error
+                    write_word(p1, expect); // recover from error
                 }
             } while (mask2);

diff --git a/tests/bit_fade.c b/tests/bit_fade.c
index 1b52b20..d9feb1c 100644
--- a/tests/bit_fade.c
+++ b/tests/bit_fade.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -59,7 +59,7 @@ static int pattern_fill(int my_vcpu, testword_t pattern)
             }
             test_addr[my_vcpu] = (uintptr_t)p;
             do {
-                *p = pattern;
+                write_word(p, pattern);
             } while (p++ < pe); // test before increment in case pointer overflows
             do_tick(my_vcpu);
             BAILOUT;
@@ -95,7 +95,7 @@ static int pattern_check(int my_vcpu, testword_t pattern)
             }
             test_addr[my_vcpu] = (uintptr_t)p;
             do {
-                testword_t actual = *p;
+                testword_t actual = read_word(p);
                 if (unlikely(actual != pattern)) {
                     data_error(p, pattern, actual, true);
                 }
diff --git a/tests/block_move.c b/tests/block_move.c
index 4da3a32..603dd17 100644
--- a/tests/block_move.c
+++ b/tests/block_move.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -60,22 +60,22 @@ int test_block_move(int my_vcpu, int iterations)
         testword_t pattern1 = 1;
         do {
             testword_t pattern2 = ~pattern1;
-            p[ 0] = pattern1;
-            p[ 1] = pattern1;
-            p[ 2] = pattern1;
-            p[ 3] = pattern1;
-            p[ 4] = pattern2;
-            p[ 5] = pattern2;
-            p[ 6] = pattern1;
-            p[ 7] = pattern1;
-            p[ 8] = pattern1;
-            p[ 9] = pattern1;
-            p[10] = pattern2;
-            p[11] = pattern2;
-            p[12] = pattern1;
-            p[13] = pattern1;
-            p[14] = pattern2;
-            p[15] = pattern2;
+            write_word(p + 0, pattern1);
+            write_word(p + 1, pattern1);
+            write_word(p + 2, pattern1);
+            write_word(p + 3, pattern1);
+            write_word(p + 4, pattern2);
+            write_word(p + 5, pattern2);
+            write_word(p + 6, pattern1);
+            write_word(p + 7, pattern1);
+            write_word(p + 8, pattern1);
+            write_word(p + 9, pattern1);
+            write_word(p + 10, pattern2);
+            write_word(p + 11, pattern2);
+            write_word(p + 12, pattern1);
+            write_word(p + 13, pattern1);
+            write_word(p + 14, pattern2);
+            write_word(p + 15, pattern2);
             pattern1 = pattern1 << 1 | pattern1 >> (TESTWORD_WIDTH - 1); // rotate left
         } while (p <= (pe - 16) && (p += 16)); // test before increment in case pointer overflows
         do_tick(my_vcpu);
@@ -219,8 +219,10 @@ int test_block_move(int my_vcpu, int iterations)
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            if (unlikely(p[0] != p[1])) {
-                data_error(p, p[0], p[1], false);
+            testword_t p0 = read_word(p + 0);
+            testword_t p1 = read_word(p + 1);
+            if (unlikely(p0 != p1)) {
+                data_error(p, p0, p1, false);
             }
         } while (p <= (pe - 2) && (p += 2)); // test before increment in case pointer overflows
         do_tick(my_vcpu);
diff --git a/tests/modulo_n.c b/tests/modulo_n.c
index d8e04b6..e58c173 100644
--- a/tests/modulo_n.c
+++ b/tests/modulo_n.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -59,7 +59,7 @@ int test_modulo_n(int my_vcpu, int iterations, testword_t pattern1, testword_t p
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            *p = pattern1;
+            write_word(p, pattern1);
         } while (p <= (pe - n) && (p += n)); // test before increment in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
@@ -92,7 +92,7 @@ int test_modulo_n(int my_vcpu, int iterations, testword_t pattern1, testword_t p
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
             if (k != offset) {
-                *p = pattern2;
+                write_word(p, pattern2);
             }
             k++;
             if (k == n) {
@@ -129,7 +129,7 @@ int test_modulo_n(int my_vcpu, int iterations, testword_t pattern1, testword_t p
         }
         test_addr[my_vcpu] = (uintptr_t)p;
        do {
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != pattern1)) {
                 data_error(p, pattern1, actual, true);
             }
diff --git a/tests/mov_inv_fixed.c b/tests/mov_inv_fixed.c
index 2ccc782..a1ced74 100644
--- a/tests/mov_inv_fixed.c
+++ b/tests/mov_inv_fixed.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -83,7 +83,7 @@ int test_mov_inv_fixed(int my_vcpu, int iterations, testword_t pattern1, testwor
 #endif
 #else
         do {
-            *p = pattern1;
+            write_word(p, pattern1);
         } while (p++ < pe); // test before increment in case pointer overflows
 #endif
         do_tick(my_vcpu);
@@ -116,11 +116,11 @@ int test_mov_inv_fixed(int my_vcpu, int iterations, testword_t pattern1, testwor
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != pattern1)) {
                 data_error(p, pattern1, actual, true);
             }
-            *p = pattern2;
+            write_word(p, pattern2);
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
@@ -149,11 +149,11 @@ int test_mov_inv_fixed(int my_vcpu, int iterations, testword_t pattern1, testwor
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != pattern2)) {
                 data_error(p, pattern2, actual, true);
             }
-            *p = pattern1;
+            write_word(p, pattern1);
         } while (p-- > ps); // test before decrement in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
diff --git a/tests/mov_inv_random.c b/tests/mov_inv_random.c
index e226481..4ecbb92 100644
--- a/tests/mov_inv_random.c
+++ b/tests/mov_inv_random.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -69,7 +69,7 @@ int test_mov_inv_random(int my_vcpu)
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            *p = random(my_vcpu);
+            write_word(p, random(my_vcpu));
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
@@ -104,11 +104,11 @@ int test_mov_inv_random(int my_vcpu)
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
             testword_t expect = random(my_vcpu) ^ invert;
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != expect)) {
                 data_error(p, expect, actual, true);
             }
-            *p = ~expect;
+            write_word(p, ~expect);
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
diff --git a/tests/mov_inv_walk1.c b/tests/mov_inv_walk1.c
index 0405ce5..ef68539 100644
--- a/tests/mov_inv_walk1.c
+++ b/tests/mov_inv_walk1.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -60,7 +60,7 @@ int test_mov_inv_walk1(int my_vcpu, int iterations, int offset, bool inverse)
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            *p = inverse ? ~pattern : pattern;
+            write_word(p, inverse ? ~pattern : pattern);
             pattern = pattern << 1 | pattern >> (TESTWORD_WIDTH - 1); // rotate left
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
@@ -96,11 +96,11 @@ int test_mov_inv_walk1(int my_vcpu, int iterations, int offset, bool inverse)
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
             testword_t expect = inverse ? ~pattern : pattern;
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != expect)) {
                 data_error(p, expect, actual, true);
             }
-            *p = ~expect;
+            write_word(p, ~expect);
             pattern = pattern << 1 | pattern >> (TESTWORD_WIDTH - 1); // rotate left
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
@@ -132,11 +132,11 @@ int test_mov_inv_walk1(int my_vcpu, int iterations, int offset, bool inverse)
         do {
             pattern = pattern >> 1 | pattern << (TESTWORD_WIDTH - 1); // rotate right
             testword_t expect = inverse ? pattern : ~pattern;
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != expect)) {
                 data_error(p, expect, actual, true);
             }
-            *p = ~expect;
+            write_word(p, ~expect);
         } while (p-- > ps); // test before decrement in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
diff --git a/tests/own_addr.c b/tests/own_addr.c
index f5f78bb..5f56f15 100644
--- a/tests/own_addr.c
+++ b/tests/own_addr.c
@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-2.0
-// Copyright (C) 2020 Martin Whitaker.
+// Copyright (C) 2020-2021 Martin Whitaker.
 //
 // Derived from an extract of memtest86+ test.c:
 //
@@ -58,7 +58,7 @@ static int pattern_fill(int my_vcpu, testword_t offset)
         }
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
-            *p = (testword_t)p + offset;
+            write_word(p, (testword_t)p + offset);
         } while (p++ < pe); // test before increment in case pointer overflows
         do_tick(my_vcpu);
         BAILOUT;
@@ -96,7 +96,7 @@ static int pattern_check(int my_vcpu, testword_t offset)
         test_addr[my_vcpu] = (uintptr_t)p;
         do {
             testword_t expect = (testword_t)p + offset;
-            testword_t actual = *p;
+            testword_t actual = read_word(p);
             if (unlikely(actual != expect)) {
                 data_error(p, expect, actual, true);
             }
diff --git a/tests/test_helper.h b/tests/test_helper.h
index c758f81..195e7ca 100644
--- a/tests/test_helper.h
+++ b/tests/test_helper.h
@@ -5,7 +5,7 @@
  * Provides some common definitions and helper functions for the memory
  * tests.
  *
- * Copyright (C) 2020 Martin Whitaker.
+ * Copyright (C) 2020-2021 Martin Whitaker.
  */

 #include <stdbool.h>
@@ -13,6 +13,19 @@

 #include "test.h"

+/*
+ * Test word atomic read and write functions.
+ */
+#ifdef __x86_64__
+#include "memrw64.h"
+#define read_word read64
+#define write_word write64
+#else
+#include "memrw32.h"
+#define read_word read32
+#define write_word write32
+#endif
+
 /*
  * A wrapper for guiding branch prediction.
  */
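
Note for reviewers: tests/test_helper.h above maps read_word/write_word onto
read32/write32 from memrw32.h on 32-bit builds, but that header is outside the
scope of this patch. For reference, below is a minimal sketch of what such a
header could look like, mirroring the read64/write64 pattern in
system/memrw64.h with 32-bit accesses. The file and function names are taken
from the #defines above; the body itself is an illustrative assumption, not
the file shipped in the repository.

// Hypothetical sketch only -- mirrors system/memrw64.h with 32-bit movl.
#ifndef MEMRW32_H
#define MEMRW32_H

#include <stdint.h>

/*
 * Reads and returns the value stored in the 32-bit memory location pointed
 * to by ptr.
 */
static inline uint32_t read32(const volatile uint32_t *ptr)
{
    uint32_t val;
    __asm__ __volatile__(
        "movl %1, %0"
        : "=r" (val)
        : "m" (*ptr)
        : "memory"
    );
    return val;
}

/*
 * Writes val to the 32-bit memory location pointed to by ptr.
 */
static inline void write32(const volatile uint32_t *ptr, uint32_t val)
{
    __asm__ __volatile__(
        "movl %1, %0"
        :
        : "m" (*ptr),
          "r" (val)
        : "memory"
    );
}

#endif // MEMRW32_H

As with the 64-bit versions, issuing a single mov through an "m" operand with
a "memory" clobber keeps each test access to one naturally aligned instruction
that the compiler cannot elide, split, combine, or reorder past other memory
accesses, which is what the tests rely on.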