Remove unused symbol __aarch64

This was added as a possible variant of __aarch64__ back when 64-bit
ARM was vaporware.  It hasn't shown up in the wild since then, so
remove.

Nathan Bossart

Discussion: https://www.postgresql.org/message-id/CAFBsxsEN5nW3uRh%3Djrs-QexDrC1btu0ZfriD3FFfb%3D3J6tAngg%40mail.gmail.com
parent b4ddf3ee30
commit 4112e39f70
@@ -63,8 +63,7 @@
  * compiler barrier.
  *
  */
-#if defined(__arm__) || defined(__arm) || \
-	defined(__aarch64__) || defined(__aarch64)
+#if defined(__arm__) || defined(__arm) || defined(__aarch64__)
 #include "port/atomics/arch-arm.h"
 #elif defined(__i386__) || defined(__i386) || defined(__x86_64__)
 #include "port/atomics/arch-x86.h"
@@ -21,7 +21,7 @@
  * 64 bit atomics on ARM32 are implemented using kernel fallbacks and thus
  * might be slow, so disable entirely. On ARM64 that problem doesn't exist.
  */
-#if !defined(__aarch64__) && !defined(__aarch64)
+#if !defined(__aarch64__)
 #define PG_DISABLE_64_BIT_ATOMICS
 #else
 /*
@@ -29,4 +29,4 @@
  * general purpose register is atomic.
  */
 #define PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY
-#endif							/* __aarch64__ || __aarch64 */
+#endif							/* __aarch64__ */
@@ -256,7 +256,7 @@ spin_delay(void)
  * We use the int-width variant of the builtin because it works on more chips
  * than other widths.
  */
-#if defined(__arm__) || defined(__arm) || defined(__aarch64__) || defined(__aarch64)
+#if defined(__arm__) || defined(__arm) || defined(__aarch64__)
 #ifdef HAVE_GCC__SYNC_INT32_TAS
 #define HAS_TEST_AND_SET
 
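For context on the HAVE_GCC__SYNC_INT32_TAS branch kept by the hunk above: it builds the spinlock's tas() on GCC's int-width __sync builtins. Below is a minimal sketch of that style of test-and-set lock, assuming a GCC/Clang compiler; the my_* names are illustrative, not PostgreSQL's.

/*
 * Sketch only: int-width test-and-set spinlock using the __sync builtins
 * that the HAVE_GCC__SYNC_INT32_TAS path relies on.
 */
#include <stdio.h>

typedef int my_slock_t;

static int
my_tas(volatile my_slock_t *lock)
{
	/* returns the previous value: nonzero means the lock was already held */
	return __sync_lock_test_and_set(lock, 1);
}

static void
my_unlock(volatile my_slock_t *lock)
{
	/* stores 0 with release semantics */
	__sync_lock_release(lock);
}

int
main(void)
{
	my_slock_t	lock = 0;

	if (my_tas(&lock) == 0)
		puts("acquired");
	if (my_tas(&lock) != 0)
		puts("second attempt fails while held, as expected");
	my_unlock(&lock);
	return 0;
}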
@@ -277,7 +277,7 @@ tas(volatile slock_t *lock)
  * high-core-count ARM64 processors. It seems mostly a wash for smaller gear,
  * and ISB doesn't exist at all on pre-v7 ARM chips.
  */
-#if defined(__aarch64__) || defined(__aarch64)
+#if defined(__aarch64__)
 
 #define SPIN_DELAY() spin_delay()
 
@@ -288,9 +288,9 @@ spin_delay(void)
 		" isb;				\n");
 }
 
-#endif							/* __aarch64__ || __aarch64 */
+#endif							/* __aarch64__ */
 #endif							/* HAVE_GCC__SYNC_INT32_TAS */
-#endif							/* __arm__ || __arm || __aarch64__ || __aarch64 */
+#endif							/* __arm__ || __arm || __aarch64__ */
 
 
 /*
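Not part of the patch: a small sketch of the ARM64-only delay pattern the last hunks preserve, where an ISB instruction serves as the busy-wait pause. It compiles the asm only under __aarch64__ and falls back to a no-op elsewhere; the my_* names are illustrative.

/*
 * Sketch only: ISB as a spin-wait delay, guarded by __aarch64__ just as
 * in the diff above.
 */
#include <stdio.h>

static inline void
my_spin_delay(void)
{
#if defined(__aarch64__)
	__asm__ __volatile__(" isb;	\n");
#endif
}

int
main(void)
{
	for (int i = 0; i < 1000; i++)
		my_spin_delay();
	puts("spun 1000 times");
	return 0;
}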