smp: aarch64: Don't needlessly invalidate the data cache
Also get rid of the clean + invalidate helper so as not to be tempted by it.
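For context, the retained clean-only helper presumably mirrors the removed routine, substituting dc cvac (clean data cache by VA to the Point of Coherency) for dc civac (clean and invalidate). A minimal sketch under that assumption:

    // Sketch only: assumes clean_dcache_poc has the same shape as the removed
    // clean_inval_dcache_poc, with "dc cvac" (clean to PoC) replacing "dc civac".
    inline void clean_dcache_poc(uintptr_t start, uintptr_t end) {
        size_t dsz = dcache_line_size();

        // Round the start address down to a cache-line boundary.
        uintptr_t addr = start & ~(dsz - 1);
        while (addr < end) {
            // Write this line back to the Point of Coherency; no invalidate,
            // so the local (now clean) copy stays usable.
            asm volatile ("dc cvac, %0" :: "r"(addr) : "memory");
            addr += dsz;
        }

        // Ensure the maintenance completes before continuing.
        asm volatile ("dsb sy\n\tisb");
    }

On the boot path below, the booting CPU only has to push its writes out far enough for the (possibly cache-disabled) AP to observe them; discarding its own now-clean lines with an invalidate gains nothing, hence the switch to the clean-only call.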
This commit is contained in:
parent ba5d63b82a
commit 50dab6c2f1
@@ -23,7 +23,6 @@ extern void delay(uint64_t cycles);
 
 extern size_t icache_line_size(void);
 extern size_t dcache_line_size(void);
-extern void clean_inval_dcache_poc(uintptr_t start, uintptr_t end);
 extern void clean_dcache_poc(uintptr_t start, uintptr_t end);
 extern void inval_icache_pou(uintptr_t start, uintptr_t end);
 extern int current_el(void);
@@ -252,19 +252,6 @@ inline size_t dcache_line_size(void) {
     return ((ctr >> 16) & 0b1111) << 4;
 }
 
-// Clean and invalidate D-Cache to Point of Coherency
-inline void clean_inval_dcache_poc(uintptr_t start, uintptr_t end) {
-    size_t dsz = dcache_line_size();
-
-    uintptr_t addr = start & ~(dsz - 1);
-    while (addr < end) {
-        asm volatile ("dc civac, %0" :: "r"(addr) : "memory");
-        addr += dsz;
-    }
-
-    asm volatile ("dsb sy\n\tisb");
-}
-
 // Clean D-Cache to Point of Coherency
 inline void clean_dcache_poc(uintptr_t start, uintptr_t end) {
     size_t dsz = dcache_line_size();
@@ -366,7 +366,7 @@ static bool try_start_ap(int boot_method, uint64_t method_ptr,
     // Additionally, the newly-booted AP may have caches disabled which implies
     // it possibly does not see our cache contents either.
 
-    clean_inval_dcache_poc((uintptr_t)trampoline, (uintptr_t)trampoline + 0x1000);
+    clean_dcache_poc((uintptr_t)trampoline, (uintptr_t)trampoline + 0x1000);
     inval_icache_pou((uintptr_t)trampoline, (uintptr_t)trampoline + 0x1000);
 
     asm volatile ("" ::: "memory");
@@ -374,7 +374,7 @@ static bool try_start_ap(int boot_method, uint64_t method_ptr,
     switch (boot_method) {
     case BOOT_WITH_SPIN_TBL:
         *(volatile uint64_t *)method_ptr = (uint64_t)(uintptr_t)trampoline;
-        clean_inval_dcache_poc(method_ptr, method_ptr + 8);
+        clean_dcache_poc(method_ptr, method_ptr + 8);
         asm ("sev");
         break;
 