[arch][ops] define some global ARCH macros to be a bit more scoped

Instead of ICACHE/DCACHE/UCACHE, add the ARCH_CACHE_FLAG_ prefix to be a
little cleaner and not collide with anything else.

No functional change.
This commit is contained in:
Travis Geiselbrecht
2024-05-09 19:28:56 -07:00
parent 6a3db09e55
commit d3cd5be13e
7 changed files with 28 additions and 29 deletions

View File

@@ -92,7 +92,7 @@ void arch_early_init(void) {
#endif
#if ARM_WITH_CACHE
- arch_enable_cache(UCACHE);
+ arch_enable_cache(ARCH_CACHE_FLAG_UCACHE);
#endif
}
@@ -111,7 +111,7 @@ void arch_init(void) {
void arch_quiesce(void) {
#if ARM_WITH_CACHE
- arch_disable_cache(UCACHE);
+ arch_disable_cache(ARCH_CACHE_FLAG_UCACHE);
#endif
}

View File

@@ -19,18 +19,18 @@
/* cache flushing routines for cortex-m cores that support it */
void arch_disable_cache(uint flags) {
- if (flags & DCACHE)
+ if (flags & ARCH_CACHE_FLAG_DCACHE)
SCB_DisableDCache();
- if (flags & ICACHE)
+ if (flags & ARCH_CACHE_FLAG_ICACHE)
SCB_DisableICache();
}
void arch_enable_cache(uint flags) {
- if (flags & DCACHE)
+ if (flags & ARCH_CACHE_FLAG_DCACHE)
SCB_EnableDCache();
- if (flags & ICACHE)
+ if (flags & ARCH_CACHE_FLAG_ICACHE)
SCB_EnableICache();
}

View File

@@ -57,7 +57,7 @@ volatile int secondaries_to_init = 0;
void arch_early_init(void) {
/* turn off the cache */
- arch_disable_cache(UCACHE);
+ arch_disable_cache(ARCH_CACHE_FLAG_UCACHE);
#if WITH_DEV_CACHE_PL310
pl310_set_enable(false);
#endif
@@ -80,7 +80,7 @@ void arch_early_init(void) {
#if WITH_DEV_CACHE_PL310
pl310_set_enable(true);
#endif
- arch_enable_cache(UCACHE);
+ arch_enable_cache(ARCH_CACHE_FLAG_UCACHE);
}
void arch_init(void) {
@@ -155,7 +155,7 @@ void arm_secondary_entry(uint asm_cpu_num) {
arm_basic_setup();
/* enable the local L1 cache */
- //arch_enable_cache(UCACHE);
+ //arch_enable_cache(ARCH_CACHE_FLAG_UCACHE);
// XXX may not be safe, but just hard enable i and d cache here
// at the moment cannot rely on arch_enable_cache not dumping the L2
@@ -365,7 +365,7 @@ void arch_chain_load(void *entry, ulong arg0, ulong arg1, ulong arg2, ulong arg3
#endif
LTRACEF("disabling instruction/data cache\n");
- arch_disable_cache(UCACHE);
+ arch_disable_cache(ARCH_CACHE_FLAG_UCACHE);
#if WITH_DEV_CACHE_PL310
pl310_set_enable(false);
#endif

View File

@@ -24,7 +24,7 @@ FUNCTION(arch_disable_cache)
cpsid iaf // interrupts disabled
.Ldcache_disable:
- tst r0, #DCACHE
+ tst r0, #ARCH_CACHE_FLAG_DCACHE
beq .Licache_disable
mrc p15, 0, r1, c1, c0, 0 // cr1
tst r1, #(1<<2) // is the dcache already disabled?
@@ -37,7 +37,7 @@ FUNCTION(arch_disable_cache)
mcr p15, 0, r0, c7, c10, 4 // data sync barrier (formerly drain write buffer)
.Licache_disable:
- tst r0, #ICACHE
+ tst r0, #ARCH_CACHE_FLAG_ICACHE
beq .Ldone_disable
mrc p15, 0, r1, c1, c0, 0 // cr1
@@ -57,7 +57,7 @@ FUNCTION(arch_enable_cache)
cpsid iaf // interrupts disabled
.Ldcache_enable:
- tst r0, #DCACHE
+ tst r0, #ARCH_CACHE_FLAG_DCACHE
beq .Licache_enable
mrc p15, 0, r1, c1, c0, 0 // cr1
tst r1, #(1<<2) // is the dcache already enabled?
@@ -69,7 +69,7 @@ FUNCTION(arch_enable_cache)
mcr p15, 0, r1, c1, c0, 0 // enable dcache
.Licache_enable:
- tst r0, #ICACHE
+ tst r0, #ARCH_CACHE_FLAG_ICACHE
beq .Ldone_enable
mcr p15, 0, r12, c7, c5, 0 // invalidate icache
@@ -94,7 +94,7 @@ FUNCTION(arch_disable_cache)
cpsid iaf // interrupts disabled
.Ldcache_disable:
- tst r7, #DCACHE
+ tst r7, #ARCH_CACHE_FLAG_DCACHE
beq .Licache_disable
mrc p15, 0, r0, c1, c0, 0 // cr1
tst r0, #(1<<2) // is the dcache already disabled?
@@ -124,7 +124,7 @@ FUNCTION(arch_disable_cache)
#endif
.Licache_disable:
- tst r7, #ICACHE
+ tst r7, #ARCH_CACHE_FLAG_ICACHE
beq .Ldone_disable
mrc p15, 0, r0, c1, c0, 0 // cr1
@@ -149,7 +149,7 @@ FUNCTION(arch_enable_cache)
cpsid iaf // interrupts disabled
.Ldcache_enable:
- tst r7, #DCACHE
+ tst r7, #ARCH_CACHE_FLAG_DCACHE
beq .Licache_enable
mrc p15, 0, r0, c1, c0, 0 // cr1
tst r0, #(1<<2) // is the dcache already enabled?
@@ -171,7 +171,7 @@ FUNCTION(arch_enable_cache)
mcr p15, 0, r0, c1, c0, 0 // enable dcache
.Licache_enable:
- tst r7, #ICACHE
+ tst r7, #ARCH_CACHE_FLAG_ICACHE
beq .Ldone_enable
mov r0, #0

View File

@@ -29,12 +29,6 @@ static uint arch_curr_cpu_num(void);
/* Use to align structures on cache lines to avoid cpu aliasing. */
#define __CPU_ALIGN __ALIGNED(CACHE_LINE)
#endif // !ASSEMBLY
- #define ICACHE 1
- #define DCACHE 2
- #define UCACHE (ICACHE|DCACHE)
#ifndef ASSEMBLY
void arch_disable_cache(uint flags);
void arch_enable_cache(uint flags);
@@ -49,5 +43,10 @@ __END_CDECLS
#endif // !ASSEMBLY
+ /* for the above arch enable/disable routines */
+ #define ARCH_CACHE_FLAG_ICACHE 1
+ #define ARCH_CACHE_FLAG_DCACHE 2
+ #define ARCH_CACHE_FLAG_UCACHE (ARCH_CACHE_FLAG_ICACHE|ARCH_CACHE_FLAG_DCACHE)
#include <arch/arch_ops.h>

View File

@@ -52,9 +52,9 @@ void arch_invalidate_cache_all(void) {
void arch_disable_cache(uint flags) {
uint32_t sr = mfspr(OR1K_SPR_SYS_SR_ADDR);
- if (flags & ICACHE)
+ if (flags & ARCH_CACHE_FLAG_ICACHE)
sr &= ~OR1K_SPR_SYS_SR_ICE_MASK;
- if (flags & DCACHE)
+ if (flags & ARCH_CACHE_FLAG_DCACHE)
sr &= ~OR1K_SPR_SYS_SR_DCE_MASK;
mtspr(OR1K_SPR_SYS_SR_ADDR, sr);
@@ -63,9 +63,9 @@ void arch_disable_cache(uint flags) {
void arch_enable_cache(uint flags) {
uint32_t sr = mfspr(OR1K_SPR_SYS_SR_ADDR);
- if (flags & ICACHE)
+ if (flags & ARCH_CACHE_FLAG_ICACHE)
sr |= OR1K_SPR_SYS_SR_ICE_MASK;
- if (flags & DCACHE)
+ if (flags & ARCH_CACHE_FLAG_DCACHE)
sr |= OR1K_SPR_SYS_SR_DCE_MASK;
mtspr(OR1K_SPR_SYS_SR_ADDR, sr);

View File

@@ -442,7 +442,7 @@ FUNCTION(start)
l.jal arch_invalidate_cache_all
l.nop
l.jal arch_enable_cache
- l.ori r3, r0, UCACHE
+ l.ori r3, r0, ARCH_CACHE_FLAG_UCACHE
/* clear bss */
l.movhi r3, hi(__bss_start)