Commit 1ab557a074aaa1927f7532489a1b75137e245b70

Authored by Stephen Warren
Committed by Tom Warren
1 parent b9ae6415b6

armv8: add hooks for all cache-wide operations

SoC-specific logic may be required for all forms of cache-wide
operations: invalidate and flush of both dcache and icache (note that
only 3 of the 4 possible combinations make sense, since the icache never
contains dirty lines). This patch adds an optional hook for all
implemented cache-wide operations, and renames the one existing hook to
better represent exactly which operation it is implementing. A dummy
no-op implementation of each hook is provided.

Signed-off-by: Stephen Warren <swarren@nvidia.com>
Reviewed-by: Simon Glass <sjg@chromium.org>
Signed-off-by: Tom Warren <twarren@nvidia.com>

Showing 5 changed files with 27 additions and 11 deletions Side-by-side Diff

arch/arm/cpu/armv8/cache.S
... ... @@ -150,11 +150,23 @@
150 150 ret
151 151 ENDPROC(__asm_invalidate_icache_all)
152 152  
153   -ENTRY(__asm_flush_l3_cache)
  153 +ENTRY(__asm_invalidate_l3_dcache)
154 154 mov x0, #0 /* return status as success */
155 155 ret
156   -ENDPROC(__asm_flush_l3_cache)
157   - .weak __asm_flush_l3_cache
  156 +ENDPROC(__asm_invalidate_l3_dcache)
  157 + .weak __asm_invalidate_l3_dcache
  158 +
  159 +ENTRY(__asm_flush_l3_dcache)
  160 + mov x0, #0 /* return status as success */
  161 + ret
  162 +ENDPROC(__asm_flush_l3_dcache)
  163 + .weak __asm_flush_l3_dcache
  164 +
  165 +ENTRY(__asm_invalidate_l3_icache)
  166 + mov x0, #0 /* return status as success */
  167 + ret
  168 +ENDPROC(__asm_invalidate_l3_icache)
  169 + .weak __asm_invalidate_l3_icache
158 170  
159 171 /*
160 172 * void __asm_switch_ttbr(ulong new_ttbr)
arch/arm/cpu/armv8/cache_v8.c
... ... @@ -421,19 +421,20 @@
421 421 void invalidate_dcache_all(void)
422 422 {
423 423 __asm_invalidate_dcache_all();
  424 + __asm_invalidate_l3_dcache();
424 425 }
425 426  
426 427 /*
427 428 * Performs a clean & invalidation of the entire data cache at all levels.
428 429 * This function needs to be inline to avoid using stack.
429   - * __asm_flush_l3_cache return status of timeout
  430 + * __asm_flush_l3_dcache return status of timeout
430 431 */
431 432 inline void flush_dcache_all(void)
432 433 {
433 434 int ret;
434 435  
435 436 __asm_flush_dcache_all();
436   - ret = __asm_flush_l3_cache();
  437 + ret = __asm_flush_l3_dcache();
437 438 if (ret)
438 439 debug("flushing dcache returns 0x%x\n", ret);
439 440 else
... ... @@ -623,7 +624,7 @@
623 624  
624 625 void icache_enable(void)
625 626 {
626   - __asm_invalidate_icache_all();
  627 + invalidate_icache_all();
627 628 set_sctlr(get_sctlr() | CR_I);
628 629 }
629 630  
... ... @@ -640,6 +641,7 @@
640 641 void invalidate_icache_all(void)
641 642 {
642 643 __asm_invalidate_icache_all();
  644 + __asm_invalidate_l3_icache();
643 645 }
644 646  
645 647 #else /* CONFIG_SYS_ICACHE_OFF */
arch/arm/cpu/armv8/fsl-layerscape/lowlevel.S
... ... @@ -245,7 +245,7 @@
245 245  
246 246 ret
247 247  
248   -ENTRY(__asm_flush_l3_cache)
  248 +ENTRY(__asm_flush_l3_dcache)
249 249 /*
250 250 * Return status in x0
251 251 * success 0
... ... @@ -275,7 +275,7 @@
275 275 mov x0, x8
276 276 mov lr, x29
277 277 ret
278   -ENDPROC(__asm_flush_l3_cache)
  278 +ENDPROC(__asm_flush_l3_dcache)
279 279 #endif
280 280  
281 281 #ifdef CONFIG_MP
arch/arm/include/asm/system.h
... ... @@ -93,7 +93,9 @@
93 93 void __asm_flush_dcache_range(u64 start, u64 end);
94 94 void __asm_invalidate_tlb_all(void);
95 95 void __asm_invalidate_icache_all(void);
96   -int __asm_flush_l3_cache(void);
  96 +int __asm_invalidate_l3_dcache(void);
  97 +int __asm_flush_l3_dcache(void);
  98 +int __asm_invalidate_l3_icache(void);
97 99 void __asm_switch_ttbr(u64 new_ttbr);
98 100  
99 101 void armv8_switch_to_el2(void);
arch/arm/mach-tegra/tegra186/cache.S
... ... @@ -10,7 +10,7 @@
10 10 #define SMC_SIP_INVOKE_MCE 0x82FFFF00
11 11 #define MCE_SMC_ROC_FLUSH_CACHE (SMC_SIP_INVOKE_MCE | 11)
12 12  
13   -ENTRY(__asm_flush_l3_cache)
  13 +ENTRY(__asm_flush_l3_dcache)
14 14 mov x0, #(MCE_SMC_ROC_FLUSH_CACHE & 0xffff)
15 15 movk x0, #(MCE_SMC_ROC_FLUSH_CACHE >> 16), lsl #16
16 16 mov x1, #0
... ... @@ -22,5 +22,5 @@
22 22 smc #0
23 23 mov x0, #0
24 24 ret
25   -ENDPROC(__asm_flush_l3_cache)
  25 +ENDPROC(__asm_flush_l3_dcache)