Add cache maintenance functions which will be used by startup code to
initially set up the MMU.  Some day they may also be useful as part of
suspend/resume handling, when we get better at power management.

Submitted by: Svatopluk Kraus <onwahe@gmail.com>,
              Michal Meloun <meloun@miracle.cz>
Author: Ian Lepore
Date:   2014-12-28 18:38:25 +00:00
parent 8d59f6c4da
commit 6c1d9637b7
2 changed files with 201 additions and 0 deletions

sys/arm/arm/cpu_asm-v6.S (new file, 200 lines)
@@ -0,0 +1,200 @@
/*-
* Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
* Copyright 2014 Michal Meloun <meloun@miracle.cz>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD$
*/
#include <machine/acle-compat.h>
#include <machine/asm.h>
#include <machine/asmacros.h>
#include <machine/armreg.h>
#include <machine/sysreg.h>
#if __ARM_ARCH >= 7
/*
* Define cache functions used by startup code, which counts on the fact that
* only r0-r4, r12 (ip) are modified and no stack space is used.  These
* functions must be called with interrupts disabled and do not follow the
* ARM ABI (they cannot be called from C code).
* Moreover, they work only with caches integrated into the CPU (accessible
* via CP15).
*/
/* Invalidate D cache to PoC (i.e., all cache levels). */
ASENTRY(dcache_inv_poc_all)
mrc CP15_CLIDR(r0)
ands r0, r0, #0x07000000
mov r0, r0, lsr #23 /* Get LoC (naturally aligned) */
beq 4f
mov r0, #0 /* start at level 0 (L1); loop below walks levels upward */
1: mcr CP15_CSSELR(r0) /* set cache level */
isb
mrc CP15_CCSIDR(r0) /* read CCSIDR */
ubfx r2, r0, #13, #15 /* get num sets - 1 from CCSIDR */
ubfx r3, r0, #3, #10 /* get num ways - 1 from CCSIDR */
clz r1, r3 /* number of bits to MSB of way */
lsl r3, r3, r1 /* shift into position */
mov ip, #1
lsl ip, ip, r1 /* ip now contains the way decr */
ubfx r0, r0, #0, #3 /* get linesize from CCSIDR */
add r0, r0, #4 /* apply bias */
lsl r2, r2, r0 /* shift sets by log2(linesize) */
add r3, r3, r2 /* merge numsets - 1 with numways - 1 */
sub ip, ip, r2 /* subtract numsets - 1 from way decr */
mov r1, #1
lsl r1, r1, r0 /* r1 now contains the set decr */
mov r2, ip /* r2 now contains set way decr */
/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2: mcr CP15_DCISW(r3) /* invalidate line */
movs r0, r3 /* get current way/set */
beq 3f /* at 0 means we are done */
movs r0, r0, lsl #10 /* clear way bits, leaving only set bits */
subne r3, r3, r1 /* non-zero?, decrement set */
subeq r3, r3, r2 /* zero?, decrement way and restore set count */
b 2b
3:
mrc CP15_CSSELR(r0) /* get cache level */
add r0, r0, #2 /* next level */
mrc CP15_CLIDR(r1)
ands r1, r1, #0x07000000
mov r1, r1, lsr #23 /* Get LoC (naturally aligned) */
cmp r1, r0
bgt 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
isb
bx lr
END(dcache_inv_poc_all)
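The set/way arithmetic above is dense, so here is a rough C rendering of the
same walk (a sketch for illustration only, not part of the commit; the CP15
accessor wrappers are written here just to keep the sketch self-contained).
Note that the DC ISW operand architecturally carries the target level in bits
[3:1] alongside the way and set fields:

#include <stdint.h>

static inline uint32_t read_clidr(void)
{
	uint32_t v;
	__asm __volatile("mrc p15, 1, %0, c0, c0, 1" : "=r" (v));
	return (v);
}

static inline uint32_t read_ccsidr(void)
{
	uint32_t v;
	__asm __volatile("mrc p15, 1, %0, c0, c0, 0" : "=r" (v));
	return (v);
}

static inline void write_csselr(uint32_t v)
{
	__asm __volatile("mcr p15, 2, %0, c0, c0, 0" : : "r" (v));
	__asm __volatile("isb");
}

static inline void dc_isw(uint32_t setway)
{
	__asm __volatile("mcr p15, 0, %0, c7, c6, 2" : : "r" (setway));
}

/* C equivalent of dcache_inv_poc_all: invalidate every D-cache level to PoC. */
static void
dcache_inv_poc_all_c(void)
{
	uint32_t ccsidr, level, loc, lsize, wshift;
	uint32_t nsets, nways;
	int way, set;

	loc = (read_clidr() >> 24) & 0x7;	/* LoC: CLIDR bits [26:24] */
	for (level = 0; level < loc; level++) {
		write_csselr(level << 1);	/* select D/unified cache */
		ccsidr = read_ccsidr();
		nsets = ((ccsidr >> 13) & 0x7fff) + 1;
		nways = ((ccsidr >> 3) & 0x3ff) + 1;
		lsize = (ccsidr & 0x7) + 4;	/* log2(line size in bytes) */
		wshift = (nways > 1) ? __builtin_clz(nways - 1) : 0;
		for (way = nways - 1; way >= 0; way--)
			for (set = nsets - 1; set >= 0; set--)
				dc_isw(((uint32_t)way << wshift) |
				    ((uint32_t)set << lsize) | (level << 1));
	}
	__asm __volatile("dsb");	/* wait for maintenance to complete */
	write_csselr(0);
	__asm __volatile("isb");
}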
/* Invalidate D cache to PoU (i.e., L1 cache only). */
ASENTRY(dcache_inv_pou_all)
mrc CP15_CLIDR(r0)
ands r0, r0, #0x38000000
mov r0, r0, lsr #26 /* Get LoUU (naturally aligned) */
beq 4f
mov r0, #0 /* start at level 0 (L1); loop below walks levels upward */
1: mcr CP15_CSSELR(r0) /* set cache level */
isb
mrc CP15_CCSIDR(r0) /* read CCSIDR */
ubfx r2, r0, #13, #15 /* get num sets - 1 from CCSIDR */
ubfx r3, r0, #3, #10 /* get num ways - 1 from CCSIDR */
clz r1, r3 /* number of bits to MSB of way */
lsl r3, r3, r1 /* shift into position */
mov ip, #1
lsl ip, ip, r1 /* ip now contains the way decr */
ubfx r0, r0, #0, #3 /* get linesize from CCSIDR */
add r0, r0, #4 /* apply bias */
lsl r2, r2, r0 /* shift sets by log2(linesize) */
add r3, r3, r2 /* merge numsets - 1 with numways - 1 */
sub ip, ip, r2 /* subtract numsets - 1 from way decr */
mov r1, #1
lsl r1, r1, r0 /* r1 now contains the set decr */
mov r2, ip /* r2 now contains set way decr */
/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2: mcr CP15_DCISW(r3) /* invalidate line */
movs r0, r3 /* get current way/set */
beq 3f /* at 0 means we are done */
movs r0, r0, lsl #10 /* clear way bits, leaving only set bits */
subne r3, r3, r1 /* non-zero?, decrement set */
subeq r3, r3, r2 /* zero?, decrement way and restore set count */
b 2b
3:
mrc CP15_CSSELR(r0) /* get cache level */
add r0, r0, #2 /* next level */
mrc CP15_CLIDR(r1)
ands r1, r1, #0x38000000
mov r1, r1, lsr #26 /* Get LoUU (naturally aligned) */
cmp r1, r0
bgt 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
bx lr
END(dcache_inv_pou_all)
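The combined-decrement trick used by these loops is easier to see with
concrete numbers.  The following stand-alone program (purely illustrative; the
32 KiB / 4-way / 64-byte-line geometry is an assumption, not something the
commit specifies) reproduces the arithmetic the prologue of each function
performs on CCSIDR:

#include <stdio.h>
#include <stdint.h>

int
main(void)
{
	/*
	 * Hypothetical geometry: 32 KiB, 4-way, 64-byte lines => 128 sets.
	 * CCSIDR holds NumSets-1, Associativity-1, and
	 * LineSize = log2(words per line) - 2.
	 */
	uint32_t ccsidr = (127u << 13) | (3u << 3) | 2u;

	uint32_t sets_m1 = (ccsidr >> 13) & 0x7fff;	/* 127 */
	uint32_t ways_m1 = (ccsidr >> 3) & 0x3ff;	/* 3 */
	uint32_t lsize   = (ccsidr & 0x7) + 4;		/* 6 = log2(64) */
	uint32_t wshift  = __builtin_clz(ways_m1);	/* 30 */

	/* r1 in the asm: subtracting this steps r3 to the previous set. */
	printf("set decr   = 0x%08x\n", 1u << lsize);
	/* r2 in the asm: one sub drops the way AND reloads the set count. */
	printf("way decr   = 0x%08x\n", (1u << wshift) - (sets_m1 << lsize));
	/* r3 starts at the highest way and set of the level. */
	printf("initial r3 = 0x%08x\n",
	    (ways_m1 << wshift) | (sets_m1 << lsize));
	return (0);
}

This prints 0x00000040, 0x3fffe040, and 0xc0001fc0: subtracting the set
decrement walks r3 through sets 127..0, and once the set bits reach zero a
single subtraction of the way decrement lowers the way field by one while
simultaneously restoring the set field to 127.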
/* Write back and Invalidate D cache to PoC. */
ASENTRY(dcache_wbinv_poc_all)
mrc CP15_CLIDR(r0)
ands r0, r0, #0x07000000
mov r0, r0, lsr #23 /* Get LoC (naturally aligned) */
beq 4f
mov r0, #0 /* start at level 0 (L1); loop below walks levels upward */
1: mcr CP15_CSSELR(r0) /* set cache level */
isb
mrc CP15_CCSIDR(r0) /* read CCSIDR */
ubfx r2, r0, #13, #15 /* get num sets - 1 from CCSIDR */
ubfx r3, r0, #3, #10 /* get num ways - 1 from CCSIDR */
clz r1, r3 /* number of bits to MSB of way */
lsl r3, r3, r1 /* shift into position */
mov ip, #1
lsl ip, ip, r1 /* ip now contains the way decr */
ubfx r0, r0, #0, #3 /* get linesize from CCSIDR */
add r0, r0, #4 /* apply bias */
lsl r2, r2, r0 /* shift sets by log2(linesize) */
add r3, r3, r2 /* merge numsets - 1 with numways - 1 */
sub ip, ip, r2 /* subtract numsets - 1 from way decr */
mov r1, #1
lsl r1, r1, r0 /* r1 now contains the set decr */
mov r2, ip /* r2 now contains set way decr */
/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
2: mcr CP15_DCCISW(r3) /* clean & invalidate line */
movs r0, r3 /* get current way/set */
beq 3f /* at 0 means we are done */
movs r0, r0, lsl #10 /* clear way bits, leaving only set bits */
subne r3, r3, r1 /* non-zero?, decrement set */
subeq r3, r3, r2 /* zero?, decrement way and restore set count */
b 2b
3:
mrc CP15_CSSELR(r0) /* get cache level */
add r0, r0, #2 /* next level */
mrc CP15_CLIDR(r1)
ands r1, r1, #0x07000000
mov r1, r1, lsr #23 /* Get LoC (naturally aligned) */
cmp r1, r0
bgt 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
bx lr
END(dcache_wbinv_poc_all)
#endif /* __ARM_ARCH >= 7 */
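A note on usage: because these routines deliberately break the ARM ABI (no
stack, r0-r4 and ip clobbered), they can be reached with a plain bl from
startup assembly but must never be called as ordinary C functions.  If one
ever had to be invoked from C, an explicit clobber list along the lines of the
following hypothetical wrapper (not part of this commit, and only valid with
interrupts already disabled) would be required:

static inline void
dcache_wbinv_poc_all_from_c(void)
{
	/* Clobbers mirror the "only r0-r4, r12" contract, plus lr. */
	__asm __volatile("bl dcache_wbinv_poc_all" : : :
	    "r0", "r1", "r2", "r3", "r4", "r12", "lr", "memory", "cc");
}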

sys/conf/files.arm

@@ -12,6 +12,7 @@ arm/arm/cpufunc.c standard
arm/arm/cpufunc_asm.S standard
arm/arm/cpufunc_asm_armv4.S standard
arm/arm/cpuinfo.c standard
arm/arm/cpu_asm-v6.S optional armv6
arm/arm/db_disasm.c optional ddb
arm/arm/db_interface.c optional ddb
arm/arm/db_trace.c optional ddb