Allow an SMP kernel to boot on Cortex-A8:

* Rename ARM_HAVE_MP_EXTENSIONS to ARM_USE_MP_EXTENSIONS and extend it to
  handle more cases, including when SMP is not enabled.
* Check ARM_USE_MP_EXTENSIONS when building for ARMv7+, even when SMP is
  not enabled.
* Use ARM_USE_MP_EXTENSIONS in pmap-v6.c to decide whether to set PRRR_NS1.

With this we should be able to boot on all ARMv7+ Cortex-A cores with
32-bit support (a sketch of the resulting run-time dispatch follows the
commit metadata below).

Reviewed by:	mmel, imp (earlier version)
Relnotes:	yes
Sponsored by:	ABT Systems Ltd
Differential Revision:	https://reviews.freebsd.org/D8335
Author:	Andrew Turner
Date:	2016-11-02 13:10:08 +00:00
Commit:	e9524a3c2f (parent 7359fdcf5f)

 3 files changed, 41 insertions(+), 31 deletions(-)
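For orientation before the diff, here is a minimal compile-able sketch of the pattern this commit adopts: one ARMv7 kernel binary selects broadcast (Inner Shareable) or local-CPU TLB maintenance at run time, based on whether the core implements the multiprocessing extensions. This is not FreeBSD source; cpu_has_mp_ext and the helper names are hypothetical stand-ins for cpuinfo.mp_ext and the kernel's _CP15_* accessors. The CP15 encodings match the ones in the sysreg diff below.

#include <stdint.h>

int cpu_has_mp_ext;	/* stand-in for FreeBSD's cpuinfo.mp_ext */

static inline void
tlbiallis(void)		/* TLBIALLIS: invalidate entire unified TLB, Inner Shareable */
{
	__asm __volatile("mcr p15, 0, %0, c8, c3, 0" :: "r" (0) : "memory");
}

static inline void
tlbiall(void)		/* TLBIALL: invalidate entire unified TLB, this CPU only */
{
	__asm __volatile("mcr p15, 0, %0, c8, c7, 0" :: "r" (0) : "memory");
}

static inline void
example_tlb_flush_all(void)
{
	__asm __volatile("dsb" ::: "memory");
	if (cpu_has_mp_ext)
		tlbiallis();	/* broadcast to every CPU in the IS domain */
	else
		tlbiall();	/* e.g. Cortex-A8: no MP extensions, local op */
	__asm __volatile("dsb; isb" ::: "memory");
}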

--- a/sys/arm/arm/pmap-v6.c
+++ b/sys/arm/arm/pmap-v6.c

@@ -500,7 +500,8 @@ pmap_set_tex(void)
 	/* Add shareable bits for normal memory in SMP case. */
 #ifdef SMP
-	prrr |= PRRR_NS1;
+	if (ARM_USE_MP_EXTENSIONS)
+		prrr |= PRRR_NS1;
 #endif
 	cp15_prrr_set(prrr);
 	cp15_nmrr_set(nmrr);
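Note on the hunk above: with TEX remap enabled, PRRR.NS1 controls whether Normal memory pages whose descriptor S bit is 1 are actually treated as shareable. A rough model of the new logic follows; PRRR_NS1 is assumed here to be bit 19 (the ARMv7-A PRRR.NS1 position), and the real constant comes from FreeBSD's pmap headers.

#include <stdint.h>

#define	PRRR_NS1	(UINT32_C(1) << 19)	/* assumed ARMv7-A PRRR.NS1 bit */

/*
 * Only advertise shareability for S=1 Normal memory when the MP
 * extensions are in use; on a Cortex-A8 the bit stays clear.
 */
static uint32_t
prrr_for_cpu(uint32_t prrr, int use_mp_ext)
{
	if (use_mp_ext)
		prrr |= PRRR_NS1;
	return (prrr);
}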

--- a/sys/arm/include/cpu-v6.h
+++ b/sys/arm/include/cpu-v6.h

@@ -45,6 +45,21 @@
 #define	CPU_ASID_KERNEL	0
+
+#if __ARM_ARCH >= 7
+#if !defined(SMP)
+/* No SMP so no need to use the MP extensions */
+#define	ARM_USE_MP_EXTENSIONS	0
+#elif defined(CPU_CORTEXA8) && \
+    (defined(CPU_CORTEXA_MP) || defined(CPU_KRAIT) || defined(CPU_MV_PJ4B))
+#define	ARM_USE_MP_EXTENSIONS	(cpuinfo.mp_ext != 0)
+#elif defined(CPU_CORTEXA8)
+#define	ARM_USE_MP_EXTENSIONS	0
+#else
+#define	ARM_USE_MP_EXTENSIONS	1
+#endif
+#endif /* __ARM_ARCH >= 7 */
+
 void dcache_wbinv_poc_all(void);	/* !!! NOT SMP coherent function !!! */
 vm_offset_t dcache_wb_pou_checked(vm_offset_t, vm_size_t);
 vm_offset_t icache_inv_pou_checked(vm_offset_t, vm_size_t);
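How the new macro resolves, per kernel config: a kernel without SMP gets constant 0; an SMP kernel built only for Cortex-A8 gets constant 0; an SMP kernel mixing Cortex-A8 with MP-capable cores (CPU_CORTEXA_MP, CPU_KRAIT, CPU_MV_PJ4B) tests cpuinfo.mp_ext at run time; any other ARMv7+ SMP kernel gets constant 1. In the constant cases the compiler can discard the dead branch of each if/else below, so UP kernels keep the cheap local operations with no run-time check. A hypothetical stand-alone illustration of that folding (USE_MP_EXT and pick_op are not FreeBSD names):

#define	USE_MP_EXT	0	/* as on a UP or Cortex-A8-only kernel */

static inline int
pick_op(void)
{
	/* The compiler folds this to "return (1)": the local op is chosen. */
	return (USE_MP_EXT ? 0 /* broadcast */ : 1 /* local */);
}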
@@ -111,15 +126,15 @@ fname(uint64_t reg) \
 /* TLB */
 _WF0(_CP15_TLBIALL, CP15_TLBIALL)	/* Invalidate entire unified TLB */
-#if __ARM_ARCH >= 7 && defined SMP
+#if __ARM_ARCH >= 7
 _WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)	/* Invalidate entire unified TLB IS */
 #endif
 _WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))	/* Invalidate unified TLB by ASID */
-#if __ARM_ARCH >= 7 && defined SMP
+#if __ARM_ARCH >= 7
 _WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))	/* Invalidate unified TLB by ASID IS */
 #endif
 _WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))	/* Invalidate unified TLB by MVA, all ASID */
-#if __ARM_ARCH >= 7 && defined SMP
+#if __ARM_ARCH >= 7
 _WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))	/* Invalidate unified TLB by MVA, all ASID IS */
 #endif
 _WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))	/* Invalidate unified TLB by MVA */
@@ -129,7 +144,7 @@ _WF1(_CP15_TTB_SET, CP15_TTBR0(%0))
 /* Cache and Branch predictor */
 _WF0(_CP15_BPIALL, CP15_BPIALL)	/* Branch predictor invalidate all */
-#if __ARM_ARCH >= 7 && defined SMP
+#if __ARM_ARCH >= 7
 _WF0(_CP15_BPIALLIS, CP15_BPIALLIS)	/* Branch predictor invalidate all IS */
 #endif
 _WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))	/* Branch predictor invalidate by MVA */
@@ -143,7 +158,7 @@ _WF1(_CP15_DCCSW, CP15_DCCSW(%0))	/* Data cache clean by set/way */
 _WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))	/* Data cache invalidate by MVA PoC */
 _WF1(_CP15_DCISW, CP15_DCISW(%0))	/* Data cache invalidate by set/way */
 _WF0(_CP15_ICIALLU, CP15_ICIALLU)	/* Instruction cache invalidate all PoU */
-#if __ARM_ARCH >= 7 && defined SMP
+#if __ARM_ARCH >= 7
 _WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)	/* Instruction cache invalidate all PoU IS */
 #endif
 _WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))	/* Instruction cache invalidate */
@@ -345,20 +360,14 @@ tlb_flush_range_local(vm_offset_t va, vm_size_t size)
 }
 /* Broadcasting operations. */
-#if __ARM_ARCH >= 7 && defined SMP
-#if defined(CPU_CORTEXA8)
-#define	ARM_HAVE_MP_EXTENSIONS	(cpuinfo.mp_ext != 0)
-#else
-#define	ARM_HAVE_MP_EXTENSIONS	1
-#endif
+#if __ARM_ARCH >= 7
 static __inline void
 tlb_flush_all(void)
 {
 	dsb();
-	if (ARM_HAVE_MP_EXTENSIONS)
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_TLBIALLIS();
 	else
 		_CP15_TLBIALL();
@@ -370,7 +379,7 @@ tlb_flush_all_ng(void)
 {
 	dsb();
-	if (ARM_HAVE_MP_EXTENSIONS)
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_TLBIASIDIS(CPU_ASID_KERNEL);
 	else
 		_CP15_TLBIASID(CPU_ASID_KERNEL);
@@ -384,7 +393,7 @@ tlb_flush(vm_offset_t va)
 	KASSERT((va & PAGE_MASK) == 0, ("%s: va %#x not aligned", __func__, va));
 	dsb();
-	if (ARM_HAVE_MP_EXTENSIONS)
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_TLBIMVAAIS(va);
 	else
 		_CP15_TLBIMVA(va | CPU_ASID_KERNEL);
@@ -401,7 +410,7 @@ tlb_flush_range(vm_offset_t va, vm_size_t size)
 	    size));
 	dsb();
-	if (ARM_HAVE_MP_EXTENSIONS) {
+	if (ARM_USE_MP_EXTENSIONS) {
 		for (; va < eva; va += PAGE_SIZE)
 			_CP15_TLBIMVAAIS(va);
 	} else {
@@ -410,14 +419,14 @@ tlb_flush_range(vm_offset_t va, vm_size_t size)
 	}
 	dsb();
 }
-#else /* SMP */
+#else /* __ARM_ARCH < 7 */
 #define tlb_flush_all() tlb_flush_all_local()
 #define tlb_flush_all_ng() tlb_flush_all_ng_local()
 #define tlb_flush(va) tlb_flush_local(va)
 #define tlb_flush_range(va, size) tlb_flush_range_local(va, size)
-#endif /* SMP */
+#endif /* __ARM_ARCH < 7 */
 /*
  * Cache maintenance operations.
@@ -431,8 +440,8 @@ icache_sync(vm_offset_t va, vm_size_t size)
 	dsb();
 	va &= ~cpuinfo.dcache_line_mask;
-#if __ARM_ARCH >= 7 && defined SMP
-	if (ARM_HAVE_MP_EXTENSIONS) {
+#if __ARM_ARCH >= 7
+	if (ARM_USE_MP_EXTENSIONS) {
 		for ( ; va < eva; va += cpuinfo.dcache_line_size)
 			_CP15_DCCMVAU(va);
 	} else
@@ -442,8 +451,8 @@ icache_sync(vm_offset_t va, vm_size_t size)
 		_CP15_DCCMVAC(va);
 	}
 	dsb();
-#if __ARM_ARCH >= 7 && defined SMP
-	if (ARM_HAVE_MP_EXTENSIONS)
+#if __ARM_ARCH >= 7
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_ICIALLUIS();
 	else
 #endif
@@ -456,8 +465,8 @@ icache_sync(vm_offset_t va, vm_size_t size)
 static __inline void
 icache_inv_all(void)
 {
-#if __ARM_ARCH >= 7 && defined SMP
-	if (ARM_HAVE_MP_EXTENSIONS)
+#if __ARM_ARCH >= 7
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_ICIALLUIS();
 	else
 #endif
@@ -470,8 +479,8 @@ icache_inv_all(void)
 static __inline void
 bpb_inv_all(void)
 {
-#if __ARM_ARCH >= 7 && defined SMP
-	if (ARM_HAVE_MP_EXTENSIONS)
+#if __ARM_ARCH >= 7
+	if (ARM_USE_MP_EXTENSIONS)
 		_CP15_BPIALLIS();
 	else
 #endif
@@ -488,8 +497,8 @@ dcache_wb_pou(vm_offset_t va, vm_size_t size)
 	dsb();
 	va &= ~cpuinfo.dcache_line_mask;
-#if __ARM_ARCH >= 7 && defined SMP
-	if (ARM_HAVE_MP_EXTENSIONS) {
+#if __ARM_ARCH >= 7
+	if (ARM_USE_MP_EXTENSIONS) {
 		for ( ; va < eva; va += cpuinfo.dcache_line_size)
 			_CP15_DCCMVAU(va);
 	} else

--- a/sys/arm/include/sysreg.h
+++ b/sys/arm/include/sysreg.h

@@ -140,7 +140,7 @@
 /*
  * CP15 C7 registers
  */
-#if __ARM_ARCH >= 7 && defined(SMP)
+#if __ARM_ARCH >= 7
 /* From ARMv7: */
 #define	CP15_ICIALLUIS	p15, 0, r0, c7, c1, 0	/* Instruction cache invalidate all PoU, IS */
 #define	CP15_BPIALLIS	p15, 0, r0, c7, c1, 6	/* Branch predictor invalidate all IS */
@@ -205,7 +205,7 @@
 /*
  * CP15 C8 registers
  */
-#if __ARM_ARCH >= 7 && defined(SMP)
+#if __ARM_ARCH >= 7
 /* From ARMv7: */
 #define	CP15_TLBIALLIS		p15, 0, r0, c8, c3, 0	/* Invalidate entire unified TLB IS */
 #define	CP15_TLBIMVAIS(rr)	p15, 0, rr, c8, c3, 1	/* Invalidate unified TLB by MVA IS */
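Closing note: the run-time cpuinfo.mp_ext test ultimately derives from the hardware. On ARMv7-A, MPIDR bit 31 reads as 1 on cores implementing the multiprocessing extensions. A hedged sketch of such a probe (cpu_mp_ext_present is a hypothetical helper; how FreeBSD's cpuinfo actually computes mp_ext may differ):

#include <stdint.h>

/* Read MPIDR (CP15 c0, c0, 5) and test the MP-extensions bit. */
static inline int
cpu_mp_ext_present(void)
{
	uint32_t mpidr;

	__asm __volatile("mrc p15, 0, %0, c0, c0, 5" : "=r" (mpidr));
	return ((mpidr >> 31) & 1);	/* 1 => ARMv7 MP extensions present */
}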