Commit e7a3d6b9 authored by Michael Zehrer

mark cache/mmu as potentially unsuitable for the current or later version(s)

and remove some poorly tested cache maintenance functions
parent a678c129
/**
* Copyright (c) 2018, Michael Zehrer
* Copyright (c) 2018-2020, ARCTOS
* All rights reserved.
*
* @licence BSD
* @brief Some useful cache-control functions
* @author Michael Zehrer
*/
#ifndef ARCTOS_RPI_CACHE_H
#define ARCTOS_RPI_CACHE_H
#ifndef RASPBERRY_CACHE_H
#define RASPBERRY_CACHE_H
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
void _iCache_enable();
void _iCache_disable();
void _iCache_invalidateAll();
void _iCache_invalidateByMVA(intptr_t base_addr, uint32_t length);
void os_iCache_enable();
void os_iCache_disable();
void os_iCache_invalidateAll();
void _dCache_enable();
void _dCache_disable();
void _dCache_invalidateBySetWay(uint32_t setway);
void _dCache_cleanAndInvalidateBySetWay(uint32_t setway);
void _dCache_cleanByMVA(intptr_t base_addr, uint32_t length);
void _dCache_invalidateAll();
void _dCache_cleanAndInvalidateAll();
void os_dCache_enable();
void os_dCache_disable();
#ifdef ARMv6
void os_dCache_invalidateAll();
void os_dCache_cleanAndInvalidateAll();
#endif
#if ARM_ARCH <= 7
void _branchPredictor_enable();
void _branchPredictor_disable();
void os_branchPredictor_enable();
void os_branchPredictor_disable();
#endif
void _branchPredictor_invalidate();
void os_branchPredictor_invalidate();
#ifdef __cplusplus
} // end extern "C"
#endif
#endif /* ARCTOS_RPI_CACHE_H */
#endif /* RASPBERRY_CACHE_H */
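For orientation, a minimal usage sketch of the API declared above, assuming the header is installed as cache.h (the include name itself is not visible in this diff):
#include "cache.h"   // assumed include name for the declarations above

// Enable the caches early during boot. On ARMv8 the branch predictor cannot be
// toggled via SCTLR, hence the ARM_ARCH guard (mirroring the header above).
static void enable_all_caches(void) {
    os_iCache_enable();
    os_dCache_enable();
#if ARM_ARCH <= 7
    os_branchPredictor_enable();
#endif
}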
/**
* Copyright (c) 2018-2019, Michael Zehrer
* Copyright (c) 2018-2020, ARCTOS
* All rights reserved.
*
* @licence BSD
@@ -7,11 +7,11 @@
* (Only Short-descriptor translation table format)
* @author Michael Zehrer
*
* See ARMv8-A Architecture Reference Manual (Page 3580)
* See ARMv7-AR Architecture Reference Manual (Page 1326)
* See ARMv8-A Architecture Reference Manual (G4.4)
* See ARMv7-AR Architecture Reference Manual (B3.5.1)
*/
#ifndef ARCTOS_RPI_MMU_H
#define ARCTOS_RPI_MMU_H
#ifndef RASPBERRY_MMU_H
#define RASPBERRY_MMU_H
#include <stdint.h>
#ifdef __cplusplus
@@ -42,13 +42,13 @@ typedef union {
} MMULevel1Section;
void _mmu_enable();
void _mmu_disable();
void os_mmu_enable();
void os_mmu_disable();
void _mmu_init();
void os_mmu_init();
#ifdef __cplusplus
} // end extern "C"
#endif
#endif /* ARCTOS_RPI_MMU_H */
#endif /* RASPBERRY_MMU_H */
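As a rough illustration of the Short-descriptor format referenced above, a 1 MB level-1 section entry (the kind the MMULevel1Section union elided from this diff presumably wraps) can be modelled as below. The member names are illustrative only; the bit layout follows the ARMv7-AR manual, B3.5.1.
#include <stdint.h>

// Illustrative sketch of a Short-descriptor level-1 section entry (bit-fields
// are allocated from the least significant bit on a little-endian ARM GCC build).
typedef union {
    uint32_t raw;
    struct {
        uint32_t type      : 2;   // 0b10 marks a section entry
        uint32_t B         : 1;   // bufferable
        uint32_t C         : 1;   // cacheable
        uint32_t XN        : 1;   // execute never
        uint32_t domain    : 4;   // domain number checked against DACR
        uint32_t impl      : 1;   // implementation defined
        uint32_t AP        : 2;   // access permissions AP[1:0]
        uint32_t TEX       : 3;   // memory type extension
        uint32_t AP2       : 1;   // access permissions AP[2]
        uint32_t S         : 1;   // shareable
        uint32_t nG        : 1;   // not global
        uint32_t zero      : 1;   // 0 = section, 1 = supersection
        uint32_t NS        : 1;   // non-secure
        uint32_t base_addr : 12;  // physical section base, address bits [31:20]
    };
} Level1SectionSketch;            // hypothetical stand-in for MMULevel1Section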
/**
* Copyright (c) 2018, Michael Zehrer
* Copyright (c) 2018-2020, ARCTOS
* All rights reserved.
*
* @licence BSD
@@ -19,9 +19,9 @@
* r4-r12 are used for working values; their values must be the same after a function call as before it.
*/
.global _dCache_enable
.func _dCache_enable
_dCache_enable:
.global os_dCache_enable
.func os_dCache_enable
os_dCache_enable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -34,9 +34,9 @@ _dCache_enable:
bx lr
.endfunc
.global _dCache_disable
.func _dCache_disable
_dCache_disable:
.global os_dCache_disable
.func os_dCache_disable
os_dCache_disable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -49,9 +49,9 @@ _dCache_disable:
bx lr
.endfunc
.global _iCache_enable
.func _iCache_enable
_iCache_enable:
.global os_iCache_enable
.func os_iCache_enable
os_iCache_enable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -64,9 +64,9 @@ _iCache_enable:
bx lr
.endfunc
.global _iCache_disable
.func _iCache_disable
_iCache_disable:
.global os_iCache_disable
.func os_iCache_disable
os_iCache_disable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -85,9 +85,9 @@ _iCache_disable:
* The following two functions are not available on ARMv8 (Raspberry Pi 3)
* because the Z bit is no longer present in SCTLR (on ARMv7/ARMv6 it is).
*/
.global _branchPredictor_enable
.func _branchPredictor_enable
_branchPredictor_enable:
.global os_branchPredictor_enable
.func os_branchPredictor_enable
os_branchPredictor_enable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -98,9 +98,9 @@ _branchPredictor_enable:
bx lr
.endfunc
.global _branchPredictor_disable
.func _branchPredictor_disable
_branchPredictor_disable:
.global os_branchPredictor_disable
.func os_branchPredictor_disable
os_branchPredictor_disable:
ASM_DMB
// read the system control register (SCTLR)
mrc p15, #0, r0, c1, c0, #0
@@ -113,9 +113,9 @@ _branchPredictor_disable:
#endif
.global _iCache_invalidateAll
.func _iCache_invalidateAll
_iCache_invalidateAll:
.global os_iCache_invalidateAll
.func os_iCache_invalidateAll
os_iCache_invalidateAll:
ASM_DMB
// (ICIALLU) Instruction Cache Invalidate All to PoU
// The ICIALLU operation ignores the value in r0.
@@ -124,9 +124,9 @@ _iCache_invalidateAll:
bx lr
.endfunc
.global _branchPredictor_invalidate
.func _branchPredictor_invalidate
_branchPredictor_invalidate:
.global os_branchPredictor_invalidate
.func os_branchPredictor_invalidate
os_branchPredictor_invalidate:
ASM_DMB
// (BPIALL) Branch predictor invalidate all
// The BPIALL operation ignores the value in r0.
@@ -136,133 +136,10 @@ _branchPredictor_invalidate:
bx lr
.endfunc
.global _dCache_invalidateBySetWay
.func _dCache_invalidateBySetWay
_dCache_invalidateBySetWay:
// (DCISW) Data Cache line Invalidate by Set/Way
mcr p15, #0, r0, c7, c6, #2
bx lr
.endfunc
.global _dCache_cleanAndInvalidateBySetWay
.func _dCache_cleanAndInvalidateBySetWay
_dCache_cleanAndInvalidateBySetWay:
// (DCCISW) Clean and invalidate data cache line by set/way
mcr p15, #0, r0, c7, c14, #2
bx lr
.endfunc
/*
* r0 - base address
* r1 - length (we assume the length is not 0)
*/
.global _dCache_cleanByMVA
.func _dCache_cleanByMVA
_dCache_cleanByMVA:
// Calculate end of the region
add r1, r1, r0
// Read Cache Type Register (CTR)
mrc p15, #0, r2, c0, c0, #1
#ifdef ARMv6
// extract Dsize 'len' from CTR
and r4, r2, #0x3000
lsr r4, r4, #12
// calculate the cache line size
add r4, r4, #3
mov r3, #1
lsl r3, r3, r4
#else
// extract DminLine (log2 of the smallest cache line size)
ubfx r4, r2, #16, #4
// DminLine is log2 of the line length in words (4 bytes each)
// r3 = 4 * 2^(DminLine) = smallest cache line size in bytes (see the C sketch after this function)
mov r3, #4
lsl r3, r3, r4
#endif
// get the mask for the cache line
sub r4, r3, #1
// Align the base address of the region to a cache-line boundary
bic r4, r0, r4
dCache_cleanByMVA_loop:
// Data cache clean by MVA to PoC
mcr p15, #0, r4, c7, c10, #1
// Next cache line
add r4, r4, r3
// Is r4 (the current cache line) still below the end of the region?
cmp r4, r1
// while (address < end_address)
blt dCache_cleanByMVA_loop
ASM_DSB
bx lr
.endfunc
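Restated in C, the line-size computation used by the two by-MVA loops reads the Cache Type Register and derives the smallest cache-line size in bytes; the loop then rounds the base address down to a line boundary and issues one maintenance operation per line until it passes base + length. A sketch of the line-size part (the function name is illustrative only):
#include <stdint.h>

// Illustrative only: mirrors the Dsize/DminLine handling in _dCache_cleanByMVA.
static inline uint32_t dcache_min_line_bytes(void) {
    uint32_t ctr;
    // Read the Cache Type Register (CTR)
    asm volatile ("mrc p15, 0, %0, c0, c0, 1" : "=r"(ctr));
#ifdef ARMv6
    // ARMv6 CTR: Dsize 'len' in bits [13:12]; line size = 1 << (len + 3) bytes
    return 1u << (((ctr >> 12) & 0x3u) + 3u);
#else
    // ARMv7 CTR: DminLine in bits [19:16] is log2 of the line length in words,
    // so the byte size is 4 << DminLine
    return 4u << ((ctr >> 16) & 0xFu);
#endif
}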
/*
* r0 - base address
* r1 - length (we assume the length is not 0)
*/
.global _iCache_invalidateByMVA
.func _iCache_invalidateByMVA
_iCache_invalidateByMVA:
// Calculate end of the region
add r1, r1, r0
// Read Cache Type Register (CTR)
mrc p15, #0, r2, c0, c0, #1
#ifdef ARMv6
// extract Isize 'len' from CTR
and r4, r2, #0x3
// calculate the cache line size
add r4, r4, #3
mov r3, #1
lsl r3, r3, r4
#else
// Extract IminLine (log2 of the cache line)
and r4, r2, #0xF
// IminLine is log2 of the line length in words (4 bytes each)
// r3 = 4 * 2^(IminLine) = smallest cache line size in bytes
mov r3, #4
lsl r3, r3, r4
#endif
// Get the mask for the cache line
sub r4, r3, #1
// Align the base address of the region to a cache-line boundary
bic r4, r0, r4
iCache_invalidateByMVA_loop:
// (ICIMVAU) Instruction Cache Invalidate by MVA to PoU
mcr p15, #0, r4, c7, c5, #1
// Next cache line
add r4, r4, r3
// Is r4 (the current cache line) still below the end of the region?
cmp r4, r1
// while (address < end_address)
blt iCache_invalidateByMVA_loop
// Ensure completion of the invalidations
ASM_DSB
// Synchronize the fetched instruction stream
ASM_ISB
bx lr
.endfunc
#ifdef ARMv6
.global _dCache_invalidateAll
.func _dCache_invalidateAll
_dCache_invalidateAll:
.global os_dCache_invalidateAll
.func os_dCache_invalidateAll
os_dCache_invalidateAll:
ASM_DMB
mov r0, #0
// Invalidate entire (L1) data cache
@@ -271,9 +148,9 @@ _dCache_invalidateAll:
bx lr
.endfunc
.global _dCache_cleanAndInvalidateAll
.func _dCache_cleanAndInvalidateAll
_dCache_cleanAndInvalidateAll:
.global os_dCache_cleanAndInvalidateAll
.func os_dCache_cleanAndInvalidateAll
os_dCache_cleanAndInvalidateAll:
ASM_DMB
mov r0, #0
// Clean and invalidate entire (L1) data cache
@@ -281,113 +158,4 @@ _dCache_cleanAndInvalidateAll:
ASM_DSB
bx lr
.endfunc
#else
.global _dCache_invalidateAll
.func _dCache_invalidateAll
_dCache_invalidateAll:
push {r4-r8, lr}
ASM_DMB
/* Invalidate Level 1 cache */
mov r0, #0
ldr r1, =_dCache_invalidateBySetWay
bl callForAllSetWay
/* Invalidate Level 2 cache */
mov r0, #1
// ldr r1, =dCache_invalidateBySetWay
bl callForAllSetWay
ASM_DSB
ASM_ISB
pop {r4-r8, lr}
bx lr
.endfunc
.global _dCache_cleanAndInvalidateAll
.func _dCache_cleanAndInvalidateAll
_dCache_cleanAndInvalidateAll:
push {r4-r8, lr}
ASM_DMB
/* Clean and invalidate Level 1 cache */
mov r0, #0
ldr r1, =_dCache_cleanAndInvalidateBySetWay
bl callForAllSetWay
/* Clean and invalidate Level 2 cache */
mov r0, #1
// ldr r1, =_dCache_cleanAndInvalidateBySetWay
bl callForAllSetWay
ASM_DSB
ASM_ISB
pop {r4-r8, lr}
bx lr
.endfunc
/*
* r0 - cache level (0 = L1, 1 = L2)
* r1 - address of subroutine which accepts set/way in r0
*
* r2-r8 are used internally
*/
callForAllSetWay:
// Save lr (faster than push on stack)
mov r8, lr
lsl r7, r0, #1
// Write (CSSELR) Cache Size Selection Register
mcr p15, #2, r7, c0, c0, #0
ASM_ISB
// Read (CCSIDR) Current Cache Size ID Register
mrc p15, #1, r6, c0, c0, #0
// Extract LineSize
ldr r0, =CCSIDR_LINE_SIZE_MASK
and r2, r6, r0
add r2, r2, #4
// Extract Associativity
ldr r0, =CCSIDR_ASSOCIATIVITY_MASK
and r3, r6, r0
lsr r3, r3, #CCSIDR_ASSOCIATIVITY_START_BIT
clz r4, r3
// Extract NumSets
ldr r0, =CCSIDR_NUM_SETS_MASK
and r5, r6, r0
lsr r5, r5, #CCSIDR_NUM_SETS_START_BIT
// r1 - subroutine which accepts set/way in r0
// r2 - Log2 Line Length
// r3 - Way
// r4 - LEADING ZEROS of WAY
// r5 - Set
// r7 - (cache level << 1)
// r8 - !!DO NOT USE!!
1:
mov r6, r3
2:
// r0 = (cache level << 1) | (set << LOG2_LINE_LEN) | (way << LEADING_ZEROS_OF_WAY)
orr r0, r7, r6, lsl r4
orr r0, r0, r5, lsl r2
// call the subroutine
blx r1
subs r6, r6, #1
bge 2b
subs r5, r5, #1
bge 1b
// Set the cache size selection back to L1
mov r7, #0
// Write (CSSELR) Cache Size Selection Register
mcr p15, #2, r7, c0, c0, #0
// Return via the link address saved in r8
bx r8
#endif
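Restated in C, callForAllSetWay selects a cache level in CSSELR, reads its geometry from CCSIDR and hands every encoded set/way value to the callback; _dCache_invalidateAll and _dCache_cleanAndInvalidateAll (removed for the non-ARMv6 case in this commit) are just this loop applied to L1 and L2 with DCISW or DCCISW as the callback. A hedged C counterpart, with the standard ARMv7 CCSIDR field positions filled in for the symbolic CCSIDR_* masks used above (names are illustrative):
#include <stdint.h>

typedef void (*setway_fn)(uint32_t setway);

// Illustrative C counterpart of callForAllSetWay: level 0 = L1, level 1 = L2.
static void for_all_set_way(uint32_t level, setway_fn fn) {
    uint32_t sel = level << 1;
    asm volatile ("mcr p15, 2, %0, c0, c0, 0" :: "r"(sel));    // select level (CSSELR)
    asm volatile ("isb");
    uint32_t ccsidr;
    asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r"(ccsidr)); // read geometry (CCSIDR)

    uint32_t log2_line = (ccsidr & 0x7u) + 4u;            // LineSize + 4 = log2(line bytes)
    uint32_t ways      = (ccsidr >> 3) & 0x3FFu;          // Associativity - 1
    uint32_t sets      = (ccsidr >> 13) & 0x7FFFu;        // NumSets - 1
    uint32_t way_shift = ways ? __builtin_clz(ways) : 0;  // leading zeros, as in the assembly

    for (int32_t set = (int32_t)sets; set >= 0; set--)
        for (int32_t way = (int32_t)ways; way >= 0; way--)
            // set/way word = (way << way_shift) | (set << log2_line) | (level << 1)
            fn(((uint32_t)way << way_shift) | ((uint32_t)set << log2_line) | (level << 1));

    sel = 0;                                               // restore the selection to L1
    asm volatile ("mcr p15, 2, %0, c0, c0, 0" :: "r"(sel));
}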
@@ -18,7 +18,7 @@ extern "C" {
// defined by the linker script
extern uint32_t _mmu_level1_table_start_[MMU_LEVEL1_TABLE_SIZE];
void _mmu_enable() {
void os_mmu_enable() {
asm volatile (
// (TLBIALL) TLB Invalidate All entries (value in r0 is ignored)
"mcr p15, 0, r0, c8, c7, 0\n\t"
@@ -32,7 +32,7 @@ void _mmu_enable() {
::: "r0", "memory");
}
void _mmu_disable() {
void os_mmu_disable() {
asm volatile (
// read the System Control Register
"mrc p15, 0, r0, c1, c0, 0\n\t"
@@ -72,7 +72,7 @@ static void createPageTableEntries(uint32_t physical_addr, uint32_t virtual_addr
asm volatile ("mcr p15, 0, r0, c8, c7, 0\n\t" ::: "memory");
}
void _mmu_init() {
void os_mmu_init() {
asm volatile (
"mvn r0, #0\n\t"
// (DACR) Domain Access Control Register
@@ -83,8 +83,6 @@ void _mmu_init() {
"ldr r0, =_mmu_level1_table_start_\n\t"
// (TTBR0) Translation Table Base Register 0
"mcr p15, 0, r0, c2, c0, 0\n\t"
// (TTBR1) Translation Table Base Register 1
"mcr p15, 0, r0, c2, c0, 1\n\t"
::: "r0", "memory");
// The complete RAM (from 0 to PERIPHERALS_BASE) can be mapped as cacheable
@@ -96,7 +94,7 @@ void _mmu_init() {
createPageTableEntries(ARM_LOCAL_BASE, ARM_LOCAL_BASE, 0x00100000, false);
#endif
_mmu_enable();
os_mmu_enable();
}
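createPageTableEntries() is only partially visible here, but the call sites suggest a signature of (physical base, virtual base, length in bytes, cacheable flag) and that it fills 1 MB section descriptors into _mmu_level1_table_start_. A speculative sketch of that shape; the later parameter names and the descriptor bit values are assumptions based on the Short-descriptor section format, not the project's actual code:
#include <stdbool.h>
#include <stdint.h>

extern uint32_t _mmu_level1_table_start_[];  // provided by the linker script, as above

// Speculative sketch: one level-1 section entry per 1 MB of the region.
static void createPageTableEntries_sketch(uint32_t physical_addr, uint32_t virtual_addr,
                                          uint32_t length, bool cacheable) {
    for (uint32_t off = 0; off < length; off += 0x00100000u) {
        uint32_t entry = (physical_addr + off) & 0xFFF00000u; // section base address
        entry |= 0x2u;                                        // bits [1:0] = 0b10: section
        entry |= 3u << 10;                                    // AP[1:0] = 0b11: full access
        if (cacheable)
            entry |= (1u << 3) | (1u << 2);                   // C and B: cacheable, bufferable
        _mmu_level1_table_start_[(virtual_addr + off) >> 20] = entry;
    }
    // (TLBIALL) invalidate the TLB so no stale translation survives, as in the original
    uint32_t ignored = 0;
    asm volatile ("mcr p15, 0, %0, c8, c7, 0" :: "r"(ignored) : "memory");
}
Since os_mmu_init() writes all ones to DACR, every domain is treated as a manager and the AP bits above are effectively ignored; in this setup the C/B bits are what the cacheable flag really controls.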
#ifdef __cplusplus
......
@@ -118,14 +118,14 @@ enum LogDestination {
};
void hwInit(void) {
_mmu_init();
#ifdef RPI_ENABLE_CACHE
_iCache_enable();
_dCache_enable();
#warning The current implementation might not work with cache enabled!
os_mmu_init();
os_iCache_enable();
os_dCache_enable();
#endif
#if ARM_ARCH <= 7
_branchPredictor_enable();
os_branchPredictor_enable();
#endif
#if ARCTOS_LOG_LEVEL != ARCTOS_LOG_LEVEL_NONE
......
/**
* Copyright (c) 2018, Michael Zehrer
* Copyright (c) 2018-2020, ARCTOS
* All rights reserved.
*
* @licence BSD
@@ -60,15 +60,11 @@ reset_hdl:
#endif
reset_svc_mode:
bl _dCache_disable
bl _iCache_disable
bl os_dCache_disable
bl os_iCache_disable
#if ARM_ARCH <= 7
bl _branchPredictor_disable
bl os_branchPredictor_disable
#endif
bl _dCache_cleanAndInvalidateAll
bl _iCache_invalidateAll
bl _branchPredictor_invalidate
// disable watchdog (just to be sure)
ldr r0, =POWER_MANAGEMENT_RSTC
......