armv7m: add instruction & data cache support

This patch adds armv7m instruction & data cache support: cache enable/disable, invalidate and flush (clean) maintenance operations, both by address range and by set/way.
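As a rough usage sketch (illustration only, not part of this patch): board or
driver code is expected to call the new hooks around DMA buffers roughly as
below. The buffer, its size, the 32-byte alignment and the assumption that the
prototypes are visible via common.h are hypothetical example choices.

/* Illustration only: hypothetical driver code using the new cache hooks */
#include <common.h>

static char dma_buf[512] __attribute__((aligned(32)));	/* assume 32-byte lines */

void example_dma_transfer(void)
{
	/* caches are normally switched on once during init via enable_caches() */

	/* clean the buffer out to memory before the device reads it */
	flush_dcache_range((unsigned long)dma_buf,
			   (unsigned long)dma_buf + sizeof(dma_buf));

	/* ... start the DMA transfer and wait for it to complete ... */

	/* discard stale lines before the CPU reads what the device wrote */
	invalidate_dcache_range((unsigned long)dma_buf,
				(unsigned long)dma_buf + sizeof(dma_buf));
}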

Signed-off-by: Vikas Manocha <vikas.manocha@st.com>
cc: Christophe KERELLO <christophe.kerello@st.com>
Vikas Manocha 2017-03-27 13:02:44 -07:00 committed by Tom Rini
parent 3f66149d9f
commit bf4d0495d2
4 changed files with 363 additions and 4 deletions

arch/arm/cpu/armv7m/Makefile

@@ -6,6 +6,5 @@
#
extra-y := start.o
obj-y += cpu.o
obj-y += cpu.o cache.o
obj-$(CONFIG_SYS_ARCH_TIMER) += systick-timer.o

arch/arm/cpu/armv7m/cache.c (new file, 336 lines)

@@ -0,0 +1,336 @@
/*
* (C) Copyright 2017
* Vikas Manocha, ST Microelectronics, vikas.manocha@st.com.
*
* SPDX-License-Identifier: GPL-2.0+
*/
#include <common.h>
#include <errno.h>
#include <asm/armv7m.h>
#include <asm/io.h>
/* Cache maintenance operation registers */
#define V7M_CACHE_REG_ICIALLU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x00))
#define INVAL_ICACHE_POU 0
#define V7M_CACHE_REG_ICIMVALU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x08))
#define V7M_CACHE_REG_DCIMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x0C))
#define V7M_CACHE_REG_DCISW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x10))
#define V7M_CACHE_REG_DCCMVAU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x14))
#define V7M_CACHE_REG_DCCMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x18))
#define V7M_CACHE_REG_DCCSW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x1C))
#define V7M_CACHE_REG_DCCIMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x20))
#define V7M_CACHE_REG_DCCISW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x24))
#define WAYS_SHIFT 30
#define SETS_SHIFT 5
/* armv7m processor feature registers */
#define V7M_PROC_REG_CLIDR ((u32 *)(V7M_PROC_FTR_BASE + 0x00))
#define V7M_PROC_REG_CTR ((u32 *)(V7M_PROC_FTR_BASE + 0x04))
#define V7M_PROC_REG_CCSIDR ((u32 *)(V7M_PROC_FTR_BASE + 0x08))
#define MASK_NUM_WAYS GENMASK(12, 3)
#define MASK_NUM_SETS GENMASK(27, 13)
#define CLINE_SIZE_MASK GENMASK(2, 0)
#define NUM_WAYS_SHIFT 3
#define NUM_SETS_SHIFT 13
#define V7M_PROC_REG_CSSELR ((u32 *)(V7M_PROC_FTR_BASE + 0x0C))
#define SEL_I_OR_D BIT(0)
enum cache_type {
DCACHE,
ICACHE,
};
/* PoU: Point of Unification, PoC: Point of Coherency */
enum cache_action {
INVALIDATE_POU, /* i-cache invalidate by address */
INVALIDATE_POC, /* d-cache invalidate by address */
INVALIDATE_SET_WAY, /* d-cache invalidate by sets/ways */
FLUSH_POU, /* d-cache clean by address to the PoU */
FLUSH_POC, /* d-cache clean by address to the PoC */
FLUSH_SET_WAY, /* d-cache clean by sets/ways */
FLUSH_INVAL_POC, /* d-cache clean & invalidate by addr to PoC */
FLUSH_INVAL_SET_WAY, /* d-cache clean & invalidate by set/ways */
};
#ifndef CONFIG_SYS_DCACHE_OFF
struct dcache_config {
u32 ways;
u32 sets;
};
static void get_cache_ways_sets(struct dcache_config *cache)
{
u32 cache_size_id = readl(V7M_PROC_REG_CCSIDR);
cache->ways = (cache_size_id & MASK_NUM_WAYS) >> NUM_WAYS_SHIFT;
cache->sets = (cache_size_id & MASK_NUM_SETS) >> NUM_SETS_SHIFT;
}
/*
* Return the I/O register used to perform the required cache action, e.g.
* clean or clean & invalidate by set/way.
*/
static u32 *get_action_reg_set_ways(enum cache_action action)
{
switch (action) {
case INVALIDATE_SET_WAY:
return V7M_CACHE_REG_DCISW;
case FLUSH_SET_WAY:
return V7M_CACHE_REG_DCCSW;
case FLUSH_INVAL_SET_WAY:
return V7M_CACHE_REG_DCCISW;
default:
break;
}
return NULL;
}
/*
* Return the I/O register used to perform the required cache action, e.g.
* clean or clean & invalidate by address or address range.
*/
static u32 *get_action_reg_range(enum cache_action action)
{
switch (action) {
case INVALIDATE_POU:
return V7M_CACHE_REG_ICIMVALU;
case INVALIDATE_POC:
return V7M_CACHE_REG_DCIMVAC;
case FLUSH_POU:
return V7M_CACHE_REG_DCCMVAU;
case FLUSH_POC:
return V7M_CACHE_REG_DCCMVAC;
case FLUSH_INVAL_POC:
return V7M_CACHE_REG_DCCIMVAC;
default:
break;
}
return NULL;
}
static u32 get_cline_size(enum cache_type type)
{
u32 size;
if (type == DCACHE)
clrbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
else if (type == ICACHE)
setbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
/* Make sure cache selection is effective for next memory access */
dsb();
size = readl(V7M_PROC_REG_CCSIDR) & CLINE_SIZE_MASK;
/* Size is encoded as log2(number of words per cache line) minus 2 */
size = 1 << (size + 2);
debug("cache line size is %d\n", size);
return size;
}
/* Perform the action like invalidate/clean on a range of cache addresses */
static int action_cache_range(enum cache_action action, u32 start_addr,
int64_t size)
{
u32 cline_size;
u32 *action_reg;
enum cache_type type;
action_reg = get_action_reg_range(action);
if (!action_reg)
return -EINVAL;
if (action == INVALIDATE_POU)
type = ICACHE;
else
type = DCACHE;
/* Cache line size is the minimum granule for the cache action */
cline_size = get_cline_size(type);
/* Extend size to cover the start alignment, then align the start address */
size += start_addr & (cline_size - 1);
start_addr &= ~(cline_size - 1);
debug("total size for cache action = %llx\n", size);
do {
writel(start_addr, action_reg);
size -= cline_size;
start_addr += cline_size;
} while (size > 0);
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
debug("cache action on range done\n");
return 0;
}
/* Perform the action like invalidate/clean on all cached addresses */
static int action_dcache_all(enum cache_action action)
{
struct dcache_config cache;
u32 *action_reg;
int i, j;
action_reg = get_action_reg_set_ways(action);
if (!action_reg)
return -EINVAL;
clrbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
/* Make sure cache selection is effective for next memory access */
dsb();
get_cache_ways_sets(&cache); /* Get number of ways & sets */
debug("cache: ways= %d, sets= %d\n", cache.ways + 1, cache.sets + 1);
for (i = cache.sets; i >= 0; i--) {
for (j = cache.ways; j >= 0; j--) {
writel((j << WAYS_SHIFT) | (i << SETS_SHIFT),
action_reg);
}
}
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
return 0;
}
void dcache_enable(void)
{
if (dcache_status()) /* return if cache already enabled */
return;
if (action_dcache_all(INVALIDATE_SET_WAY)) {
printf("ERR: D-cache not enabled\n");
return;
}
setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
}
void dcache_disable(void)
{
if (!dcache_status())
return;
/* flush the d-cache by set/way before disabling it */
if (action_dcache_all(FLUSH_SET_WAY)) {
printf("ERR: D-cache not flushed\n");
return;
}
clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
}
int dcache_status(void)
{
return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_DCACHE)) != 0;
}
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
if (action_cache_range(INVALIDATE_POC, start, stop - start)) {
printf("ERR: D-cache not invalidated\n");
return;
}
}
void flush_dcache_range(unsigned long start, unsigned long stop)
{
if (action_cache_range(FLUSH_POC, start, stop - start)) {
printf("ERR: D-cache not flushed\n");
return;
}
}
#else
void dcache_enable(void)
{
return;
}
void dcache_disable(void)
{
return;
}
int dcache_status(void)
{
return 0;
}
#endif
#ifndef CONFIG_SYS_ICACHE_OFF
void invalidate_icache_all(void)
{
writel(INVAL_ICACHE_POU, V7M_CACHE_REG_ICIALLU);
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
}
void icache_enable(void)
{
if (icache_status())
return;
invalidate_icache_all();
setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
/* Make sure cache action is effective for next memory access */
dsb();
isb(); /* Make sure instruction stream sees it */
}
int icache_status(void)
{
return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_ICACHE)) != 0;
}
void icache_disable(void)
{
if (!icache_status())
return;
isb(); /* flush pipeline */
clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
isb(); /* so subsequent instruction fetches see the cache-disable effect */
}
#else
void icache_enable(void)
{
return;
}
void icache_disable(void)
{
return;
}
int icache_status(void)
{
return 0;
}
#endif
void enable_caches(void)
{
#ifndef CONFIG_SYS_ICACHE_OFF
icache_enable();
#endif
#ifndef CONFIG_SYS_DCACHE_OFF
dcache_enable();
#endif
}
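A side note on the set/way loops in action_dcache_all() above: the operand
written to DCISW/DCCSW/DCCISW carries the way index in bits [31:30]
(WAYS_SHIFT 30, which matches a 4-way cache) and the set index starting at
bit 5 (SETS_SHIFT 5, which matches 32-byte lines). A small worked example,
assuming a 16 KB, 4-way d-cache with 32-byte lines (a common Cortex-M7
configuration; the EX_* names below are local to this illustration):

/* Illustration only: how the set/way operand is composed. With 16 KB,
 * 4 ways and 32-byte lines, CCSIDR reports ways = 3 and sets = 127
 * (128 sets = 16384 / (4 * 32)). */
#define EX_WAYS_SHIFT	30	/* way index in bits [31:30] (4 ways) */
#define EX_SETS_SHIFT	5	/* set index in bits [13:5] (32-byte lines) */

unsigned int example_set_way_operand(unsigned int way, unsigned int set)
{
	/* e.g. way = 3, set = 127 -> 0xC0000FE0; the nested loops issue one
	 * such write per (set, way) pair, i.e. 4 * 128 = 512 in total */
	return (way << EX_WAYS_SHIFT) | (set << EX_SETS_SHIFT);
}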

arch/arm/include/asm/armv7m.h

@@ -16,8 +16,15 @@
.thumb
#endif
#define V7M_SCB_BASE 0xE000ED00
#define V7M_MPU_BASE 0xE000ED90
/* armv7m fixed base addresses */
#define V7M_SCS_BASE 0xE000E000
#define V7M_NVIC_BASE (V7M_SCS_BASE + 0x0100)
#define V7M_SCB_BASE (V7M_SCS_BASE + 0x0D00)
#define V7M_PROC_FTR_BASE (V7M_SCS_BASE + 0x0D78)
#define V7M_MPU_BASE (V7M_SCS_BASE + 0x0D90)
#define V7M_FPU_BASE (V7M_SCS_BASE + 0x0F30)
#define V7M_CACHE_MAINT_BASE (V7M_SCS_BASE + 0x0F50)
#define V7M_ACCESS_CNTL_BASE (V7M_SCS_BASE + 0x0F90)
#define V7M_SCB_VTOR 0x08
@@ -27,6 +34,18 @@ struct v7m_scb {
uint32_t icsr; /* Interrupt Control and State Register */
uint32_t vtor; /* Vector Table Offset Register */
uint32_t aircr; /* App Interrupt and Reset Control Register */
uint32_t scr; /* offset 0x10: System Control Register */
uint32_t ccr; /* offset 0x14: Config and Control Register */
uint32_t shpr1; /* offset 0x18: System Handler Priority Reg 1 */
uint32_t shpr2; /* offset 0x1c: System Handler Priority Reg 2 */
uint32_t shpr3; /* offset 0x20: System Handler Priority Reg 3 */
uint32_t shcrs; /* offset 0x24: System Handler Control State */
uint32_t cfsr; /* offset 0x28: Configurable Fault Status Reg */
uint32_t hfsr; /* offset 0x2C: HardFault Status Register */
uint32_t res; /* offset 0x30: reserved */
uint32_t mmar; /* offset 0x34: MemManage Fault Address Reg */
uint32_t bfar; /* offset 0x38: BusFault Address Reg */
uint32_t afsr; /* offset 0x3C: Auxiliary Fault Status Reg */
};
#define V7M_SCB ((struct v7m_scb *)V7M_SCB_BASE)
@@ -39,6 +58,9 @@ struct v7m_scb {
#define V7M_ICSR_VECTACT_MSK 0xFF
#define V7M_CCR_DCACHE 16
#define V7M_CCR_ICACHE 17
struct v7m_mpu {
uint32_t type; /* Type Register */
uint32_t ctrl; /* Control Register */

arch/arm/lib/Makefile

@@ -55,8 +55,10 @@
obj-y += cache.o
ifndef CONFIG_ARM64
ifndef CONFIG_CPU_V7M
obj-y += cache-cp15.o
endif
endif
obj-y += psci-dt.o
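The hunk above keeps the armv7-A cache-cp15.o implementation out of Cortex-M
(CONFIG_CPU_V7M) builds, which now use the new armv7m cache.c instead. A board
can still opt out of either cache at build time through the
CONFIG_SYS_DCACHE_OFF / CONFIG_SYS_ICACHE_OFF switches that cache.c checks; at
the time of this patch these were plain config-header defines, e.g. (board
header name hypothetical):

/* include/configs/some_board.h - hypothetical example */
#define CONFIG_SYS_DCACHE_OFF	/* compile the empty d-cache stubs in cache.c */
#define CONFIG_SYS_ICACHE_OFF	/* compile the empty i-cache stubs in cache.c */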