Skip to content

Commit

Permalink
core: update cache helpers
Browse files Browse the repository at this point in the history
Updates AArch64 and ARMv7 cache helpers from lib/aarch32/cache_helpers.S
and lib/aarch64/cache_helpers.S in ARM-TF,
https://github.com/ARM-software/arm-trusted-firmware/tree/2bd26faf62411c75111fea4b23c542865383b068

The imported routines only cover the inner cache. The already present ARMv7
cache routines are replaced by the new equivalent routines. The AArch64
routines are updated with the recent changes in ARM-TF.

The imported files are modified to better fit into OP-TEE, some
functions and defines are renamed.

Reviewed-by: Etienne Carriere <etienne.carriere@linaro.org>
Tested-by: Jens Wiklander <jens.wiklander@linaro.org> (Hikey AArch{32,64} pager)
Tested-by: Jens Wiklander <jens.wiklander@linaro.org> (Juno AArch{32,64} pager)
Signed-off-by: Jens Wiklander <jens.wiklander@linaro.org>
  • Loading branch information
jenswi-linaro committed Jun 21, 2017
1 parent d3880cd commit b3177a3
Show file tree
Hide file tree
Showing 10 changed files with 508 additions and 426 deletions.
56 changes: 56 additions & 0 deletions core/arch/arm/include/kernel/cache_helpers.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
/*
* Copyright (c) 2017, Linaro Limited
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef __KERNEL_CACHE_HELPERS_H
#define __KERNEL_CACHE_HELPERS_H

#ifndef ASM
#include <types_ext.h>
#endif

/*
 * Data Cache set/way op type defines
 *
 * Passed as op_type to the dcache_op_*() functions below. The values
 * double as indices into the per-operation jump table used by the
 * assembly implementation, so they must stay 0, 1, 2.
 */
#define DCACHE_OP_INV 0x0
#define DCACHE_OP_CLEAN_INV 0x1
#define DCACHE_OP_CLEAN 0x2

#ifndef ASM
/* Clean and invalidate data cache for [addr, addr + size) by MVA */
void dcache_cleaninv_range(void *addr, size_t size);
/* Clean data cache for [addr, addr + size) by MVA */
void dcache_clean_range(void *addr, size_t size);
/* Invalidate data cache for [addr, addr + size) by MVA */
void dcache_inv_range(void *addr, size_t size);

/* Invalidate entire instruction cache (and branch predictors) */
void icache_inv_all(void);
/* Invalidate instruction cache for [addr, addr + size) by MVA */
void icache_inv_range(void *addr, size_t size);

/*
 * Data cache maintenance by set/way, op_type is one of the
 * DCACHE_OP_* defines above.
 */
/* Operate on all levels up to the Level Of Unification Inner Shareable */
void dcache_op_louis(unsigned long op_type);
/* Operate on all levels up to the Level Of Coherency */
void dcache_op_all(unsigned long op_type);

/* Operate on a single cache level only */
void dcache_op_level1(unsigned long op_type);
void dcache_op_level2(unsigned long op_type);
void dcache_op_level3(unsigned long op_type);
#endif /*!ASM*/

#endif /*__KERNEL_CACHE_HELPERS_H*/
27 changes: 0 additions & 27 deletions core/arch/arm/include/kernel/tz_ssvce.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,19 +34,6 @@

unsigned int secure_get_cpu_id(void);

void arm_cl1_d_cleanbysetway(void);
void arm_cl1_d_invbysetway(void);
void arm_cl1_d_cleaninvbysetway(void);
/* End address is included in the range (last address in range)*/
void arm_cl1_d_cleanbyva(void *start, void *end);
/* End address is included in the range (last address in range)*/
void arm_cl1_d_invbyva(void *start, void *end);
/* End address is included in the range (last address in range)*/
void arm_cl1_d_cleaninvbyva(void *start, void *end);
void arm_cl1_i_inv_all(void);
/* End address is included in the range (last address in range)*/
void arm_cl1_i_inv(void *start, void *end);

void secure_mmu_datatlbinvall(void);
void secure_mmu_unifiedtlbinvall(void);
void secure_mmu_unifiedtlbinvbymva(unsigned long addr);
Expand All @@ -56,18 +43,4 @@ void secure_mmu_unifiedtlbinv_byasid(unsigned long asid);
void secure_mmu_disable(void);
#endif /*!ASM*/

#ifdef ARM64
/* D$ set/way op type defines */
#define DCISW 0x0
#define DCCISW 0x1
#define DCCSW 0x2

#ifndef ASM
void flush_dcache_range(vaddr_t va, size_t len);
void inv_dcache_range(vaddr_t va, size_t len);
void dcsw_op_louis(uint32_t op);
void dcsw_op_all(uint32_t op);
#endif /*!ASM*/
#endif /*ARM64*/

#endif
294 changes: 294 additions & 0 deletions core/arch/arm/kernel/cache_helpers_a32.S
Original file line number Diff line number Diff line change
@@ -0,0 +1,294 @@
/*
* Copyright (c) 2017, Linaro Limited. All rights reserved.
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/

#include <arm.h>
#include <arm32_macros.S>
#include <asm.S>
#include <kernel/cache_helpers.h>
#include <kernel/unwind.h>

#define WORD_SIZE 4

/*
 * Cache line size helpers
 *
 * dcache_line_size: \reg = data cache minimum line size in bytes,
 * derived from CTR.DminLine (log2 of the number of words per line).
 * Clobbers \tmp.
 *
 * icache_line_size: same, but for the instruction cache minimum line
 * size, derived from CTR.IminLine.
 */
.macro dcache_line_size reg, tmp
read_ctr \tmp
ubfx \tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
mov \reg, #WORD_SIZE
lsl \reg, \reg, \tmp // bytes = WORD_SIZE << DminLine
.endm

.macro icache_line_size reg, tmp
read_ctr \tmp
and \tmp, \tmp, #CTR_IMINLINE_MASK // IminLine is at bit 0 of CTR
mov \reg, #WORD_SIZE
lsl \reg, \reg, \tmp // bytes = WORD_SIZE << IminLine
.endm

/*
 * This macro can be used for implementing various data cache operations `op`
 *
 * Expands to a complete function body:
 *   r0 = start address, r1 = size in bytes
 * The start address is rounded down to a cache line boundary and the
 * maintenance instruction write_\reg is issued for every line up to
 * (but excluding) r0 + r1. Completes with a dsb and returns.
 * Clobbers r0-r3.
 */
.macro do_dcache_maintenance_by_mva reg
dcache_line_size r2, r3 // r2 = line size, r3 = scratch
add r1, r0, r1 // r1 = end address (exclusive)
sub r3, r2, #1
bic r0, r0, r3 // align start down to line boundary
loop_\reg:
write_\reg r0 // issue op for the line at r0
add r0, r0, r2
cmp r0, r1
blo loop_\reg
dsb sy // wait for maintenance to complete
bx lr
.endm

/* ------------------------------------------
 * Clean+Invalidate data cache by MVA for the
 * range [addr, addr + size).
 * 'r0' = addr, 'r1' = size
 * ------------------------------------------
 */
.section .text.dcache_cleaninv_range
FUNC dcache_cleaninv_range , :
UNWIND(	.fnstart)
	/* DCCIMVAC: clean and invalidate by MVA to PoC */
	do_dcache_maintenance_by_mva dccimvac
UNWIND(	.fnend)
END_FUNC dcache_cleaninv_range

/* ------------------------------------------
 * Clean data cache by MVA for the range
 * [addr, addr + size).
 * 'r0' = addr, 'r1' = size
 * ------------------------------------------
 */
.section .text.dcache_clean_range
FUNC dcache_clean_range , :
UNWIND(	.fnstart)
	/* DCCMVAC: clean by MVA to PoC */
	do_dcache_maintenance_by_mva dccmvac
UNWIND(	.fnend)
END_FUNC dcache_clean_range

/* ------------------------------------------
 * Invalidate data cache by MVA for the range
 * [addr, addr + size).
 * 'r0' = addr, 'r1' = size
 *
 * NOTE(review): unaligned start/end addresses
 * are rounded to line boundaries, so lines
 * partially outside the range are invalidated
 * too — callers must pass line-aligned buffers
 * to avoid losing neighbouring data.
 * ------------------------------------------
 */
.section .text.dcache_inv_range
FUNC dcache_inv_range , :
UNWIND(	.fnstart)
	/* DCIMVAC: invalidate by MVA to PoC */
	do_dcache_maintenance_by_mva dcimvac
UNWIND(	.fnend)
END_FUNC dcache_inv_range

/* ----------------------------------------------------------------
 * Data cache operations by set/way to the level specified
 *
 * The main function, do_dcsw_op requires:
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * r1: The cache level to begin operation from
 * r2: clidr_el1
 * r3: The last cache level to operate on
 * and will carry out the operation on each data cache from level 0
 * to the level in r3 in sequence
 *
 * The dcsw_op macro sets up the r2 and r3 parameters based on
 * clidr_el1 cache information before invoking the main function
 * ----------------------------------------------------------------
 */

/*
 * Tail-calls do_dcsw_op with:
 *   r1 = 0 (start at level 1, CSSELR encoding)
 *   r2 = CLIDR
 *   r3 = (CLIDR field at \shift, width \fw) << \ls, i.e. the limit
 *        level (LoUIS or LoC) in CSSELR encoding
 * r0 (op type) is passed through untouched.
 */
.macro dcsw_op shift, fw, ls
read_clidr r2
ubfx r3, r2, \shift, \fw // extract LoUIS/LoC field
lsl r3, r3, \ls // convert to CSSELR level encoding
mov r1, #0 // begin at the first cache level
b do_dcsw_op
.endm

.section .text.do_dcsw_op
/*
 * Core set/way maintenance loop (see block comment above).
 *
 * Register roles:
 *   r0  = op type on entry; inside loop3, the DC*SW operand
 *         (level | way << r5 | set << r10)
 *   r1  = current cache level, CSSELR encoding (level_nr << 1)
 *   r2  = CLIDR value
 *   r3  = limit level, CSSELR encoding (exclusive)
 *   r6  = address of the 2-instruction op/return pair for this op type
 * Clobbers r4-r12; r4-r12 are saved/restored on the stack.
 */
LOCAL_FUNC do_dcsw_op , :
UNWIND(	.fnstart)
	push	{r4-r12,lr}
	adr	r11, dcsw_loop_table	// r11 = base of the per-op jump table
	add	r6, r11, r0, lsl #3	// cache op is 2x32-bit instructions
loop1:
	add	r10, r1, r1, LSR #1	// Work out 3x current cache level
	mov	r12, r2, LSR r10	// extract cache type bits from clidr
	and	r12, r12, #7		// mask the bits for current cache only
	cmp	r12, #2			// see what cache we have at this level
	blo	level_done		// no cache or only instruction cache at this level

	write_csselr r1			// select current cache level in csselr
	isb				// isb to sych the new cssr&csidr
	read_ccsidr r12			// read the new ccsidr
	and	r10, r12, #7		// extract the length of the cache lines
	add	r10, r10, #4		// add 4 (r10 = line length offset)
	ubfx	r4, r12, #3, #10	// r4 = maximum way number (right aligned)
	clz	r5, r4			// r5 = the bit position of the way size increment
	mov	r9, r4			// r9 working copy of the aligned max way number

loop2:
	ubfx	r7, r12, #13, #15	// r7 = max set number (right aligned)

loop3:
	orr	r0, r1, r9, LSL r5	// factor in the way number and cache level into r0
	orr	r0, r0, r7, LSL r10	// factor in the set number

	blx	r6			// issue one DC*SW op via the jump table
	subs	r7, r7, #1		// decrement the set number
	bhs	loop3
	subs	r9, r9, #1		// decrement the way number
	bhs	loop2
level_done:
	add	r1, r1, #2		// increment the cache number (CSSELR step is 2)
	cmp	r3, r1			// more levels below the limit?
	dsb	sy			// ensure completion of previous cache maintenance instruction
	bhi	loop1

	mov	r6, #0
	write_csselr r6			// select cache level 0 in csselr
	dsb	sy
	isb
	pop	{r4-r12,pc}

	/*
	 * Jump table indexed by op type * 8: one (maintenance op, return)
	 * pair per DCACHE_OP_* value — INV(0), CLEAN_INV(1), CLEAN(2).
	 */
dcsw_loop_table:
	write_dcisw r0			// DCACHE_OP_INV
	bx	lr
	write_dccisw r0			// DCACHE_OP_CLEAN_INV
	bx	lr
	write_dccsw r0			// DCACHE_OP_CLEAN
	bx	lr
UNWIND(	.fnend)
END_FUNC do_dcsw_op

/* ---------------------------------------------------------------
 * Data cache operations by set/way till PoU.
 *
 * Operates on all cache levels below the Level Of Unification
 * Inner Shareable (CLIDR.LoUIS).
 *
 * The function requires :
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * ---------------------------------------------------------------
 */
.section .text.dcache_op_louis
FUNC dcache_op_louis , :
UNWIND(	.fnstart)
	dcsw_op #CLIDR_LOUIS_SHIFT, #CLIDR_FIELD_WIDTH, #CSSELR_LEVEL_SHIFT
UNWIND(	.fnend)
END_FUNC dcache_op_louis

/* ---------------------------------------------------------------
 * Data cache operations by set/way till PoC.
 *
 * Operates on all cache levels below the Level Of Coherency
 * (CLIDR.LoC).
 *
 * The function requires :
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * ---------------------------------------------------------------
 */
.section .text.dcache_op_all
FUNC dcache_op_all , :
UNWIND(	.fnstart)
	dcsw_op #CLIDR_LOC_SHIFT, #CLIDR_FIELD_WIDTH, #CSSELR_LEVEL_SHIFT
UNWIND(	.fnend)
END_FUNC dcache_op_all


/* ---------------------------------------------------------------
 * Helper macro for data cache operations by set/way for the
 * level specified
 *
 * \level is the target level already in CSSELR encoding
 * (level_nr << CSSELR_LEVEL_SHIFT). Tail-calls do_dcsw_op with
 * r1 = \level - 2 (start) and r3 = \level (exclusive limit), so
 * exactly one cache level is processed. r0 (op type) passes through.
 * ---------------------------------------------------------------
 */
.macro dcsw_op_level level
read_clidr r2
mov r3, \level // limit = target level (exclusive)
sub r1, r3, #2 // start = one CSSELR step below the limit
b do_dcsw_op
.endm

/* ---------------------------------------------------------------
 * Data cache operations by set/way for level 1 cache
 *
 * The main function, do_dcsw_op requires:
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * ---------------------------------------------------------------
 */
.section .text.dcache_op_level1
FUNC dcache_op_level1 , :
UNWIND(	.fnstart)
	dcsw_op_level #(1 << CSSELR_LEVEL_SHIFT)
UNWIND(	.fnend)
END_FUNC dcache_op_level1

/* ---------------------------------------------------------------
 * Data cache operations by set/way for level 2 cache
 *
 * The main function, do_dcsw_op requires:
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * ---------------------------------------------------------------
 */
.section .text.dcache_op_level2
FUNC dcache_op_level2 , :
UNWIND(	.fnstart)
	dcsw_op_level #(2 << CSSELR_LEVEL_SHIFT)
UNWIND(	.fnend)
END_FUNC dcache_op_level2

/* ---------------------------------------------------------------
 * Data cache operations by set/way for level 3 cache
 *
 * The main function, do_dcsw_op requires:
 * r0: The operation type (DCACHE_OP_INV, DCACHE_OP_CLEAN_INV,
 * DCACHE_OP_CLEAN), as defined in cache_helpers.h
 * ---------------------------------------------------------------
 */
.section .text.dcache_op_level3
FUNC dcache_op_level3 , :
UNWIND(	.fnstart)
	dcsw_op_level #(3 << CSSELR_LEVEL_SHIFT)
UNWIND(	.fnend)
END_FUNC dcache_op_level3

.section .text.icache_inv_all
/*
 * Invalidate the entire instruction cache, all PEs in the Inner
 * Shareable domain (ICIALLUIS), including branch predictors.
 * No arguments, clobbers nothing but flags.
 */
FUNC icache_inv_all , :
UNWIND(	.fnstart)
	/* Invalidate Entire Instruction Cache (and branch predictors) */
	write_icialluis

	dsb	ishst	/* ensure that maintenance operations are seen */
	isb		/* by the instructions right after the isb */

	bx	lr
UNWIND(	.fnend)
END_FUNC icache_inv_all

/* ------------------------------------------
 * Invalidate instruction cache by MVA for
 * the range [addr, addr + size), then
 * invalidate all branch predictors (inner
 * shareable).
 * 'r0' = addr, 'r1' = size
 * Clobbers r0-r3.
 * ------------------------------------------
 */
.section .text.icache_inv_range
FUNC icache_inv_range , :
UNWIND(	.fnstart)
	icache_line_size r2, r3	// r2 = I-cache line size in bytes
	add	r1, r0, r1	// r1 = end address (exclusive)
	sub	r3, r2, #1
	bic	r0, r0, r3	// align start down to line boundary
loop_ic_inv:
	write_icimvau r0	// ICIMVAU: invalidate I-cache line by MVA to PoU
	add	r0, r0, r2
	cmp	r0, r1
	blo	loop_ic_inv

	/* Invalidate entire branch predictor array inner shareable */
	write_bpiallis

	dsb	ishst
	isb

	bx	lr
UNWIND(	.fnend)
END_FUNC icache_inv_range
Loading

0 comments on commit b3177a3

Please sign in to comment.