NMSIS-Core  Version 1.3.1
NMSIS-Core support for Nuclei processor-based devices
core_feature_spmp.h
1 /*
2  * Copyright (c) 2019 Nuclei Limited. All rights reserved.
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the License); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 #ifndef __CORE_FEATURE_SPMP_H__
19 #define __CORE_FEATURE_SPMP_H__
20 
/*
 * sPMP/SMPU Feature Configuration Macros:
 * 1. __SPMP_PRESENT / __SMPU_PRESENT: Define whether sPMP / SMPU is present or not
 *    * 0: Not present
 *    * 1: Present
 * 2. __SPMP_ENTRY_NUM / __SMPU_ENTRY_NUM: Define the number of sPMP / SMPU entries;
 *    only 8 or 16 is configurable. __SMPU_ENTRY_NUM is the same as __SPMP_ENTRY_NUM.
 */
34 #ifdef __cplusplus
35 extern "C" {
36 #endif
37 
38 #include "core_feature_base.h"
39 #include "core_compatiable.h"
40 
41 #if defined(__SPMP_PRESENT) && (__SPMP_PRESENT == 1)
42 
43 /* ===== sPMP Operations ===== */
58 #ifndef __SPMP_ENTRY_NUM
59 /* Number of __SPMP_ENTRY_NUM entries should be defined in <Device.h> */
60 #error "__SPMP_ENTRY_NUM is not defined, please check!"
61 #endif
62 
/**
 * \brief sPMP/SMPU entry configuration.
 */
typedef struct SPMP_CONFIG {
    /* Permission/attribute bits; set using the SPMP_L/SPMP_U/SPMP_R/SPMP_W/SPMP_X
     * macros (or SMPU_S/SMPU_R/SMPU_W/SMPU_X when used as smpu_config) */
    unsigned int protection;
    /* Size of the memory region as a power of 2; minimum is SPMP_SHIFT(2),
     * maximum is __RISCV_XLEN */
    unsigned long order;
    /* Base address of the memory region; must be 2^order aligned */
    unsigned long base_addr;
} spmp_config;
82 
96 {
97  switch (csr_idx) {
98  case 0: return __RV_CSR_READ(CSR_SPMPCFG0);
99  case 1: return __RV_CSR_READ(CSR_SPMPCFG1);
100  case 2: return __RV_CSR_READ(CSR_SPMPCFG2);
101  case 3: return __RV_CSR_READ(CSR_SPMPCFG3);
102  default: return 0;
103  }
104 }
105 
118 __STATIC_INLINE void __set_sPMPCFGx(uint32_t csr_idx, rv_csr_t spmpcfg)
119 {
120  switch (csr_idx) {
121  case 0: __RV_CSR_WRITE(CSR_SPMPCFG0, spmpcfg); break;
122  case 1: __RV_CSR_WRITE(CSR_SPMPCFG1, spmpcfg); break;
123  case 2: __RV_CSR_WRITE(CSR_SPMPCFG2, spmpcfg); break;
124  case 3: __RV_CSR_WRITE(CSR_SPMPCFG3, spmpcfg); break;
125  default: return;
126  }
127 }
128 
135 __STATIC_INLINE uint8_t __get_sPMPxCFG(uint32_t entry_idx)
136 {
137  rv_csr_t spmpcfgx = 0;
138  uint8_t csr_cfg_num = 0;
139  uint16_t csr_idx = 0;
140  uint16_t cfg_shift = 0;
141 
142  if (entry_idx >= __SPMP_ENTRY_NUM) return 0;
143 
144 #if __RISCV_XLEN == 32
145  csr_cfg_num = 4;
146  csr_idx = entry_idx >> 2;
147 #elif __RISCV_XLEN == 64
148  csr_cfg_num = 8;
149  /* For RV64, spmpcfg0 and spmpcfg2 each hold 8 sPMP entries, align by 2 */
150  csr_idx = (entry_idx >> 2) & ~1;
151 #else
152  // TODO Add RV128 Handling
153  return 0;
154 #endif
155  spmpcfgx = __get_sPMPCFGx(csr_idx);
156  /*
157  * first get specific spmpxcfg's order in one CSR composed of csr_cfg_num spmpxcfgs,
158  * then get spmpxcfg's bit position in one CSR by left shift 3(each spmpxcfg size is one byte)
159  */
160  cfg_shift = (entry_idx & (csr_cfg_num - 1)) << 3;
161 
162  /* read specific spmpxcfg register value */
163  return (uint8_t)(__RV_EXTRACT_FIELD(spmpcfgx, 0xFF << cfg_shift));
164 }
165 
/**
 * \brief   Set 8bit sPMPxCFG by sPMP entry index
 * \details Read-modify-write the hosting sPMPCFG CSR so that only the 8-bit
 *          spmpxcfg field of the given entry is updated.
 * \param [in]  entry_idx  sPMP entry index (0 .. __SPMP_ENTRY_NUM-1); out-of-range
 *                         indices are silently ignored
 * \param [in]  spmpxcfg   new 8-bit spmpxcfg value for this entry
 */
__STATIC_INLINE void __set_sPMPxCFG(uint32_t entry_idx, uint8_t spmpxcfg)
{
    rv_csr_t spmpcfgx = 0;
    uint8_t csr_cfg_num = 0;
    uint16_t csr_idx = 0;
    uint16_t cfg_shift = 0;
    if (entry_idx >= __SPMP_ENTRY_NUM) return;

#if __RISCV_XLEN == 32
    /* RV32: each 32-bit sPMPCFG CSR packs 4 entry configs */
    csr_cfg_num = 4;
    csr_idx = entry_idx >> 2;
#elif __RISCV_XLEN == 64
    csr_cfg_num = 8;
    /* For RV64, spmpcfg0 and spmpcfg2 each hold 8 sPMP entries, align by 2 */
    csr_idx = (entry_idx >> 2) & ~1;
#else
    // TODO Add RV128 Handling
    return;
#endif
    /* read specific spmpcfgx register value */
    spmpcfgx = __get_sPMPCFGx(csr_idx);
    /*
     * first get specific spmpxcfg's order in one CSR composed of csr_cfg_num spmpxcfgs,
     * then get spmpxcfg's bit position in one CSR by left shift 3(each spmpxcfg size is one byte)
     */
    cfg_shift = (entry_idx & (csr_cfg_num - 1)) << 3;

    /* replace only this entry's byte, leaving the sibling entries untouched */
    spmpcfgx = __RV_INSERT_FIELD(spmpcfgx, 0xFFUL << cfg_shift, spmpxcfg);
    __set_sPMPCFGx(csr_idx, spmpcfgx);
}
205 
213 {
214  switch (csr_idx) {
215  case 0: return __RV_CSR_READ(CSR_SPMPADDR0);
216  case 1: return __RV_CSR_READ(CSR_SPMPADDR1);
217  case 2: return __RV_CSR_READ(CSR_SPMPADDR2);
218  case 3: return __RV_CSR_READ(CSR_SPMPADDR3);
219  case 4: return __RV_CSR_READ(CSR_SPMPADDR4);
220  case 5: return __RV_CSR_READ(CSR_SPMPADDR5);
221  case 6: return __RV_CSR_READ(CSR_SPMPADDR6);
222  case 7: return __RV_CSR_READ(CSR_SPMPADDR7);
223  case 8: return __RV_CSR_READ(CSR_SPMPADDR8);
224  case 9: return __RV_CSR_READ(CSR_SPMPADDR9);
225  case 10: return __RV_CSR_READ(CSR_SPMPADDR10);
226  case 11: return __RV_CSR_READ(CSR_SPMPADDR11);
227  case 12: return __RV_CSR_READ(CSR_SPMPADDR12);
228  case 13: return __RV_CSR_READ(CSR_SPMPADDR13);
229  case 14: return __RV_CSR_READ(CSR_SPMPADDR14);
230  case 15: return __RV_CSR_READ(CSR_SPMPADDR15);
231  default: return 0;
232  }
233 }
234 
241 __STATIC_INLINE void __set_sPMPADDRx(uint32_t csr_idx, rv_csr_t spmpaddr)
242 {
243  switch (csr_idx) {
244  case 0: __RV_CSR_WRITE(CSR_SPMPADDR0, spmpaddr); break;
245  case 1: __RV_CSR_WRITE(CSR_SPMPADDR1, spmpaddr); break;
246  case 2: __RV_CSR_WRITE(CSR_SPMPADDR2, spmpaddr); break;
247  case 3: __RV_CSR_WRITE(CSR_SPMPADDR3, spmpaddr); break;
248  case 4: __RV_CSR_WRITE(CSR_SPMPADDR4, spmpaddr); break;
249  case 5: __RV_CSR_WRITE(CSR_SPMPADDR5, spmpaddr); break;
250  case 6: __RV_CSR_WRITE(CSR_SPMPADDR6, spmpaddr); break;
251  case 7: __RV_CSR_WRITE(CSR_SPMPADDR7, spmpaddr); break;
252  case 8: __RV_CSR_WRITE(CSR_SPMPADDR8, spmpaddr); break;
253  case 9: __RV_CSR_WRITE(CSR_SPMPADDR9, spmpaddr); break;
254  case 10: __RV_CSR_WRITE(CSR_SPMPADDR10, spmpaddr); break;
255  case 11: __RV_CSR_WRITE(CSR_SPMPADDR11, spmpaddr); break;
256  case 12: __RV_CSR_WRITE(CSR_SPMPADDR12, spmpaddr); break;
257  case 13: __RV_CSR_WRITE(CSR_SPMPADDR13, spmpaddr); break;
258  case 14: __RV_CSR_WRITE(CSR_SPMPADDR14, spmpaddr); break;
259  case 15: __RV_CSR_WRITE(CSR_SPMPADDR15, spmpaddr); break;
260  default: return;
261  }
262 }
263 
275 __STATIC_INLINE void __set_sPMPENTRYx(uint32_t entry_idx, const spmp_config *spmp_cfg)
276 {
277  unsigned int cfg_shift, cfg_csr_idx, addr_csr_idx = 0;
278  unsigned long cfgmask, addrmask = 0;
279  unsigned long spmpcfg, spmpaddr = 0;
280  unsigned long protection, csr_cfg_num = 0;
281  /* check parameters */
282  if (entry_idx >= __SPMP_ENTRY_NUM || spmp_cfg->order > __RISCV_XLEN || spmp_cfg->order < SPMP_SHIFT) return;
283 
284  /* calculate sPMP register and offset */
285 #if __RISCV_XLEN == 32
286  csr_cfg_num = 4;
287  cfg_csr_idx = (entry_idx >> 2);
288 #elif __RISCV_XLEN == 64
289  csr_cfg_num = 8;
290  cfg_csr_idx = ((entry_idx >> 2)) & ~1;
291 #else
292  // TODO Add RV128 Handling
293  return;
294 #endif
295  /*
296  * first get specific spmpxcfg's order in one CSR composed of csr_cfg_num spmpxcfgs,
297  * then get spmpxcfg's bit position in one CSR by left shift 3, each spmpxcfg size is one byte
298  */
299  cfg_shift = (entry_idx & (csr_cfg_num - 1)) << 3;
300  addr_csr_idx = entry_idx;
301 
302  /* encode sPMP config */
303  protection = (unsigned long)spmp_cfg->protection;
304  protection |= (SPMP_SHIFT == spmp_cfg->order) ? SPMP_A_NA4 : SPMP_A_NAPOT;
305  cfgmask = ~(0xFFUL << cfg_shift);
306  spmpcfg = (__get_sPMPCFGx(cfg_csr_idx) & cfgmask);
307  spmpcfg |= ((protection << cfg_shift) & ~cfgmask);
308 
309  /* encode sPMP address */
310  if (SPMP_SHIFT == spmp_cfg->order) { /* NA4 */
311  spmpaddr = (spmp_cfg->base_addr >> SPMP_SHIFT);
312  } else { /* NAPOT */
313  addrmask = (1UL << (spmp_cfg->order - SPMP_SHIFT)) - 1;
314  spmpaddr = ((spmp_cfg->base_addr >> SPMP_SHIFT) & ~addrmask);
315  spmpaddr |= (addrmask >> 1);
316  }
317  /*
318  * write csrs, update the address first, in case the entry is locked that
319  * we won't be able to modify it after we set the config csr.
320  */
321  __set_sPMPADDRx(addr_csr_idx, spmpaddr);
322  __set_sPMPCFGx(cfg_csr_idx, spmpcfg);
323 }
324 
/**
 * \brief   Get sPMP entry by entry index
 * \details Read back one sPMP entry's config byte and address register and
 *          decode them into the given spmp_config (protection bits, base
 *          address and order).
 * \param [in]   entry_idx  sPMP entry index (0 .. __SPMP_ENTRY_NUM-1)
 * \param [out]  spmp_cfg   decoded entry description
 * \return  0 on success, -1 on bad parameters or unsupported XLEN
 */
__STATIC_INLINE int __get_sPMPENTRYx(unsigned int entry_idx, spmp_config *spmp_cfg)
{
    unsigned int cfg_shift, cfg_csr_idx, addr_csr_idx = 0;
    unsigned long cfgmask, spmpcfg, prot = 0;
    unsigned long t1, addr, spmpaddr, len = 0;
    uint8_t csr_cfg_num = 0;
    /* check parameters */
    if (entry_idx >= __SPMP_ENTRY_NUM || !spmp_cfg) return -1;

    /* calculate sPMP register and offset */
#if __RISCV_XLEN == 32
    csr_cfg_num = 4;
    cfg_csr_idx = entry_idx >> 2;
#elif __RISCV_XLEN == 64
    csr_cfg_num = 8;
    /* For RV64, spmpcfg0 and spmpcfg2 each hold 8 sPMP entries, align by 2 */
    cfg_csr_idx = (entry_idx >> 2) & ~1;
#else
// TODO Add RV128 Handling
    return -1;
#endif

    /* this entry's byte position inside the hosting sPMPCFG CSR */
    cfg_shift = (entry_idx & (csr_cfg_num - 1)) << 3;
    addr_csr_idx = entry_idx;

    /* decode sPMP config: isolate this entry's 8-bit field */
    cfgmask = (0xFFUL << cfg_shift);
    spmpcfg = (__get_sPMPCFGx(cfg_csr_idx) & cfgmask);
    prot = spmpcfg >> cfg_shift;

    /* decode sPMP address */
    spmpaddr = __get_sPMPADDRx(addr_csr_idx);
    if (SPMP_A_NAPOT == (prot & SPMP_A)) {
        /* NAPOT: the count of trailing 1-bits encodes the region size;
         * clear them to recover the base address */
        t1 = __CTZ(~spmpaddr);
        addr = (spmpaddr & ~((1UL << t1) - 1)) << SPMP_SHIFT;
        len = (t1 + SPMP_SHIFT + 1);
    } else {
        /* NA4 (or OFF): address is the base, region order is SPMP_SHIFT */
        addr = spmpaddr << SPMP_SHIFT;
        len = SPMP_SHIFT;
    }

    /* return details */
    spmp_cfg->protection = prot;
    spmp_cfg->base_addr = addr;
    spmp_cfg->order = len;

    return 0;
}
383 
#if defined(__SMPU_PRESENT) && (__SMPU_PRESENT == 1)

/* SMPU shares the sPMP CSR layout and programming model, so the SMPU API is
 * provided as thin aliases of the corresponding sPMP types and functions. */
typedef spmp_config smpu_config;
#define __get_SMPUCFGx      __get_sPMPCFGx
#define __set_SMPUCFGx      __set_sPMPCFGx
#define __get_SMPUxCFG      __get_sPMPxCFG
#define __set_SMPUxCFG      __set_sPMPxCFG
#define __get_SMPUADDRx     __get_sPMPADDRx
#define __set_SMPUADDRx     __set_sPMPADDRx
#define __set_SMPUENTRYx    __set_sPMPENTRYx
#define __get_SMPUENTRYx    __get_sPMPENTRYx
397 
407 __STATIC_INLINE void __set_SMPUSWITCHx(uint64_t val)
408 {
409 #if __RISCV_XLEN == 32
410  __RV_CSR_WRITE(CSR_SMPUSWITCH0, (uint32_t)val);
411  __RV_CSR_WRITE(CSR_SMPUSWITCH1, (uint32_t)(val >> 32));
412 #elif __RISCV_XLEN == 64
414 #else
415  // TODO Add RV128 Handling
416 #endif
417 }
418 
427 __STATIC_INLINE uint64_t __get_SMPUSWITCHx(void)
428 {
429 #if __RISCV_XLEN == 32
430  uint32_t lo, hi = 0;
433  return (uint64_t)((((uint64_t)hi) << 32) | lo);
434 #elif __RISCV_XLEN == 64
435  return (uint64_t)__RV_CSR_READ(CSR_SMPUSWITCH0);
436 #else
437  // TODO Add RV128 Handling
438 #endif
439 }
440 
441 #endif
442  /* End of Doxygen Group NMSIS_Core_SPMP_Functions */
444 #endif /* defined(__SPMP_PRESENT) && (__SPMP_PRESENT == 1) */
445 
446 #ifdef __cplusplus
447 }
448 #endif
449 #endif /* __CORE_FEATURE_SPMP_H__ */
CSR_SPMPADDR12
#define CSR_SPMPADDR12
Definition: riscv_encoding.h:900
CSR_SPMPADDR9
#define CSR_SPMPADDR9
Definition: riscv_encoding.h:897
SPMP_A
#define SPMP_A
Definition: riscv_encoding.h:422
SPMP_A_NA4
#define SPMP_A_NA4
Definition: riscv_encoding.h:424
__set_sPMPxCFG
__STATIC_INLINE void __set_sPMPxCFG(uint32_t entry_idx, uint8_t spmpxcfg)
Set 8bit sPMPxCFG by spmp entry index.
Definition: core_feature_spmp.h:175
__CTZ
__STATIC_FORCEINLINE unsigned long __CTZ(unsigned long data)
Count tailing zero.
Definition: core_compatiable.h:255
CSR_SPMPADDR4
#define CSR_SPMPADDR4
Definition: riscv_encoding.h:892
__get_sPMPxCFG
__STATIC_INLINE uint8_t __get_sPMPxCFG(uint32_t entry_idx)
Get 8bit sPMPxCFG Register by sPMP entry index.
Definition: core_feature_spmp.h:135
__RV_CSR_WRITE
#define __RV_CSR_WRITE(csr, val)
CSR operation Macro for csrw instruction.
Definition: core_feature_base.h:532
CSR_SPMPADDR10
#define CSR_SPMPADDR10
Definition: riscv_encoding.h:898
__set_sPMPADDRx
__STATIC_INLINE void __set_sPMPADDRx(uint32_t csr_idx, rv_csr_t spmpaddr)
Set sPMPADDRx by CSR index.
Definition: core_feature_spmp.h:241
__set_sPMPCFGx
__STATIC_INLINE void __set_sPMPCFGx(uint32_t csr_idx, rv_csr_t spmpcfg)
Set sPMPCFGx by csr index.
Definition: core_feature_spmp.h:118
CSR_SPMPADDR0
#define CSR_SPMPADDR0
Definition: riscv_encoding.h:888
CSR_SPMPCFG3
#define CSR_SPMPCFG3
Definition: riscv_encoding.h:887
__STATIC_INLINE
#define __STATIC_INLINE
Define a static function that may be inlined by the compiler.
Definition: nmsis_gcc.h:65
SPMP_SHIFT
#define SPMP_SHIFT
Definition: riscv_encoding.h:429
__get_sPMPCFGx
__STATIC_INLINE rv_csr_t __get_sPMPCFGx(uint32_t csr_idx)
Get sPMPCFGx Register by csr index.
Definition: core_feature_spmp.h:95
CSR_SPMPADDR15
#define CSR_SPMPADDR15
Definition: riscv_encoding.h:903
SPMP_CONFIG::protection
unsigned int protection
Set permissions using macros SMPU_S/ SMPU_R/SMPU_W/ SMPU_X of SMPU; SPMP_L/ SPMP_U/SPMP_R/ SPMP_W/SPM...
Definition: core_feature_spmp.h:69
SPMP_CONFIG
Definition: core_feature_spmp.h:63
__get_sPMPADDRx
__STATIC_INLINE rv_csr_t __get_sPMPADDRx(uint32_t csr_idx)
Get sPMPADDRx Register by CSR index.
Definition: core_feature_spmp.h:212
CSR_SMPUSWITCH0
#define CSR_SMPUSWITCH0
Definition: riscv_encoding.h:926
__RV_CSR_READ
#define __RV_CSR_READ(csr)
CSR operation Macro for csrr instruction.
Definition: core_feature_base.h:514
CSR_SPMPADDR2
#define CSR_SPMPADDR2
Definition: riscv_encoding.h:890
CSR_SPMPADDR8
#define CSR_SPMPADDR8
Definition: riscv_encoding.h:896
CSR_SPMPADDR14
#define CSR_SPMPADDR14
Definition: riscv_encoding.h:902
CSR_SPMPADDR6
#define CSR_SPMPADDR6
Definition: riscv_encoding.h:894
CSR_SPMPADDR7
#define CSR_SPMPADDR7
Definition: riscv_encoding.h:895
CSR_SPMPADDR3
#define CSR_SPMPADDR3
Definition: riscv_encoding.h:891
CSR_SPMPCFG0
#define CSR_SPMPCFG0
Definition: riscv_encoding.h:884
CSR_SPMPCFG1
#define CSR_SPMPCFG1
Definition: riscv_encoding.h:885
SPMP_CONFIG::base_addr
unsigned long base_addr
Base address of memory region It must be 2^order aligned address.
Definition: core_feature_spmp.h:80
CSR_SPMPCFG2
#define CSR_SPMPCFG2
Definition: riscv_encoding.h:886
SPMP_CONFIG::order
unsigned long order
Size of memory region as power of 2, it has to be minimum 2 and maxium __RISCV_XLEN according to the ...
Definition: core_feature_spmp.h:75
CSR_SMPUSWITCH1
#define CSR_SMPUSWITCH1
Definition: riscv_encoding.h:927
__get_sPMPENTRYx
__STATIC_INLINE int __get_sPMPENTRYx(unsigned int entry_idx, spmp_config *spmp_cfg)
Get sPMP entry by entry idx.
Definition: core_feature_spmp.h:336
__RISCV_XLEN
#define __RISCV_XLEN
Refer to the width of an integer register in bits(either 32 or 64)
Definition: core_feature_base.h:48
CSR_SPMPADDR1
#define CSR_SPMPADDR1
Definition: riscv_encoding.h:889
__set_sPMPENTRYx
__STATIC_INLINE void __set_sPMPENTRYx(uint32_t entry_idx, const spmp_config *spmp_cfg)
Set sPMP entry by entry idx.
Definition: core_feature_spmp.h:275
CSR_SPMPADDR13
#define CSR_SPMPADDR13
Definition: riscv_encoding.h:901
rv_csr_t
unsigned long rv_csr_t
Type of Control and Status Register(CSR), depends on the XLEN defined in RISC-V.
Definition: core_feature_base.h:55
CSR_SPMPADDR5
#define CSR_SPMPADDR5
Definition: riscv_encoding.h:893
SPMP_A_NAPOT
#define SPMP_A_NAPOT
Definition: riscv_encoding.h:425
CSR_SPMPADDR11
#define CSR_SPMPADDR11
Definition: riscv_encoding.h:899