NMSIS-Core Version 1.5.0
NMSIS-Core support for Nuclei processor-based devices
core_feature_dsp.h
/*
 * Copyright (c) 2019 Nuclei Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef __CORE_FEATURE_DSP__
#define __CORE_FEATURE_DSP__

/*
 * DSP Feature Configuration Macro:
 * 1. __DSP_PRESENT: Define whether Digital Signal Processing Unit(DSP) is present or not
 *   * 0: Not present
 *   * 1: Present
 */
#ifdef __cplusplus
 extern "C" {
#endif

#include "core_feature_base.h"

#if defined(__DSP_PRESENT) && (__DSP_PRESENT == 1)

#if defined(__INC_INTRINSIC_API) && (__INC_INTRINSIC_API == 1)
#if defined(__zcc__)
#include <rvp_intrinsic.h>
#else
#if !defined(__ICCRISCV__) && !defined(__llvm__)
#include <rvp_intrinsic.h>
#endif
#endif
#endif

#ifndef __ICCRISCV__
/* ########################### CPU SIMD DSP Intrinsic Functions ########################### */

/* ===== Inline Function Start for 3.1. ADD8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_ADD8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("add8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.1. ADD8 ===== */
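
/* Usage sketch (editorial illustration, not from the original header):
 * __RV_ADD8 adds each byte lane independently, with no carry between
 * lanes. The operand values below are hypothetical. */
__STATIC_FORCEINLINE unsigned long add8_usage_example(void)
{
    unsigned long x = 0x11223344UL;
    unsigned long y = 0x01010101UL;
    return __RV_ADD8(x, y);  /* per-byte sums: 0x12233445 on RV32 */
}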

/* ===== Inline Function Start for 3.2. ADD16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_ADD16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("add16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.2. ADD16 ===== */

/* ===== Inline Function Start for 3.3. ADD64 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_ADD64(unsigned long long a, unsigned long long b)
{
    unsigned long long result;
    __ASM volatile("add64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.3. ADD64 ===== */

/* ===== Inline Function Start for 3.4. AVE ===== */
__STATIC_FORCEINLINE long __RV_AVE(long a, long b)
{
    long result;
    __ASM volatile("ave %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.4. AVE ===== */
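
/* Note (editorial): AVE computes the rounded signed average
 * (a + b + 1) >> 1, so for example __RV_AVE(5, 8) yields 7 rather than
 * the truncated 6. The operand values are hypothetical. */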

/* ===== Inline Function Start for 3.5. BITREV ===== */
__STATIC_FORCEINLINE unsigned long __RV_BITREV(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("bitrev %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.5. BITREV ===== */

/* ===== Inline Function Start for 3.6. BITREVI ===== */
#define __RV_BITREVI(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("bitrevi %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.6. BITREVI ===== */
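
/* Note (editorial): __RV_BITREVI and the other immediate-form intrinsics
 * in this file are statement-expression macros rather than inline
 * functions because the "K" asm constraint demands a compile-time
 * constant immediate; __RV_BITREVI(x, 7) assembles, while passing a
 * runtime variable as the second argument does not. */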

/* ===== Inline Function Start for 3.7. BPICK ===== */
__STATIC_FORCEINLINE unsigned long __RV_BPICK(unsigned long a, unsigned long b, unsigned long c)
{
    unsigned long result;
    __ASM volatile("bpick %0, %1, %2, %3" : "=r"(result) : "r"(a), "r"(b), "r"(c));
    return result;
}
/* ===== Inline Function End for 3.7. BPICK ===== */

/* ===== Inline Function Start for 3.8. CLROV ===== */
__STATIC_FORCEINLINE void __RV_CLROV(void)
{
    __ASM volatile("clrov");
}
/* ===== Inline Function End for 3.8. CLROV ===== */

/* ===== Inline Function Start for 3.9. CLRS8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLRS8(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clrs8 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.9. CLRS8 ===== */

/* ===== Inline Function Start for 3.10. CLRS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLRS16(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clrs16 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.10. CLRS16 ===== */

/* ===== Inline Function Start for 3.11. CLRS32 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLRS32(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clrs32 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.11. CLRS32 ===== */

/* ===== Inline Function Start for 3.12. CLO8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLO8(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clo8 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.12. CLO8 ===== */

/* ===== Inline Function Start for 3.13. CLO16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLO16(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clo16 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.13. CLO16 ===== */

/* ===== Inline Function Start for 3.14. CLO32 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLO32(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clo32 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.14. CLO32 ===== */

/* ===== Inline Function Start for 3.15. CLZ8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLZ8(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clz8 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.15. CLZ8 ===== */

/* ===== Inline Function Start for 3.16. CLZ16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLZ16(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clz16 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.16. CLZ16 ===== */

/* ===== Inline Function Start for 3.17. CLZ32 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CLZ32(unsigned long a)
{
    unsigned long result;
    __ASM volatile("clz32 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.17. CLZ32 ===== */

/* ===== Inline Function Start for 3.18. CMPEQ8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("cmpeq8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.18. CMPEQ8 ===== */
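
/* Usage sketch (editorial illustration): each byte lane compares equal
 * to 0xFF and unequal to 0x00, which makes the result directly usable
 * as a per-lane select mask (e.g. with __RV_BPICK). The operand values
 * below are hypothetical. */
__STATIC_FORCEINLINE unsigned long cmpeq8_usage_example(void)
{
    return __RV_CMPEQ8(0x11223344UL, 0x11993F44UL); /* 0xFF0000FF on RV32 */
}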

/* ===== Inline Function Start for 3.19. CMPEQ16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("cmpeq16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.19. CMPEQ16 ===== */

/* ===== Inline Function Start for 3.20. CRAS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CRAS16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("cras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.20. CRAS16 ===== */
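
/* Note (editorial, RV32 layout): CRAS16 is the "cross add/subtract"
 * pattern used in complex arithmetic: the high half becomes a.H + b.L
 * and the low half becomes a.L - b.H. For the hypothetical operands
 * a = 0x00050002, b = 0x00030001 the result is 0x0006FFFF; on RV64 the
 * same pattern repeats per 32-bit pair of halfwords. */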

/* ===== Inline Function Start for 3.21. CRSA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_CRSA16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("crsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.21. CRSA16 ===== */

/* ===== Inline Function Start for 3.22. INSB ===== */
#define __RV_INSB(t, a, b) \
    ({ \
        unsigned long __t = (unsigned long)(t); \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("insb %0, %1, %2" : "+r"(__t) : "r"(__a), "K"(b)); \
        __t; \
    })
/* ===== Inline Function End for 3.22. INSB ===== */
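
/* Usage sketch (editorial illustration): INSB overwrites byte lane b of
 * the destination with the low byte of a, leaving the other lanes
 * intact; note the "+r" read-modify-write constraint on __t. The values
 * below are hypothetical. */
__STATIC_FORCEINLINE unsigned long insb_usage_example(void)
{
    return __RV_INSB(0x00000000UL, 0xABUL, 1); /* 0x0000AB00 on RV32 */
}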

/* ===== Inline Function Start for 3.23. KABS8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KABS8(unsigned long a)
{
    unsigned long result;
    __ASM volatile("kabs8 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.23. KABS8 ===== */

/* ===== Inline Function Start for 3.24. KABS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KABS16(unsigned long a)
{
    unsigned long result;
    __ASM volatile("kabs16 %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.24. KABS16 ===== */

/* ===== Inline Function Start for 3.25. KABSW ===== */
__STATIC_FORCEINLINE unsigned long __RV_KABSW(signed long a)
{
    unsigned long result;
    __ASM volatile("kabsw %0, %1" : "=r"(result) : "r"(a));
    return result;
}
/* ===== Inline Function End for 3.25. KABSW ===== */

/* ===== Inline Function Start for 3.26. KADD8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KADD8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.26. KADD8 ===== */

/* ===== Inline Function Start for 3.27. KADD16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KADD16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.27. KADD16 ===== */

/* ===== Inline Function Start for 3.28. KADD64 ===== */
__STATIC_FORCEINLINE long long __RV_KADD64(long long a, long long b)
{
    long long result;
    __ASM volatile("kadd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.28. KADD64 ===== */

/* ===== Inline Function Start for 3.29. KADDH ===== */
__STATIC_FORCEINLINE long __RV_KADDH(int a, int b)
{
    long result;
    __ASM volatile("kaddh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.29. KADDH ===== */

/* ===== Inline Function Start for 3.30. KADDW ===== */
__STATIC_FORCEINLINE long __RV_KADDW(int a, int b)
{
    long result;
    __ASM volatile("kaddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.30. KADDW ===== */
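
/* Usage sketch (editorial illustration): the K-prefixed intrinsics
 * saturate instead of wrapping, so adding 1 to INT32_MAX clips at
 * 0x7FFFFFFF and sets the sticky OV flag (see __RV_RDOV below). */
__STATIC_FORCEINLINE long kaddw_usage_example(void)
{
    return __RV_KADDW(0x7FFFFFFF, 1); /* 0x7FFFFFFF, not 0x80000000 */
}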

/* ===== Inline Function Start for 3.31. KCRAS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KCRAS16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.31. KCRAS16 ===== */

/* ===== Inline Function Start for 3.32. KCRSA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KCRSA16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.32. KCRSA16 ===== */

/* ===== Inline Function Start for 3.33.1. KDMBB ===== */
__STATIC_FORCEINLINE long __RV_KDMBB(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("kdmbb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.33.1. KDMBB ===== */

/* ===== Inline Function Start for 3.33.2. KDMBT ===== */
__STATIC_FORCEINLINE long __RV_KDMBT(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("kdmbt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.33.2. KDMBT ===== */

/* ===== Inline Function Start for 3.33.3. KDMTT ===== */
__STATIC_FORCEINLINE long __RV_KDMTT(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("kdmtt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.33.3. KDMTT ===== */
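
/* Note (editorial): the KDM* group multiplies two Q15 halfwords (B =
 * bottom, T = top) and doubles the product into Q31 with saturation:
 * e.g. 0x4000 * 0x4000 (0.5 * 0.5 in Q15) doubles to 0x20000000, i.e.
 * 0.25 in Q31. Only the (-1.0) * (-1.0) corner case saturates. */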

/* ===== Inline Function Start for 3.34.1. KDMABB ===== */
__STATIC_FORCEINLINE long __RV_KDMABB(long t, unsigned int a, unsigned int b)
{
    __ASM volatile("kdmabb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.34.1. KDMABB ===== */

/* ===== Inline Function Start for 3.34.2. KDMABT ===== */
__STATIC_FORCEINLINE long __RV_KDMABT(long t, unsigned int a, unsigned int b)
{
    __ASM volatile("kdmabt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.34.2. KDMABT ===== */

/* ===== Inline Function Start for 3.34.3. KDMATT ===== */
__STATIC_FORCEINLINE long __RV_KDMATT(long t, unsigned int a, unsigned int b)
{
    __ASM volatile("kdmatt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.34.3. KDMATT ===== */

/* ===== Inline Function Start for 3.35.1. KHM8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KHM8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("khm8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.35.1. KHM8 ===== */

/* ===== Inline Function Start for 3.35.2. KHMX8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KHMX8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("khmx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.35.2. KHMX8 ===== */

/* ===== Inline Function Start for 3.36.1. KHM16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KHM16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("khm16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.36.1. KHM16 ===== */

/* ===== Inline Function Start for 3.36.2. KHMX16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KHMX16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("khmx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.36.2. KHMX16 ===== */

/* ===== Inline Function Start for 3.37.1. KHMBB ===== */
__STATIC_FORCEINLINE long __RV_KHMBB(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("khmbb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.37.1. KHMBB ===== */

/* ===== Inline Function Start for 3.37.2. KHMBT ===== */
__STATIC_FORCEINLINE long __RV_KHMBT(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("khmbt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.37.2. KHMBT ===== */

/* ===== Inline Function Start for 3.37.3. KHMTT ===== */
__STATIC_FORCEINLINE long __RV_KHMTT(unsigned int a, unsigned int b)
{
    long result;
    __ASM volatile("khmtt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.37.3. KHMTT ===== */

/* ===== Inline Function Start for 3.38.1. KMABB ===== */
__STATIC_FORCEINLINE long __RV_KMABB(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmabb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.38.1. KMABB ===== */

/* ===== Inline Function Start for 3.38.2. KMABT ===== */
__STATIC_FORCEINLINE long __RV_KMABT(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmabt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.38.2. KMABT ===== */

/* ===== Inline Function Start for 3.38.3. KMATT ===== */
__STATIC_FORCEINLINE long __RV_KMATT(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmatt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.38.3. KMATT ===== */

/* ===== Inline Function Start for 3.39.1. KMADA ===== */
__STATIC_FORCEINLINE long __RV_KMADA(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.39.1. KMADA ===== */

/* ===== Inline Function Start for 3.39.2. KMAXDA ===== */
__STATIC_FORCEINLINE long __RV_KMAXDA(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmaxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.39.2. KMAXDA ===== */

/* ===== Inline Function Start for 3.40.1. KMADS ===== */
__STATIC_FORCEINLINE long __RV_KMADS(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmads %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.40.1. KMADS ===== */

/* ===== Inline Function Start for 3.40.2. KMADRS ===== */
__STATIC_FORCEINLINE long __RV_KMADRS(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmadrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.40.2. KMADRS ===== */

/* ===== Inline Function Start for 3.40.3. KMAXDS ===== */
__STATIC_FORCEINLINE long __RV_KMAXDS(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmaxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.40.3. KMAXDS ===== */

/* ===== Inline Function Start for 3.41. KMAR64 ===== */
__STATIC_FORCEINLINE long long __RV_KMAR64(long long t, long a, long b)
{
    __ASM volatile("kmar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.41. KMAR64 ===== */
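
/* Usage sketch (editorial illustration): KMAR64 is a saturating 64-bit
 * multiply-accumulate, t += a * b, useful for long accumulators in
 * filters. Function and variable names are hypothetical. */
__STATIC_FORCEINLINE long long kmar64_usage_example(long long acc, long x, long y)
{
    return __RV_KMAR64(acc, x, y); /* acc + (long long)x * y, Q63-saturated */
}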

/* ===== Inline Function Start for 3.42.1. KMDA ===== */
__STATIC_FORCEINLINE long __RV_KMDA(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("kmda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.42.1. KMDA ===== */

/* ===== Inline Function Start for 3.42.2. KMXDA ===== */
__STATIC_FORCEINLINE long __RV_KMXDA(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("kmxda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.42.2. KMXDA ===== */

/* ===== Inline Function Start for 3.43.1. KMMAC ===== */
__STATIC_FORCEINLINE long __RV_KMMAC(long t, long a, long b)
{
    __ASM volatile("kmmac %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.43.1. KMMAC ===== */

/* ===== Inline Function Start for 3.43.2. KMMAC.u ===== */
__STATIC_FORCEINLINE long __RV_KMMAC_U(long t, long a, long b)
{
    __ASM volatile("kmmac.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.43.2. KMMAC.u ===== */

/* ===== Inline Function Start for 3.44.1. KMMAWB ===== */
__STATIC_FORCEINLINE long __RV_KMMAWB(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.44.1. KMMAWB ===== */

/* ===== Inline Function Start for 3.44.2. KMMAWB.u ===== */
__STATIC_FORCEINLINE long __RV_KMMAWB_U(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.44.2. KMMAWB.u ===== */

/* ===== Inline Function Start for 3.45.1. KMMAWB2 ===== */
__STATIC_FORCEINLINE long __RV_KMMAWB2(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawb2 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.45.1. KMMAWB2 ===== */

/* ===== Inline Function Start for 3.45.2. KMMAWB2.u ===== */
__STATIC_FORCEINLINE long __RV_KMMAWB2_U(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawb2.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.45.2. KMMAWB2.u ===== */

/* ===== Inline Function Start for 3.46.1. KMMAWT ===== */
__STATIC_FORCEINLINE long __RV_KMMAWT(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.46.1. KMMAWT ===== */

/* ===== Inline Function Start for 3.46.2. KMMAWT.u ===== */
__STATIC_FORCEINLINE long __RV_KMMAWT_U(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawt.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.46.2. KMMAWT.u ===== */

/* ===== Inline Function Start for 3.47.1. KMMAWT2 ===== */
__STATIC_FORCEINLINE long __RV_KMMAWT2(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawt2 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.47.1. KMMAWT2 ===== */

/* ===== Inline Function Start for 3.47.2. KMMAWT2.u ===== */
__STATIC_FORCEINLINE long __RV_KMMAWT2_U(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmmawt2.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.47.2. KMMAWT2.u ===== */

/* ===== Inline Function Start for 3.48.1. KMMSB ===== */
__STATIC_FORCEINLINE long __RV_KMMSB(long t, long a, long b)
{
    __ASM volatile("kmmsb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.48.1. KMMSB ===== */

/* ===== Inline Function Start for 3.48.2. KMMSB.u ===== */
__STATIC_FORCEINLINE long __RV_KMMSB_U(long t, long a, long b)
{
    __ASM volatile("kmmsb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.48.2. KMMSB.u ===== */

/* ===== Inline Function Start for 3.49.1. KMMWB2 ===== */
__STATIC_FORCEINLINE long __RV_KMMWB2(long a, unsigned long b)
{
    long result;
    __ASM volatile("kmmwb2 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.49.1. KMMWB2 ===== */

/* ===== Inline Function Start for 3.49.2. KMMWB2.u ===== */
__STATIC_FORCEINLINE long __RV_KMMWB2_U(long a, unsigned long b)
{
    long result;
    __ASM volatile("kmmwb2.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.49.2. KMMWB2.u ===== */

/* ===== Inline Function Start for 3.50.1. KMMWT2 ===== */
__STATIC_FORCEINLINE long __RV_KMMWT2(long a, unsigned long b)
{
    long result;
    __ASM volatile("kmmwt2 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.50.1. KMMWT2 ===== */

/* ===== Inline Function Start for 3.50.2. KMMWT2.u ===== */
__STATIC_FORCEINLINE long __RV_KMMWT2_U(long a, unsigned long b)
{
    long result;
    __ASM volatile("kmmwt2.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.50.2. KMMWT2.u ===== */

/* ===== Inline Function Start for 3.51.1. KMSDA ===== */
__STATIC_FORCEINLINE long __RV_KMSDA(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmsda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.51.1. KMSDA ===== */

/* ===== Inline Function Start for 3.51.2. KMSXDA ===== */
__STATIC_FORCEINLINE long __RV_KMSXDA(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("kmsxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.51.2. KMSXDA ===== */

/* ===== Inline Function Start for 3.52. KMSR64 ===== */
__STATIC_FORCEINLINE long long __RV_KMSR64(long long t, long a, long b)
{
    __ASM volatile("kmsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.52. KMSR64 ===== */

/* ===== Inline Function Start for 3.53. KSLLW ===== */
__STATIC_FORCEINLINE long __RV_KSLLW(long a, unsigned int b)
{
    long result;
    __ASM volatile("ksllw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.53. KSLLW ===== */

/* ===== Inline Function Start for 3.54. KSLLIW ===== */
#define __RV_KSLLIW(a, b) \
    ({ \
        long __res; \
        long __a = (long)(a); \
        __ASM volatile("kslliw %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.54. KSLLIW ===== */

/* ===== Inline Function Start for 3.55. KSLL8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLL8(unsigned long a, unsigned int b)
{
    unsigned long result;
    __ASM volatile("ksll8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.55. KSLL8 ===== */

/* ===== Inline Function Start for 3.56. KSLLI8 ===== */
#define __RV_KSLLI8(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("kslli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.56. KSLLI8 ===== */

/* ===== Inline Function Start for 3.57. KSLL16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLL16(unsigned long a, unsigned int b)
{
    unsigned long result;
    __ASM volatile("ksll16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.57. KSLL16 ===== */

/* ===== Inline Function Start for 3.58. KSLLI16 ===== */
#define __RV_KSLLI16(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("kslli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.58. KSLLI16 ===== */

/* ===== Inline Function Start for 3.59.1. KSLRA8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8(unsigned long a, int b)
{
    unsigned long result;
    __ASM volatile("kslra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.59.1. KSLRA8 ===== */

/* ===== Inline Function Start for 3.59.2. KSLRA8.u ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8_U(unsigned long a, int b)
{
    unsigned long result;
    __ASM volatile("kslra8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.59.2. KSLRA8.u ===== */
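
/* Note (editorial): the KSLRA* intrinsics take a signed shift amount; a
 * positive b performs a saturating left shift per lane, while a negative
 * b performs an arithmetic right shift by -b (rounded in the .u form). */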

/* ===== Inline Function Start for 3.60.1. KSLRA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16(unsigned long a, int b)
{
    unsigned long result;
    __ASM volatile("kslra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.60.1. KSLRA16 ===== */

/* ===== Inline Function Start for 3.60.2. KSLRA16.u ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16_U(unsigned long a, int b)
{
    unsigned long result;
    __ASM volatile("kslra16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.60.2. KSLRA16.u ===== */

/* ===== Inline Function Start for 3.61. KSLRAW ===== */
__STATIC_FORCEINLINE long __RV_KSLRAW(int a, int b)
{
    long result;
    __ASM volatile("kslraw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.61. KSLRAW ===== */

/* ===== Inline Function Start for 3.62. KSLRAW.u ===== */
__STATIC_FORCEINLINE long __RV_KSLRAW_U(int a, int b)
{
    long result;
    __ASM volatile("kslraw.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.62. KSLRAW.u ===== */

/* ===== Inline Function Start for 3.63. KSTAS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSTAS16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.63. KSTAS16 ===== */

/* ===== Inline Function Start for 3.64. KSTSA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSTSA16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("kstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.64. KSTSA16 ===== */

/* ===== Inline Function Start for 3.65. KSUB8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSUB8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("ksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.65. KSUB8 ===== */

/* ===== Inline Function Start for 3.66. KSUB16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_KSUB16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("ksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.66. KSUB16 ===== */

/* ===== Inline Function Start for 3.67. KSUB64 ===== */
__STATIC_FORCEINLINE long long __RV_KSUB64(long long a, long long b)
{
    long long result;
    __ASM volatile("ksub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.67. KSUB64 ===== */

/* ===== Inline Function Start for 3.68. KSUBH ===== */
__STATIC_FORCEINLINE long __RV_KSUBH(int a, int b)
{
    long result;
    __ASM volatile("ksubh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.68. KSUBH ===== */

/* ===== Inline Function Start for 3.69. KSUBW ===== */
__STATIC_FORCEINLINE long __RV_KSUBW(int a, int b)
{
    long result;
    __ASM volatile("ksubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.69. KSUBW ===== */

/* ===== Inline Function Start for 3.70.1. KWMMUL ===== */
__STATIC_FORCEINLINE long __RV_KWMMUL(long a, long b)
{
    long result;
    __ASM volatile("kwmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.70.1. KWMMUL ===== */

/* ===== Inline Function Start for 3.70.2. KWMMUL.u ===== */
__STATIC_FORCEINLINE long __RV_KWMMUL_U(long a, long b)
{
    long result;
    __ASM volatile("kwmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.70.2. KWMMUL.u ===== */

/* ===== Inline Function Start for 3.71. MADDR32 ===== */
__STATIC_FORCEINLINE unsigned long __RV_MADDR32(unsigned long t, unsigned long a, unsigned long b)
{
    __ASM volatile("maddr32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.71. MADDR32 ===== */

/* ===== Inline Function Start for 3.72. MAXW ===== */
__STATIC_FORCEINLINE long __RV_MAXW(int a, int b)
{
    long result;
    __ASM volatile("maxw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.72. MAXW ===== */

/* ===== Inline Function Start for 3.73. MINW ===== */
__STATIC_FORCEINLINE long __RV_MINW(int a, int b)
{
    long result;
    __ASM volatile("minw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.73. MINW ===== */

/* ===== Inline Function Start for 3.74. MSUBR32 ===== */
__STATIC_FORCEINLINE unsigned long __RV_MSUBR32(unsigned long t, unsigned long a, unsigned long b)
{
    __ASM volatile("msubr32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.74. MSUBR32 ===== */

/* ===== Inline Function Start for 3.75. MULR64 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_MULR64(unsigned long a, unsigned long b)
{
    unsigned long long result;
    __ASM volatile("mulr64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.75. MULR64 ===== */

/* ===== Inline Function Start for 3.76. MULSR64 ===== */
__STATIC_FORCEINLINE long long __RV_MULSR64(long a, long b)
{
    long long result;
    __ASM volatile("mulsr64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.76. MULSR64 ===== */

/* ===== Inline Function Start for 3.77. PBSAD ===== */
__STATIC_FORCEINLINE unsigned long __RV_PBSAD(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("pbsad %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.77. PBSAD ===== */

/* ===== Inline Function Start for 3.78. PBSADA ===== */
__STATIC_FORCEINLINE unsigned long __RV_PBSADA(unsigned long t, unsigned long a, unsigned long b)
{
    __ASM volatile("pbsada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.78. PBSADA ===== */
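
/* Usage sketch (editorial illustration): accumulate a sum of absolute
 * differences over packed pixel words, one machine word (4 bytes on
 * RV32, 8 on RV64) per iteration; a typical motion-estimation inner
 * loop. Function and parameter names are hypothetical. */
__STATIC_FORCEINLINE unsigned long pbsada_row_example(const unsigned long *p,
                                                      const unsigned long *q,
                                                      int nwords)
{
    unsigned long acc = 0;
    for (int i = 0; i < nwords; i++) {
        acc = __RV_PBSADA(acc, p[i], q[i]); /* acc += sum(|p.B[k] - q.B[k]|) */
    }
    return acc;
}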

/* ===== Inline Function Start for 3.79.1. PKBB16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_PKBB16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("pkbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.79.1. PKBB16 ===== */

/* ===== Inline Function Start for 3.79.2. PKBT16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_PKBT16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("pkbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.79.2. PKBT16 ===== */

/* ===== Inline Function Start for 3.79.3. PKTT16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_PKTT16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("pktt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.79.3. PKTT16 ===== */

/* ===== Inline Function Start for 3.79.4. PKTB16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_PKTB16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("pktb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.79.4. PKTB16 ===== */
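
/* Note (editorial, RV32 layout): the PK*16 group repacks halfwords from
 * two sources (B = bottom, T = top). For a = 0x1111AAAA, b = 0x2222BBBB:
 * PKBB16 -> 0xAAAABBBB, PKBT16 -> 0xAAAA2222, PKTT16 -> 0x11112222,
 * PKTB16 -> 0x1111BBBB. The operand values are hypothetical. */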

/* ===== Inline Function Start for 3.80. RADD8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RADD8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("radd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.80. RADD8 ===== */

/* ===== Inline Function Start for 3.81. RADD16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RADD16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("radd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.81. RADD16 ===== */

/* ===== Inline Function Start for 3.82. RADD64 ===== */
__STATIC_FORCEINLINE long long __RV_RADD64(long long a, long long b)
{
    long long result;
    __ASM volatile("radd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.82. RADD64 ===== */

/* ===== Inline Function Start for 3.83. RADDW ===== */
__STATIC_FORCEINLINE long __RV_RADDW(int a, int b)
{
    long result;
    __ASM volatile("raddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.83. RADDW ===== */

/* ===== Inline Function Start for 3.84. RCRAS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RCRAS16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.84. RCRAS16 ===== */

/* ===== Inline Function Start for 3.85. RCRSA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RCRSA16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.85. RCRSA16 ===== */

/* ===== Inline Function Start for 3.86. RDOV ===== */
__STATIC_FORCEINLINE unsigned long __RV_RDOV(void)
{
    unsigned long result;
    __ASM volatile("rdov %0" : "=r"(result));
    return result;
}
/* ===== Inline Function End for 3.86. RDOV ===== */
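
/* Usage sketch (editorial illustration): the saturation/overflow (OV)
 * flag is sticky, so a sequence of saturating operations can be checked
 * once at the end. Function and variable names are hypothetical. */
__STATIC_FORCEINLINE int kadd16_saturated_example(unsigned long a, unsigned long b,
                                                  unsigned long *r)
{
    __RV_CLROV();               /* clear the sticky OV flag */
    *r = __RV_KADD16(a, b);     /* saturating add may set OV */
    return (int)__RV_RDOV();    /* non-zero if any lane saturated */
}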

/* ===== Inline Function Start for 3.87. RSTAS16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RSTAS16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.87. RSTAS16 ===== */

/* ===== Inline Function Start for 3.88. RSTSA16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RSTSA16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.88. RSTSA16 ===== */

/* ===== Inline Function Start for 3.89. RSUB8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RSUB8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rsub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.89. RSUB8 ===== */

/* ===== Inline Function Start for 3.90. RSUB16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_RSUB16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("rsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.90. RSUB16 ===== */

/* ===== Inline Function Start for 3.91. RSUB64 ===== */
__STATIC_FORCEINLINE long long __RV_RSUB64(long long a, long long b)
{
    long long result;
    __ASM volatile("rsub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.91. RSUB64 ===== */

/* ===== Inline Function Start for 3.92. RSUBW ===== */
__STATIC_FORCEINLINE long __RV_RSUBW(int a, int b)
{
    long result;
    __ASM volatile("rsubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.92. RSUBW ===== */

/* ===== Inline Function Start for 3.93. SCLIP8 ===== */
#define __RV_SCLIP8(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("sclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.93. SCLIP8 ===== */

/* ===== Inline Function Start for 3.94. SCLIP16 ===== */
#define __RV_SCLIP16(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("sclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.94. SCLIP16 ===== */

/* ===== Inline Function Start for 3.95. SCLIP32 ===== */
#define __RV_SCLIP32(a, b) \
    ({ \
        long __res; \
        long __a = (long)(a); \
        __ASM volatile("sclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.95. SCLIP32 ===== */
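
/* Usage sketch (editorial illustration): SCLIP32(a, 15) clips a signed
 * value into [-2^15, 2^15 - 1], a common step before narrowing to
 * int16_t. The input value is hypothetical. */
__STATIC_FORCEINLINE long sclip32_usage_example(void)
{
    return __RV_SCLIP32(70000L, 15); /* clipped to 32767 */
}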

/* ===== Inline Function Start for 3.96. SCMPLE8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("scmple8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.96. SCMPLE8 ===== */

/* ===== Inline Function Start for 3.97. SCMPLE16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("scmple16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.97. SCMPLE16 ===== */

/* ===== Inline Function Start for 3.98. SCMPLT8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("scmplt8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.98. SCMPLT8 ===== */

/* ===== Inline Function Start for 3.99. SCMPLT16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("scmplt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.99. SCMPLT16 ===== */

/* ===== Inline Function Start for 3.100. SLL8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SLL8(unsigned long a, unsigned int b)
{
    unsigned long result;
    __ASM volatile("sll8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.100. SLL8 ===== */

/* ===== Inline Function Start for 3.101. SLLI8 ===== */
#define __RV_SLLI8(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("slli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.101. SLLI8 ===== */

/* ===== Inline Function Start for 3.102. SLL16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SLL16(unsigned long a, unsigned int b)
{
    unsigned long result;
    __ASM volatile("sll16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.102. SLL16 ===== */

/* ===== Inline Function Start for 3.103. SLLI16 ===== */
#define __RV_SLLI16(a, b) \
    ({ \
        unsigned long __res; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("slli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
        __res; \
    })
/* ===== Inline Function End for 3.103. SLLI16 ===== */

/* ===== Inline Function Start for 3.104. SMAL ===== */
__STATIC_FORCEINLINE long long __RV_SMAL(long long a, unsigned long b)
{
    long long result;
    __ASM volatile("smal %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.104. SMAL ===== */

/* ===== Inline Function Start for 3.105.1. SMALBB ===== */
__STATIC_FORCEINLINE long long __RV_SMALBB(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalbb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.105.1. SMALBB ===== */

/* ===== Inline Function Start for 3.105.2. SMALBT ===== */
__STATIC_FORCEINLINE long long __RV_SMALBT(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalbt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.105.2. SMALBT ===== */

/* ===== Inline Function Start for 3.105.3. SMALTT ===== */
__STATIC_FORCEINLINE long long __RV_SMALTT(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smaltt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.105.3. SMALTT ===== */

/* ===== Inline Function Start for 3.106.1. SMALDA ===== */
__STATIC_FORCEINLINE long long __RV_SMALDA(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.106.1. SMALDA ===== */

/* ===== Inline Function Start for 3.106.2. SMALXDA ===== */
__STATIC_FORCEINLINE long long __RV_SMALXDA(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.106.2. SMALXDA ===== */

/* ===== Inline Function Start for 3.107.1. SMALDS ===== */
__STATIC_FORCEINLINE long long __RV_SMALDS(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.107.1. SMALDS ===== */

/* ===== Inline Function Start for 3.107.2. SMALDRS ===== */
__STATIC_FORCEINLINE long long __RV_SMALDRS(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smaldrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.107.2. SMALDRS ===== */

/* ===== Inline Function Start for 3.107.3. SMALXDS ===== */
__STATIC_FORCEINLINE long long __RV_SMALXDS(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smalxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.107.3. SMALXDS ===== */

/* ===== Inline Function Start for 3.108. SMAR64 ===== */
__STATIC_FORCEINLINE long long __RV_SMAR64(long long t, long a, long b)
{
    __ASM volatile("smar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.108. SMAR64 ===== */

/* ===== Inline Function Start for 3.109. SMAQA ===== */
__STATIC_FORCEINLINE long __RV_SMAQA(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.109. SMAQA ===== */
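
/* Usage sketch (editorial illustration): SMAQA is a natural primitive
 * for int8 dot products (e.g. quantized neural-network layers): each
 * call accumulates 4 (RV32) or 8 (RV64) signed 8x8 products. Function
 * and parameter names are hypothetical. */
__STATIC_FORCEINLINE long dot_q7_example(const unsigned long *x,
                                         const unsigned long *y,
                                         int nwords)
{
    long acc = 0;
    for (int i = 0; i < nwords; i++) {
        acc = __RV_SMAQA(acc, x[i], y[i]);
    }
    return acc;
}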

/* ===== Inline Function Start for 3.110. SMAQA.SU ===== */
__STATIC_FORCEINLINE long __RV_SMAQA_SU(long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.110. SMAQA.SU ===== */

/* ===== Inline Function Start for 3.111. SMAX8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SMAX8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("smax8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.111. SMAX8 ===== */

/* ===== Inline Function Start for 3.112. SMAX16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SMAX16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("smax16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.112. SMAX16 ===== */

/* ===== Inline Function Start for 3.113.1. SMBB16 ===== */
__STATIC_FORCEINLINE long __RV_SMBB16(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.113.1. SMBB16 ===== */

/* ===== Inline Function Start for 3.113.2. SMBT16 ===== */
__STATIC_FORCEINLINE long __RV_SMBT16(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.113.2. SMBT16 ===== */

/* ===== Inline Function Start for 3.113.3. SMTT16 ===== */
__STATIC_FORCEINLINE long __RV_SMTT16(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.113.3. SMTT16 ===== */

/* ===== Inline Function Start for 3.114.1. SMDS ===== */
__STATIC_FORCEINLINE long __RV_SMDS(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.114.1. SMDS ===== */

/* ===== Inline Function Start for 3.114.2. SMDRS ===== */
__STATIC_FORCEINLINE long __RV_SMDRS(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smdrs %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.114.2. SMDRS ===== */

/* ===== Inline Function Start for 3.114.3. SMXDS ===== */
__STATIC_FORCEINLINE long __RV_SMXDS(unsigned long a, unsigned long b)
{
    long result;
    __ASM volatile("smxds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.114.3. SMXDS ===== */

/* ===== Inline Function Start for 3.115. SMIN8 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SMIN8(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("smin8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.115. SMIN8 ===== */

/* ===== Inline Function Start for 3.116. SMIN16 ===== */
__STATIC_FORCEINLINE unsigned long __RV_SMIN16(unsigned long a, unsigned long b)
{
    unsigned long result;
    __ASM volatile("smin16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.116. SMIN16 ===== */

/* ===== Inline Function Start for 3.117.1. SMMUL ===== */
__STATIC_FORCEINLINE long __RV_SMMUL(long a, long b)
{
    long result;
    __ASM volatile("smmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.117.1. SMMUL ===== */

/* ===== Inline Function Start for 3.117.2. SMMUL.u ===== */
__STATIC_FORCEINLINE long __RV_SMMUL_U(long a, long b)
{
    long result;
    __ASM volatile("smmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.117.2. SMMUL.u ===== */
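
/* Note (editorial): SMMUL returns the most-significant word of the
 * 64-bit signed product, i.e. (long long)a * b >> 32; for Q31 inputs
 * the result is Q30. The .u variant rounds that word instead of
 * truncating. */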

/* ===== Inline Function Start for 3.118.1. SMMWB ===== */
__STATIC_FORCEINLINE long __RV_SMMWB(long a, unsigned long b)
{
    long result;
    __ASM volatile("smmwb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.118.1. SMMWB ===== */

/* ===== Inline Function Start for 3.118.2. SMMWB.u ===== */
__STATIC_FORCEINLINE long __RV_SMMWB_U(long a, unsigned long b)
{
    long result;
    __ASM volatile("smmwb.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.118.2. SMMWB.u ===== */

/* ===== Inline Function Start for 3.119.1. SMMWT ===== */
__STATIC_FORCEINLINE long __RV_SMMWT(long a, unsigned long b)
{
    long result;
    __ASM volatile("smmwt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.119.1. SMMWT ===== */

/* ===== Inline Function Start for 3.119.2. SMMWT.u ===== */
__STATIC_FORCEINLINE long __RV_SMMWT_U(long a, unsigned long b)
{
    long result;
    __ASM volatile("smmwt.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.119.2. SMMWT.u ===== */

/* ===== Inline Function Start for 3.120.1. SMSLDA ===== */
__STATIC_FORCEINLINE long long __RV_SMSLDA(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smslda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.120.1. SMSLDA ===== */

/* ===== Inline Function Start for 3.120.2. SMSLXDA ===== */
__STATIC_FORCEINLINE long long __RV_SMSLXDA(long long t, unsigned long a, unsigned long b)
{
    __ASM volatile("smslxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.120.2. SMSLXDA ===== */

/* ===== Inline Function Start for 3.121. SMSR64 ===== */
__STATIC_FORCEINLINE long long __RV_SMSR64(long long t, long a, long b)
{
    __ASM volatile("smsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
    return t;
}
/* ===== Inline Function End for 3.121. SMSR64 ===== */

/* ===== Inline Function Start for 3.122.1. SMUL8 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_SMUL8(unsigned int a, unsigned int b)
{
    unsigned long long result;
    __ASM volatile("smul8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.122.1. SMUL8 ===== */

/* ===== Inline Function Start for 3.122.2. SMULX8 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_SMULX8(unsigned int a, unsigned int b)
{
    unsigned long long result;
    __ASM volatile("smulx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.122.2. SMULX8 ===== */

/* ===== Inline Function Start for 3.123.1. SMUL16 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_SMUL16(unsigned int a, unsigned int b)
{
    unsigned long long result;
    __ASM volatile("smul16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.123.1. SMUL16 ===== */

/* ===== Inline Function Start for 3.123.2. SMULX16 ===== */
__STATIC_FORCEINLINE unsigned long long __RV_SMULX16(unsigned int a, unsigned int b)
{
    unsigned long long result;
    __ASM volatile("smulx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.123.2. SMULX16 ===== */

/* ===== Inline Function Start for 3.124. SRA.u ===== */
__STATIC_FORCEINLINE long __RV_SRA_U(long a, unsigned int b)
{
    long result;
    __ASM volatile("sra.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}
/* ===== Inline Function End for 3.124. SRA.u ===== */
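
/* Note (editorial): the .u ("rounded") shift variants add 1 << (b - 1)
 * to the operand before shifting, so __RV_SRA_U(5, 1) yields 3 where a
 * plain arithmetic shift right by one gives 2. The operand values are
 * hypothetical. */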
9634 
9635 /* ===== Inline Function Start for 3.125. SRAI.u ===== */
9682 #define __RV_SRAI_U(a, b) \
9683  ({ \
9684  long __res; \
9685  long __a = (long)(a); \
9686  __ASM volatile("srai.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9687  __res; \
9688  })
9689 /* ===== Inline Function End for 3.125. SRAI.u ===== */
9690 
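/* Usage sketch: SRA.u/SRAI.u perform a rounding arithmetic right shift,
 * i.e. (a + (1 << (b - 1))) >> b for b > 0, the usual way of narrowing a
 * fixed-point product. The immediate form needs a compile-time constant:
 *
 *   long q15 = __RV_SRAI_U(q30_product, 15); // Q30 -> Q15 with rounding
 */
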
9691 /* ===== Inline Function Start for 3.126.1. SRA8 ===== */
9737 __STATIC_FORCEINLINE unsigned long __RV_SRA8(unsigned long a, unsigned int b)
9738 {
9739  unsigned long result;
9740  __ASM volatile("sra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
9741  return result;
9742 }
9743 /* ===== Inline Function End for 3.126.1. SRA8 ===== */
9744 
9745 /* ===== Inline Function Start for 3.126.2. SRA8.u ===== */
9791 __STATIC_FORCEINLINE unsigned long __RV_SRA8_U(unsigned long a, unsigned int b)
9792 {
9793  unsigned long result;
9794  __ASM volatile("sra8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
9795  return result;
9796 }
9797 /* ===== Inline Function End for 3.126.2. SRA8.u ===== */
9798 
9799 /* ===== Inline Function Start for 3.127.1. SRAI8 ===== */
9844 #define __RV_SRAI8(a, b) \
9845  ({ \
9846  unsigned long __res; \
9847  unsigned long __a = (unsigned long)(a); \
9848  __ASM volatile("srai8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9849  __res; \
9850  })
9851 /* ===== Inline Function End for 3.127.1. SRAI8 ===== */
9852 
9853 /* ===== Inline Function Start for 3.127.2. SRAI8.u ===== */
9898 #define __RV_SRAI8_U(a, b) \
9899  ({ \
9900  unsigned long __res; \
9901  unsigned long __a = (unsigned long)(a); \
9902  __ASM volatile("srai8.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9903  __res; \
9904  })
9905 /* ===== Inline Function End for 3.127.2. SRAI8.u ===== */
9906 
9907 /* ===== Inline Function Start for 3.128.1. SRA16 ===== */
9953 __STATIC_FORCEINLINE unsigned long __RV_SRA16(unsigned long a, unsigned long b)
9954 {
9955  unsigned long result;
9956  __ASM volatile("sra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
9957  return result;
9958 }
9959 /* ===== Inline Function End for 3.128.1. SRA16 ===== */
9960 
9961 /* ===== Inline Function Start for 3.128.2. SRA16.u ===== */
10007 __STATIC_FORCEINLINE unsigned long __RV_SRA16_U(unsigned long a, unsigned long b)
10008 {
10009  unsigned long result;
10010  __ASM volatile("sra16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10011  return result;
10012 }
10013 /* ===== Inline Function End for 3.128.2. SRA16.u ===== */
10014 
10015 /* ===== Inline Function Start for 3.129.1. SRAI16 ===== */
10061 #define __RV_SRAI16(a, b) \
10062  ({ \
10063  unsigned long __res; \
10064  unsigned long __a = (unsigned long)(a); \
10065  __ASM volatile("srai16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10066  __res; \
10067  })
10068 /* ===== Inline Function End for 3.129.1. SRAI16 ===== */
10069 
10070 /* ===== Inline Function Start for 3.129.2. SRAI16.u ===== */
10116 #define __RV_SRAI16_U(a, b) \
10117  ({ \
10118  unsigned long __res; \
10119  unsigned long __a = (unsigned long)(a); \
10120  __ASM volatile("srai16.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10121  __res; \
10122  })
10123 /* ===== Inline Function End for 3.129.2. SRAI16.u ===== */
10124 
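/* Usage sketch: the SRA8/SRAI8 and SRA16/SRAI16 forms apply the same
 * arithmetic shift to every 8- or 16-bit lane, with the .u variants
 * rounding each lane. Scaling four packed q7 lanes down by 3 bits:
 *
 *   unsigned long scaled = __RV_SRAI8_U(q7x4, 3); // per-byte rounded shift
 */
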
10125 /* ===== Inline Function Start for 3.130.1. SRL8 ===== */
10170 __STATIC_FORCEINLINE unsigned long __RV_SRL8(unsigned long a, unsigned int b)
10171 {
10172  unsigned long result;
10173  __ASM volatile("srl8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10174  return result;
10175 }
10176 /* ===== Inline Function End for 3.130.1. SRL8 ===== */
10177 
10178 /* ===== Inline Function Start for 3.130.2. SRL8.u ===== */
10223 __STATIC_FORCEINLINE unsigned long __RV_SRL8_U(unsigned long a, unsigned int b)
10224 {
10225  unsigned long result;
10226  __ASM volatile("srl8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10227  return result;
10228 }
10229 /* ===== Inline Function End for 3.130.2. SRL8.u ===== */
10230 
10231 /* ===== Inline Function Start for 3.131.1. SRLI8 ===== */
10275 #define __RV_SRLI8(a, b) \
10276  ({ \
10277  unsigned long __res; \
10278  unsigned long __a = (unsigned long)(a); \
10279  __ASM volatile("srli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10280  __res; \
10281  })
10282 /* ===== Inline Function End for 3.131.1. SRLI8 ===== */
10283 
10284 /* ===== Inline Function Start for 3.131.2. SRLI8.u ===== */
10328 #define __RV_SRLI8_U(a, b) \
10329  ({ \
10330  unsigned long __res; \
10331  unsigned long __a = (unsigned long)(a); \
10332  __ASM volatile("srli8.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10333  __res; \
10334  })
10335 /* ===== Inline Function End for 3.131.2. SRLI8.u ===== */
10336 
10337 /* ===== Inline Function Start for 3.132.1. SRL16 ===== */
10381 __STATIC_FORCEINLINE unsigned long __RV_SRL16(unsigned long a, unsigned int b)
10382 {
10383  unsigned long result;
10384  __ASM volatile("srl16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10385  return result;
10386 }
10387 /* ===== Inline Function End for 3.132.1. SRL16 ===== */
10388 
10389 /* ===== Inline Function Start for 3.132.2. SRL16.u ===== */
10433 __STATIC_FORCEINLINE unsigned long __RV_SRL16_U(unsigned long a, unsigned int b)
10434 {
10435  unsigned long result;
10436  __ASM volatile("srl16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10437  return result;
10438 }
10439 /* ===== Inline Function End for 3.132.2. SRL16.u ===== */
10440 
10441 /* ===== Inline Function Start for 3.133.1. SRLI16 ===== */
10485 #define __RV_SRLI16(a, b) \
10486  ({ \
10487  unsigned long __res; \
10488  unsigned long __a = (unsigned long)(a); \
10489  __ASM volatile("srli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10490  __res; \
10491  })
10492 /* ===== Inline Function End for 3.133.1. SRLI16 ===== */
10493 
10494 /* ===== Inline Function Start for 3.133.2. SRLI16.u ===== */
10538 #define __RV_SRLI16_U(a, b) \
10539  ({ \
10540  unsigned long __res; \
10541  unsigned long __a = (unsigned long)(a); \
10542  __ASM volatile("srli16.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10543  __res; \
10544  })
10545 /* ===== Inline Function End for 3.133.2. SRLI16.u ===== */
10546 
10547 /* ===== Inline Function Start for 3.134. STAS16 ===== */
10585 __STATIC_FORCEINLINE unsigned long __RV_STAS16(unsigned long a, unsigned long b)
10586 {
10587  unsigned long result;
10588  __ASM volatile("stas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10589  return result;
10590 }
10591 /* ===== Inline Function End for 3.134. STAS16 ===== */
10592 
10593 /* ===== Inline Function Start for 3.135. STSA16 ===== */
10631 __STATIC_FORCEINLINE unsigned long __RV_STSA16(unsigned long a, unsigned long b)
10632 {
10633  unsigned long result;
10634  __ASM volatile("stsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10635  return result;
10636 }
10637 /* ===== Inline Function End for 3.135. STSA16 ===== */
10638 
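/* Usage sketch: STAS16 adds the top halfwords and subtracts the bottom
 * ones, while STSA16 does the reverse; this maps directly onto the
 * add/subtract butterfly of fixed-point transforms:
 *
 *   // r.H[1] = a.H[1] + b.H[1]; r.H[0] = a.H[0] - b.H[0]
 *   unsigned long butterfly = __RV_STAS16(a_pair, b_pair);
 */
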
10639 /* ===== Inline Function Start for 3.136. SUB8 ===== */
10672 __STATIC_FORCEINLINE unsigned long __RV_SUB8(unsigned long a, unsigned long b)
10673 {
10674  unsigned long result;
10675  __ASM volatile("sub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10676  return result;
10677 }
10678 /* ===== Inline Function End for 3.136. SUB8 ===== */
10679 
10680 /* ===== Inline Function Start for 3.137. SUB16 ===== */
10713 __STATIC_FORCEINLINE unsigned long __RV_SUB16(unsigned long a, unsigned long b)
10714 {
10715  unsigned long result;
10716  __ASM volatile("sub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10717  return result;
10718 }
10719 /* ===== Inline Function End for 3.137. SUB16 ===== */
10720 
10721 /* ===== Inline Function Start for 3.138. SUB64 ===== */
10767 __STATIC_FORCEINLINE unsigned long long __RV_SUB64(unsigned long long a, unsigned long long b)
10768 {
10769  unsigned long long result;
10770  __ASM volatile("sub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
10771  return result;
10772 }
10773 /* ===== Inline Function End for 3.138. SUB64 ===== */
10774 
10775 /* ===== Inline Function Start for 3.139.1. SUNPKD810 ===== */
10813 __STATIC_FORCEINLINE unsigned long __RV_SUNPKD810(unsigned long a)
10814 {
10815  unsigned long result;
10816  __ASM volatile("sunpkd810 %0, %1" : "=r"(result) : "r"(a));
10817  return result;
10818 }
10819 /* ===== Inline Function End for 3.139.1. SUNPKD810 ===== */
10820 
10821 /* ===== Inline Function Start for 3.139.2. SUNPKD820 ===== */
10859 __STATIC_FORCEINLINE unsigned long __RV_SUNPKD820(unsigned long a)
10860 {
10861  unsigned long result;
10862  __ASM volatile("sunpkd820 %0, %1" : "=r"(result) : "r"(a));
10863  return result;
10864 }
10865 /* ===== Inline Function End for 3.139.2. SUNPKD820 ===== */
10866 
10867 /* ===== Inline Function Start for 3.139.3. SUNPKD830 ===== */
10905 __STATIC_FORCEINLINE unsigned long __RV_SUNPKD830(unsigned long a)
10906 {
10907  unsigned long result;
10908  __ASM volatile("sunpkd830 %0, %1" : "=r"(result) : "r"(a));
10909  return result;
10910 }
10911 /* ===== Inline Function End for 3.139.3. SUNPKD830 ===== */
10912 
10913 /* ===== Inline Function Start for 3.139.4. SUNPKD831 ===== */
10951 __STATIC_FORCEINLINE unsigned long __RV_SUNPKD831(unsigned long a)
10952 {
10953  unsigned long result;
10954  __ASM volatile("sunpkd831 %0, %1" : "=r"(result) : "r"(a));
10955  return result;
10956 }
10957 /* ===== Inline Function End for 3.139.4. SUNPKD831 ===== */
10958 
10959 /* ===== Inline Function Start for 3.139.5. SUNPKD832 ===== */
10997 __STATIC_FORCEINLINE unsigned long __RV_SUNPKD832(unsigned long a)
10998 {
10999  unsigned long result;
11000  __ASM volatile("sunpkd832 %0, %1" : "=r"(result) : "r"(a));
11001  return result;
11002 }
11003 /* ===== Inline Function End for 3.139.5. SUNPKD832 ===== */
11004 
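/* Usage sketch: SUNPKD8xy sign-extends bytes x and y of the source into
 * the top and bottom halfwords of each result word. Widening four packed
 * q7 lanes into two q15 pairs, assuming RV32:
 *
 *   unsigned long lo = __RV_SUNPKD810(q7x4); // {sext(B[1]), sext(B[0])}
 *   unsigned long hi = __RV_SUNPKD832(q7x4); // {sext(B[3]), sext(B[2])}
 */
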
11005 /* ===== Inline Function Start for 3.140. SWAP8 ===== */
11034 __STATIC_FORCEINLINE unsigned long __RV_SWAP8(unsigned long a)
11035 {
11036  unsigned long result;
11037  __ASM volatile("swap8 %0, %1" : "=r"(result) : "r"(a));
11038  return result;
11039 }
11040 /* ===== Inline Function End for 3.140. SWAP8 ===== */
11041 
11042 /* ===== Inline Function Start for 3.141. SWAP16 ===== */
11071 __STATIC_FORCEINLINE unsigned long __RV_SWAP16(unsigned long a)
11072 {
11073  unsigned long result;
11074  __ASM volatile("swap16 %0, %1" : "=r"(result) : "r"(a));
11075  return result;
11076 }
11077 /* ===== Inline Function End for 3.141. SWAP16 ===== */
11078 
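/* Usage sketch: SWAP8 swaps the bytes inside each halfword and SWAP16
 * swaps the halfwords inside each word, so on RV32 the two together give
 * a full 32-bit byte reversal (an endianness flip):
 *
 *   unsigned long reversed = __RV_SWAP16(__RV_SWAP8(x));
 */
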
11079 /* ===== Inline Function Start for 3.142. UCLIP8 ===== */
11118 #define __RV_UCLIP8(a, b) \
11119  ({ \
11120  unsigned long __res; \
11121  unsigned long __a = (unsigned long)(a); \
11122  __ASM volatile("uclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11123  __res; \
11124  })
11125 /* ===== Inline Function End for 3.142. UCLIP8 ===== */
11126 
11127 /* ===== Inline Function Start for 3.143. UCLIP16 ===== */
11167 #define __RV_UCLIP16(a, b) \
11168  ({ \
11169  unsigned long __res; \
11170  unsigned long __a = (unsigned long)(a); \
11171  __ASM volatile("uclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11172  __res; \
11173  })
11174 /* ===== Inline Function End for 3.143. UCLIP16 ===== */
11175 
11176 /* ===== Inline Function Start for 3.144. UCLIP32 ===== */
11217 #define __RV_UCLIP32(a, b) \
11218  ({ \
11219  unsigned long __res; \
11220  unsigned long __a = (unsigned long)(a); \
11221  __ASM volatile("uclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11222  __res; \
11223  })
11224 /* ===== Inline Function End for 3.144. UCLIP32 ===== */
11225 
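/* Usage sketch: UCLIP8/16/32 clamp each signed lane into the unsigned
 * range [0, 2^b - 1], saturating negatives to zero; b must be a constant.
 * Converting a filtered sample to an 8-bit pixel, for example:
 *
 *   unsigned long pixel = __RV_UCLIP32(sample, 8); // clamp to [0, 255]
 */
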
11226 /* ===== Inline Function Start for 3.145. UCMPLE8 ===== */
11258 __STATIC_FORCEINLINE unsigned long __RV_UCMPLE8(unsigned long a, unsigned long b)
11259 {
11260  unsigned long result;
11261  __ASM volatile("ucmple8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11262  return result;
11263 }
11264 /* ===== Inline Function End for 3.145. UCMPLE8 ===== */
11265 
11266 /* ===== Inline Function Start for 3.146. UCMPLE16 ===== */
11298 __STATIC_FORCEINLINE unsigned long __RV_UCMPLE16(unsigned long a, unsigned long b)
11299 {
11300  unsigned long result;
11301  __ASM volatile("ucmple16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11302  return result;
11303 }
11304 /* ===== Inline Function End for 3.146. UCMPLE16 ===== */
11305 
11306 /* ===== Inline Function Start for 3.147. UCMPLT8 ===== */
11337 __STATIC_FORCEINLINE unsigned long __RV_UCMPLT8(unsigned long a, unsigned long b)
11338 {
11339  unsigned long result;
11340  __ASM volatile("ucmplt8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11341  return result;
11342 }
11343 /* ===== Inline Function End for 3.147. UCMPLT8 ===== */
11344 
11345 /* ===== Inline Function Start for 3.148. UCMPLT16 ===== */
11376 __STATIC_FORCEINLINE unsigned long __RV_UCMPLT16(unsigned long a, unsigned long b)
11377 {
11378  unsigned long result;
11379  __ASM volatile("ucmplt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11380  return result;
11381 }
11382 /* ===== Inline Function End for 3.148. UCMPLT16 ===== */
11383 
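/* Usage sketch: the UCMPLE/UCMPLT intrinsics compare unsigned lanes and
 * return an all-ones (0xFF or 0xFFFF) or all-zeros mask per lane, which
 * enables branch-free selection (here a per-byte minimum, though
 * __RV_UMIN8 below does this in one instruction):
 *
 *   unsigned long lt  = __RV_UCMPLT8(a, b);   // 0xFF where a.B[i] < b.B[i]
 *   unsigned long min = (a & lt) | (b & ~lt); // pick a where a < b, else b
 */
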
11384 /* ===== Inline Function Start for 3.149. UKADD8 ===== */
11421 __STATIC_FORCEINLINE unsigned long __RV_UKADD8(unsigned long a, unsigned long b)
11422 {
11423  unsigned long result;
11424  __ASM volatile("ukadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11425  return result;
11426 }
11427 /* ===== Inline Function End for 3.149. UKADD8 ===== */
11428 
11429 /* ===== Inline Function Start for 3.150. UKADD16 ===== */
11466 __STATIC_FORCEINLINE unsigned long __RV_UKADD16(unsigned long a, unsigned long b)
11467 {
11468  unsigned long result;
11469  __ASM volatile("ukadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11470  return result;
11471 }
11472 /* ===== Inline Function End for 3.150. UKADD16 ===== */
11473 
11474 /* ===== Inline Function Start for 3.151. UKADD64 ===== */
11528 __STATIC_FORCEINLINE unsigned long long __RV_UKADD64(unsigned long long a, unsigned long long b)
11529 {
11530  unsigned long long result;
11531  __ASM volatile("ukadd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11532  return result;
11533 }
11534 /* ===== Inline Function End for 3.151. UKADD64 ===== */
11535 
11536 /* ===== Inline Function Start for 3.152. UKADDH ===== */
11570 __STATIC_FORCEINLINE unsigned long __RV_UKADDH(unsigned int a, unsigned int b)
11571 {
11572  unsigned long result;
11573  __ASM volatile("ukaddh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11574  return result;
11575 }
11576 /* ===== Inline Function End for 3.152. UKADDH ===== */
11577 
11578 /* ===== Inline Function Start for 3.153. UKADDW ===== */
11613 __STATIC_FORCEINLINE unsigned long __RV_UKADDW(unsigned int a, unsigned int b)
11614 {
11615  unsigned long result;
11616  __ASM volatile("ukaddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11617  return result;
11618 }
11619 /* ===== Inline Function End for 3.153. UKADDW ===== */
11620 
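/* Usage sketch: the UKxxx intrinsics saturate instead of wrapping on
 * overflow; UKADDW clamps a 32-bit unsigned sum at 0xFFFFFFFF, e.g. for
 * a counter that must never wrap back to zero:
 *
 *   bin = __RV_UKADDW((unsigned int)bin, 1u); // sticks at UINT32_MAX
 */
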
11621 /* ===== Inline Function Start for 3.154. UKCRAS16 ===== */
11669 __STATIC_FORCEINLINE unsigned long __RV_UKCRAS16(unsigned long a, unsigned long b)
11670 {
11671  unsigned long result;
11672  __ASM volatile("ukcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11673  return result;
11674 }
11675 /* ===== Inline Function End for 3.154. UKCRAS16 ===== */
11676 
11677 /* ===== Inline Function Start for 3.155. UKCRSA16 ===== */
11724 __STATIC_FORCEINLINE unsigned long __RV_UKCRSA16(unsigned long a, unsigned long b)
11725 {
11726  unsigned long result;
11727  __ASM volatile("ukcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11728  return result;
11729 }
11730 /* ===== Inline Function End for 3.155. UKCRSA16 ===== */
11731 
11732 /* ===== Inline Function Start for 3.156. UKMAR64 ===== */
11789 __STATIC_FORCEINLINE unsigned long long __RV_UKMAR64(unsigned long long t, unsigned long a, unsigned long b)
11790 {
11791  __ASM volatile("ukmar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
11792  return t;
11793 }
11794 /* ===== Inline Function End for 3.156. UKMAR64 ===== */
11795 
11796 /* ===== Inline Function Start for 3.157. UKMSR64 ===== */
11854 __STATIC_FORCEINLINE unsigned long long __RV_UKMSR64(unsigned long long t, unsigned long a, unsigned long b)
11855 {
11856  __ASM volatile("ukmsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
11857  return t;
11858 }
11859 /* ===== Inline Function End for 3.157. UKMSR64 ===== */
11860 
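/* Usage sketch (assuming RV32, where the operands are full 32-bit
 * values): UKMAR64/UKMSR64 multiply-accumulate into a 64-bit total with
 * unsigned saturation, so a long-running energy sum clamps rather than
 * wraps:
 *
 *   energy = __RV_UKMAR64(energy, sample, sample); // energy += s*s, saturated
 */
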
11861 /* ===== Inline Function Start for 3.158. UKSTAS16 ===== */
11909 __STATIC_FORCEINLINE unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b)
11910 {
11911  unsigned long result;
11912  __ASM volatile("ukstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11913  return result;
11914 }
11915 /* ===== Inline Function End for 3.158. UKSTAS16 ===== */
11916 
11917 /* ===== Inline Function Start for 3.159. UKSTSA16 ===== */
11964 __STATIC_FORCEINLINE unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b)
11965 {
11966  unsigned long result;
11967  __ASM volatile("ukstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
11968  return result;
11969 }
11970 /* ===== Inline Function End for 3.159. UKSTSA16 ===== */
11971 
11972 /* ===== Inline Function Start for 3.160. UKSUB8 ===== */
12009 __STATIC_FORCEINLINE unsigned long __RV_UKSUB8(unsigned long a, unsigned long b)
12010 {
12011  unsigned long result;
12012  __ASM volatile("uksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12013  return result;
12014 }
12015 /* ===== Inline Function End for 3.160. UKSUB8 ===== */
12016 
12017 /* ===== Inline Function Start for 3.161. UKSUB16 ===== */
12054 __STATIC_FORCEINLINE unsigned long __RV_UKSUB16(unsigned long a, unsigned long b)
12055 {
12056  unsigned long result;
12057  __ASM volatile("uksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12058  return result;
12059 }
12060 /* ===== Inline Function End for 3.161. UKSUB16 ===== */
12061 
12062 /* ===== Inline Function Start for 3.162. UKSUB64 ===== */
12117 __STATIC_FORCEINLINE unsigned long long __RV_UKSUB64(unsigned long long a, unsigned long long b)
12118 {
12119  unsigned long long result;
12120  __ASM volatile("uksub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12121  return result;
12122 }
12123 /* ===== Inline Function End for 3.162. UKSUB64 ===== */
12124 
12125 /* ===== Inline Function Start for 3.163. UKSUBH ===== */
12163 __STATIC_FORCEINLINE unsigned long __RV_UKSUBH(unsigned int a, unsigned int b)
12164 {
12165  unsigned long result;
12166  __ASM volatile("uksubh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12167  return result;
12168 }
12169 /* ===== Inline Function End for 3.163. UKSUBH ===== */
12170 
12171 /* ===== Inline Function Start for 3.164. UKSUBW ===== */
12207 __STATIC_FORCEINLINE unsigned long __RV_UKSUBW(unsigned int a, unsigned int b)
12208 {
12209  unsigned long result;
12210  __ASM volatile("uksubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12211  return result;
12212 }
12213 /* ===== Inline Function End for 3.164. UKSUBW ===== */
12214 
12215 /* ===== Inline Function Start for 3.165. UMAR64 ===== */
12261 __STATIC_FORCEINLINE unsigned long long __RV_UMAR64(unsigned long long t, unsigned long a, unsigned long b)
12262 {
12263  __ASM volatile("umar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
12264  return t;
12265 }
12266 /* ===== Inline Function End for 3.165. UMAR64 ===== */
12267 
12268 /* ===== Inline Function Start for 3.166. UMAQA ===== */
12305 __STATIC_FORCEINLINE unsigned long __RV_UMAQA(unsigned long t, unsigned long a, unsigned long b)
12306 {
12307  __ASM volatile("umaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
12308  return t;
12309 }
12310 /* ===== Inline Function End for 3.166. UMAQA ===== */
12311 
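/* Usage sketch (assuming RV32, so each register carries four u8 lanes):
 * UMAQA adds all four unsigned 8x8 products to the accumulator, giving a
 * one-instruction-per-four-elements u8 dot-product kernel. dot_u8 is an
 * illustrative helper, not part of this header:
 *
 *   #include <stdint.h>
 *   #include <string.h>
 *   static unsigned long dot_u8(const uint8_t *x, const uint8_t *y, size_t n)
 *   {
 *       unsigned long acc = 0;
 *       for (size_t i = 0; i + 4 <= n; i += 4) {
 *           uint32_t a, b;
 *           memcpy(&a, x + i, 4); // load four packed u8 lanes
 *           memcpy(&b, y + i, 4);
 *           acc = __RV_UMAQA(acc, a, b);
 *       }
 *       return acc; // caller handles the n % 4 tail
 *   }
 */
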
12312 /* ===== Inline Function Start for 3.167. UMAX8 ===== */
12343 __STATIC_FORCEINLINE unsigned long __RV_UMAX8(unsigned long a, unsigned long b)
12344 {
12345  unsigned long result;
12346  __ASM volatile("umax8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12347  return result;
12348 }
12349 /* ===== Inline Function End for 3.167. UMAX8 ===== */
12350 
12351 /* ===== Inline Function Start for 3.168. UMAX16 ===== */
12382 __STATIC_FORCEINLINE unsigned long __RV_UMAX16(unsigned long a, unsigned long b)
12383 {
12384  unsigned long result;
12385  __ASM volatile("umax16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12386  return result;
12387 }
12388 /* ===== Inline Function End for 3.168. UMAX16 ===== */
12389 
12390 /* ===== Inline Function Start for 3.169. UMIN8 ===== */
12421 __STATIC_FORCEINLINE unsigned long __RV_UMIN8(unsigned long a, unsigned long b)
12422 {
12423  unsigned long result;
12424  __ASM volatile("umin8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12425  return result;
12426 }
12427 /* ===== Inline Function End for 3.169. UMIN8 ===== */
12428 
12429 /* ===== Inline Function Start for 3.170. UMIN16 ===== */
12460 __STATIC_FORCEINLINE unsigned long __RV_UMIN16(unsigned long a, unsigned long b)
12461 {
12462  unsigned long result;
12463  __ASM volatile("umin16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12464  return result;
12465 }
12466 /* ===== Inline Function End for 3.170. UMIN16 ===== */
12467 
12468 /* ===== Inline Function Start for 3.171. UMSR64 ===== */
12514 __STATIC_FORCEINLINE unsigned long long __RV_UMSR64(unsigned long long t, unsigned long a, unsigned long b)
12515 {
12516  __ASM volatile("umsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
12517  return t;
12518 }
12519 /* ===== Inline Function End for 3.171. UMSR64 ===== */
12520 
12521 /* ===== Inline Function Start for 3.172.1. UMUL8 ===== */
12597 __STATIC_FORCEINLINE unsigned long long __RV_UMUL8(unsigned int a, unsigned int b)
12598 {
12599  unsigned long long result;
12600  __ASM volatile("umul8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12601  return result;
12602 }
12603 /* ===== Inline Function End for 3.172.1. UMUL8 ===== */
12604 
12605 /* ===== Inline Function Start for 3.172.2. UMULX8 ===== */
12681 __STATIC_FORCEINLINE unsigned long long __RV_UMULX8(unsigned int a, unsigned int b)
12682 {
12683  unsigned long long result;
12684  __ASM volatile("umulx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12685  return result;
12686 }
12687 /* ===== Inline Function End for 3.172.2. UMULX8 ===== */
12688 
12689 /* ===== Inline Function Start for 3.173.1. UMUL16 ===== */
12766 __STATIC_FORCEINLINE unsigned long long __RV_UMUL16(unsigned int a, unsigned int b)
12767 {
12768  unsigned long long result;
12769  __ASM volatile("umul16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12770  return result;
12771 }
12772 /* ===== Inline Function End for 3.173.1. UMUL16 ===== */
12773 
12774 /* ===== Inline Function Start for 3.173.2. UMULX16 ===== */
12851 __STATIC_FORCEINLINE unsigned long long __RV_UMULX16(unsigned int a, unsigned int b)
12852 {
12853  unsigned long long result;
12854  __ASM volatile("umulx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12855  return result;
12856 }
12857 /* ===== Inline Function End for 3.173.2. UMULX16 ===== */
12858 
12859 /* ===== Inline Function Start for 3.174. URADD8 ===== */
12898 __STATIC_FORCEINLINE unsigned long __RV_URADD8(unsigned long a, unsigned long b)
12899 {
12900  unsigned long result;
12901  __ASM volatile("uradd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12902  return result;
12903 }
12904 /* ===== Inline Function End for 3.174. URADD8 ===== */
12905 
12906 /* ===== Inline Function Start for 3.175. URADD16 ===== */
12945 __STATIC_FORCEINLINE unsigned long __RV_URADD16(unsigned long a, unsigned long b)
12946 {
12947  unsigned long result;
12948  __ASM volatile("uradd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
12949  return result;
12950 }
12951 /* ===== Inline Function End for 3.175. URADD16 ===== */
12952 
12953 /* ===== Inline Function Start for 3.176. URADD64 ===== */
12997 __STATIC_FORCEINLINE unsigned long long __RV_URADD64(unsigned long long a, unsigned long long b)
12998 {
12999  unsigned long long result;
13000  __ASM volatile("uradd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13001  return result;
13002 }
13003 /* ===== Inline Function End for 3.176. URADD64 ===== */
13004 
13005 /* ===== Inline Function Start for 3.177. URADDW ===== */
13045 __STATIC_FORCEINLINE unsigned long __RV_URADDW(unsigned int a, unsigned int b)
13046 {
13047  unsigned long result;
13048  __ASM volatile("uraddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13049  return result;
13050 }
13051 /* ===== Inline Function End for 3.177. URADDW ===== */
13052 
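/* Usage sketch: the URxxx intrinsics are unsigned halving operations;
 * URADDW returns (a + b) >> 1 with the carry bit of the 33-bit sum
 * preserved, so the average can never overflow:
 *
 *   unsigned long avg = __RV_URADDW(0xFFFFFFFFu, 0xFFFFFFFFu); // 0xFFFFFFFF
 */
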
13053 /* ===== Inline Function Start for 3.178. URCRAS16 ===== */
13094 __STATIC_FORCEINLINE unsigned long __RV_URCRAS16(unsigned long a, unsigned long b)
13095 {
13096  unsigned long result;
13097  __ASM volatile("urcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13098  return result;
13099 }
13100 /* ===== Inline Function End for 3.178. URCRAS16 ===== */
13101 
13102 /* ===== Inline Function Start for 3.179. URCRSA16 ===== */
13143 __STATIC_FORCEINLINE unsigned long __RV_URCRSA16(unsigned long a, unsigned long b)
13144 {
13145  unsigned long result;
13146  __ASM volatile("urcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13147  return result;
13148 }
13149 /* ===== Inline Function End for 3.179. URCRSA16 ===== */
13150 
13151 /* ===== Inline Function Start for 3.180. URSTAS16 ===== */
13192 __STATIC_FORCEINLINE unsigned long __RV_URSTAS16(unsigned long a, unsigned long b)
13193 {
13194  unsigned long result;
13195  __ASM volatile("urstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13196  return result;
13197 }
13198 /* ===== Inline Function End for 3.180. URSTAS16 ===== */
13199 
13200 /* ===== Inline Function Start for 3.181. URSTSA16 ===== */
13241 __STATIC_FORCEINLINE unsigned long __RV_URSTSA16(unsigned long a, unsigned long b)
13242 {
13243  unsigned long result;
13244  __ASM volatile("urstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13245  return result;
13246 }
13247 /* ===== Inline Function End for 3.181. URSTSA16 ===== */
13248 
13249 /* ===== Inline Function Start for 3.182. URSUB8 ===== */
13288 __STATIC_FORCEINLINE unsigned long __RV_URSUB8(unsigned long a, unsigned long b)
13289 {
13290  unsigned long result;
13291  __ASM volatile("ursub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13292  return result;
13293 }
13294 /* ===== Inline Function End for 3.182. URSUB8 ===== */
13295 
13296 /* ===== Inline Function Start for 3.183. URSUB16 ===== */
13335 __STATIC_FORCEINLINE unsigned long __RV_URSUB16(unsigned long a, unsigned long b)
13336 {
13337  unsigned long result;
13338  __ASM volatile("ursub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13339  return result;
13340 }
13341 /* ===== Inline Function End for 3.183. URSUB16 ===== */
13342 
13343 /* ===== Inline Function Start for 3.184. URSUB64 ===== */
13389 __STATIC_FORCEINLINE unsigned long long __RV_URSUB64(unsigned long long a, unsigned long long b)
13390 {
13391  unsigned long long result;
13392  __ASM volatile("ursub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13393  return result;
13394 }
13395 /* ===== Inline Function End for 3.184. URSUB64 ===== */
13396 
13397 /* ===== Inline Function Start for 3.185. URSUBW ===== */
13437 __STATIC_FORCEINLINE unsigned long __RV_URSUBW(unsigned int a, unsigned int b)
13438 {
13439  unsigned long result;
13440  __ASM volatile("ursubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13441  return result;
13442 }
13443 /* ===== Inline Function End for 3.185. URSUBW ===== */
13444 
13445 /* ===== Inline Function Start for 3.186. WEXTI ===== */
13490 #define __RV_WEXTI(a, b) \
13491  ({ \
13492  unsigned long __res; \
13493  long long __a = (long long)(a); \
13494  __ASM volatile("wexti %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
13495  __res; \
13496  })
13497 /* ===== Inline Function End for 3.186. WEXTI ===== */
13498 
13499 /* ===== Inline Function Start for 3.187. WEXT ===== */
13541 __STATIC_FORCEINLINE unsigned long __RV_WEXT(long long a, unsigned int b)
13542 {
13543  unsigned long result;
13544  __ASM volatile("wext %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13545  return result;
13546 }
13547 /* ===== Inline Function End for 3.187. WEXT ===== */
13548 
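/* Usage sketch: WEXT/WEXTI extract a 32-bit window starting at bit b of
 * a 64-bit value, replacing a shift-and-truncate pair; e.g. pulling the
 * Q31 part out of a 64-bit accumulator:
 *
 *   unsigned long q31 = __RV_WEXTI(acc64, 31); // bits [62:31] of acc64
 */
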
13549 /* ===== Inline Function Start for 3.188.1. ZUNPKD810 ===== */
13587 __STATIC_FORCEINLINE unsigned long __RV_ZUNPKD810(unsigned long a)
13588 {
13589  unsigned long result;
13590  __ASM volatile("zunpkd810 %0, %1" : "=r"(result) : "r"(a));
13591  return result;
13592 }
13593 /* ===== Inline Function End for 3.188.1. ZUNPKD810 ===== */
13594 
13595 /* ===== Inline Function Start for 3.188.2. ZUNPKD820 ===== */
13633 __STATIC_FORCEINLINE unsigned long __RV_ZUNPKD820(unsigned long a)
13634 {
13635  unsigned long result;
13636  __ASM volatile("zunpkd820 %0, %1" : "=r"(result) : "r"(a));
13637  return result;
13638 }
13639 /* ===== Inline Function End for 3.188.2. ZUNPKD820 ===== */
13640 
13641 /* ===== Inline Function Start for 3.188.3. ZUNPKD830 ===== */
13679 __STATIC_FORCEINLINE unsigned long __RV_ZUNPKD830(unsigned long a)
13680 {
13681  unsigned long result;
13682  __ASM volatile("zunpkd830 %0, %1" : "=r"(result) : "r"(a));
13683  return result;
13684 }
13685 /* ===== Inline Function End for 3.188.3. ZUNPKD830 ===== */
13686 
13687 /* ===== Inline Function Start for 3.188.4. ZUNPKD831 ===== */
13725 __STATIC_FORCEINLINE unsigned long __RV_ZUNPKD831(unsigned long a)
13726 {
13727  unsigned long result;
13728  __ASM volatile("zunpkd831 %0, %1" : "=r"(result) : "r"(a));
13729  return result;
13730 }
13731 /* ===== Inline Function End for 3.188.4. ZUNPKD831 ===== */
13732 
13733 /* ===== Inline Function Start for 3.188.5. ZUNPKD832 ===== */
13771 __STATIC_FORCEINLINE unsigned long __RV_ZUNPKD832(unsigned long a)
13772 {
13773  unsigned long result;
13774  __ASM volatile("zunpkd832 %0, %1" : "=r"(result) : "r"(a));
13775  return result;
13776 }
13777 /* ===== Inline Function End for 3.188.5. ZUNPKD832 ===== */
13778 
13779 #if (__RISCV_XLEN == 64) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)
13780 
13781 /* ===== Inline Function Start for 4.1. ADD32 ===== */
13813 __STATIC_FORCEINLINE unsigned long __RV_ADD32(unsigned long a, unsigned long b)
13814 {
13815  unsigned long result;
13816  __ASM volatile("add32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13817  return result;
13818 }
13819 /* ===== Inline Function End for 4.1. ADD32 ===== */
13820 
13821 /* ===== Inline Function Start for 4.2. CRAS32 ===== */
13856 __STATIC_FORCEINLINE unsigned long __RV_CRAS32(unsigned long a, unsigned long b)
13857 {
13858  unsigned long result;
13859  __ASM volatile("cras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13860  return result;
13861 }
13862 /* ===== Inline Function End for 4.2. CRAS32 ===== */
13863 
13864 /* ===== Inline Function Start for 4.3. CRSA32 ===== */
13898 __STATIC_FORCEINLINE unsigned long __RV_CRSA32(unsigned long a, unsigned long b)
13899 {
13900  unsigned long result;
13901  __ASM volatile("crsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
13902  return result;
13903 }
13904 /* ===== Inline Function End for 4.3. CRSA32 ===== */
13905 
13906 /* ===== Inline Function Start for 4.4. KABS32 ===== */
13958 __STATIC_FORCEINLINE unsigned long __RV_KABS32(unsigned long a)
13959 {
13960  unsigned long result;
13961  __ASM volatile("kabs32 %0, %1" : "=r"(result) : "r"(a));
13962  return result;
13963 }
13964 /* ===== Inline Function End for 4.4. KABS32 ===== */
13965 
13966 /* ===== Inline Function Start for 4.5. KADD32 ===== */
14004 __STATIC_FORCEINLINE unsigned long __RV_KADD32(unsigned long a, unsigned long b)
14005 {
14006  unsigned long result;
14007  __ASM volatile("kadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14008  return result;
14009 }
14010 /* ===== Inline Function End for 4.5. KADD32 ===== */
14011 
14012 /* ===== Inline Function Start for 4.6. KCRAS32 ===== */
14055 __STATIC_FORCEINLINE unsigned long __RV_KCRAS32(unsigned long a, unsigned long b)
14056 {
14057  unsigned long result;
14058  __ASM volatile("kcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14059  return result;
14060 }
14061 /* ===== Inline Function End for 4.6. KCRAS32 ===== */
14062 
14063 /* ===== Inline Function Start for 4.7. KCRSA32 ===== */
14105 __STATIC_FORCEINLINE unsigned long __RV_KCRSA32(unsigned long a, unsigned long b)
14106 {
14107  unsigned long result;
14108  __ASM volatile("kcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14109  return result;
14110 }
14111 /* ===== Inline Function End for 4.7. KCRSA32 ===== */
14112 
14113 /* ===== Inline Function Start for 4.8.1. KDMBB16 ===== */
14157 __STATIC_FORCEINLINE unsigned long __RV_KDMBB16(unsigned long a, unsigned long b)
14158 {
14159  unsigned long result;
14160  __ASM volatile("kdmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14161  return result;
14162 }
14163 /* ===== Inline Function End for 4.8.1. KDMBB16 ===== */
14164 
14165 /* ===== Inline Function Start for 4.8.2. KDMBT16 ===== */
14209 __STATIC_FORCEINLINE unsigned long __RV_KDMBT16(unsigned long a, unsigned long b)
14210 {
14211  unsigned long result;
14212  __ASM volatile("kdmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14213  return result;
14214 }
14215 /* ===== Inline Function End for 4.8.2. KDMBT16 ===== */
14216 
14217 /* ===== Inline Function Start for 4.8.3. KDMTT16 ===== */
14261 __STATIC_FORCEINLINE unsigned long __RV_KDMTT16(unsigned long a, unsigned long b)
14262 {
14263  unsigned long result;
14264  __ASM volatile("kdmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14265  return result;
14266 }
14267 /* ===== Inline Function End for 4.8.3. KDMTT16 ===== */
14268 
14269 /* ===== Inline Function Start for 4.9.1. KDMABB16 ===== */
14327 __STATIC_FORCEINLINE unsigned long __RV_KDMABB16(unsigned long t, unsigned long a, unsigned long b)
14328 {
14329  __ASM volatile("kdmabb16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14330  return t;
14331 }
14332 /* ===== Inline Function End for 4.9.1. KDMABB16 ===== */
14333 
14334 /* ===== Inline Function Start for 4.9.2. KDMABT16 ===== */
14392 __STATIC_FORCEINLINE unsigned long __RV_KDMABT16(unsigned long t, unsigned long a, unsigned long b)
14393 {
14394  __ASM volatile("kdmabt16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14395  return t;
14396 }
14397 /* ===== Inline Function End for 4.9.2. KDMABT16 ===== */
14398 
14399 /* ===== Inline Function Start for 4.9.3. KDMATT16 ===== */
14457 __STATIC_FORCEINLINE unsigned long __RV_KDMATT16(unsigned long t, unsigned long a, unsigned long b)
14458 {
14459  __ASM volatile("kdmatt16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14460  return t;
14461 }
14462 /* ===== Inline Function End for 4.9.3. KDMATT16 ===== */
14463 
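/* Usage sketch (RV64 only; assuming the 64-bit P-extension per-word
 * semantics): the KDMxx16 intrinsics compute doubled, Q31-saturated
 * products of the selected bottom (B) or top (T) halfword of each 32-bit
 * element, and the KDMAxx16 forms add those products to the matching
 * accumulator words with Q31 saturation:
 *
 *   acc = __RV_KDMABB16(acc, samples, coeffs); // acc.W[i] += sat(2 * s.H[0] * c.H[0])
 */
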
14464 /* ===== Inline Function Start for 4.10.1. KHMBB16 ===== */
14509 __STATIC_FORCEINLINE unsigned long __RV_KHMBB16(unsigned long a, unsigned long b)
14510 {
14511  unsigned long result;
14512  __ASM volatile("khmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14513  return result;
14514 }
14515 /* ===== Inline Function End for 4.10.1. KHMBB16 ===== */
14516 
14517 /* ===== Inline Function Start for 4.10.2. KHMBT16 ===== */
14562 __STATIC_FORCEINLINE unsigned long __RV_KHMBT16(unsigned long a, unsigned long b)
14563 {
14564  unsigned long result;
14565  __ASM volatile("khmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14566  return result;
14567 }
14568 /* ===== Inline Function End for 4.10.2. KHMBT16 ===== */
14569 
14570 /* ===== Inline Function Start for 4.10.3. KHMTT16 ===== */
14615 __STATIC_FORCEINLINE unsigned long __RV_KHMTT16(unsigned long a, unsigned long b)
14616 {
14617  unsigned long result;
14618  __ASM volatile("khmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14619  return result;
14620 }
14621 /* ===== Inline Function End for 4.10.3. KHMTT16 ===== */
14622 
14623 /* ===== Inline Function Start for 4.11.1. KMABB32 ===== */
14678 __STATIC_FORCEINLINE long __RV_KMABB32(long t, unsigned long a, unsigned long b)
14679 {
14680  __ASM volatile("kmabb32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14681  return t;
14682 }
14683 /* ===== Inline Function End for 4.11.1. KMABB32 ===== */
14684 
14685 /* ===== Inline Function Start for 4.11.2. KMABT32 ===== */
14740 __STATIC_FORCEINLINE long __RV_KMABT32(long t, unsigned long a, unsigned long b)
14741 {
14742  __ASM volatile("kmabt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14743  return t;
14744 }
14745 /* ===== Inline Function End for 4.11.2. KMABT32 ===== */
14746 
14747 /* ===== Inline Function Start for 4.11.3. KMATT32 ===== */
14802 __STATIC_FORCEINLINE long __RV_KMATT32(long t, unsigned long a, unsigned long b)
14803 {
14804  __ASM volatile("kmatt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14805  return t;
14806 }
14807 /* ===== Inline Function End for 4.11.3. KMATT32 ===== */
14808 
14809 /* ===== Inline Function Start for 4.12.1. KMADA32 ===== */
14858 __STATIC_FORCEINLINE long __RV_KMADA32(long t, unsigned long a, unsigned long b)
14859 {
14860  __ASM volatile("kmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14861  return t;
14862 }
14863 /* ===== Inline Function End for 4.12.1. KMADA32 ===== */
14864 
14865 /* ===== Inline Function Start for 4.12.2. KMAXDA32 ===== */
14914 __STATIC_FORCEINLINE long __RV_KMAXDA32(long t, unsigned long a, unsigned long b)
14915 {
14916  __ASM volatile("kmaxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
14917  return t;
14918 }
14919 /* ===== Inline Function End for 4.12.2. KMAXDA32 ===== */
14920 
14921 /* ===== Inline Function Start for 4.13.1. KMDA32 ===== */
14965 __STATIC_FORCEINLINE long __RV_KMDA32(unsigned long a, unsigned long b)
14966 {
14967  long result;
14968  __ASM volatile("kmda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
14969  return result;
14970 }
14971 /* ===== Inline Function End for 4.13.1. KMDA32 ===== */
14972 
14973 /* ===== Inline Function Start for 4.13.2. KMXDA32 ===== */
15017 __STATIC_FORCEINLINE long __RV_KMXDA32(unsigned long a, unsigned long b)
15018 {
15019  long result;
15020  __ASM volatile("kmxda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15021  return result;
15022 }
15023 /* ===== Inline Function End for 4.13.2. KMXDA32 ===== */
15024 
15025 /* ===== Inline Function Start for 4.14.1. KMADS32 ===== */
15082 __STATIC_FORCEINLINE long __RV_KMADS32(long t, unsigned long a, unsigned long b)
15083 {
15084  __ASM volatile("kmads32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
15085  return t;
15086 }
15087 /* ===== Inline Function End for 4.14.1. KMADS32 ===== */
15088 
15089 /* ===== Inline Function Start for 4.14.2. KMADRS32 ===== */
15146 __STATIC_FORCEINLINE long __RV_KMADRS32(long t, unsigned long a, unsigned long b)
15147 {
15148  __ASM volatile("kmadrs32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
15149  return t;
15150 }
15151 /* ===== Inline Function End for 4.14.2. KMADRS32 ===== */
15152 
15153 /* ===== Inline Function Start for 4.14.3. KMAXDS32 ===== */
15210 __STATIC_FORCEINLINE long __RV_KMAXDS32(long t, unsigned long a, unsigned long b)
15211 {
15212  __ASM volatile("kmaxds32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
15213  return t;
15214 }
15215 /* ===== Inline Function End for 4.14.3. KMAXDS32 ===== */
15216 
15217 /* ===== Inline Function Start for 4.15.1. KMSDA32 ===== */
15265 __STATIC_FORCEINLINE long __RV_KMSDA32(long t, unsigned long a, unsigned long b)
15266 {
15267  __ASM volatile("kmsda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
15268  return t;
15269 }
15270 /* ===== Inline Function End for 4.15.1. KMSDA32 ===== */
15271 
15272 /* ===== Inline Function Start for 4.15.2. KMSXDA32 ===== */
15320 __STATIC_FORCEINLINE long __RV_KMSXDA32(long t, unsigned long a, unsigned long b)
15321 {
15322  __ASM volatile("kmsxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
15323  return t;
15324 }
15325 /* ===== Inline Function End for 4.15.2. KMSXDA32 ===== */
15326 
15327 /* ===== Inline Function Start for 4.16. KSLL32 ===== */
15371 __STATIC_FORCEINLINE unsigned long __RV_KSLL32(unsigned long a, unsigned int b)
15372 {
15373  unsigned long result;
15374  __ASM volatile("ksll32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15375  return result;
15376 }
15377 /* ===== Inline Function End for 4.16. KSLL32 ===== */
15378 
15379 /* ===== Inline Function Start for 4.17. KSLLI32 ===== */
15422 #define __RV_KSLLI32(a, b) \
15423  ({ \
15424  unsigned long __res; \
15425  unsigned long __a = (unsigned long)(a); \
15426  __ASM volatile("kslli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
15427  __res; \
15428  })
15429 /* ===== Inline Function End for 4.17. KSLLI32 ===== */
15430 
15431 /* ===== Inline Function Start for 4.18.1. KSLRA32 ===== */
15489 __STATIC_FORCEINLINE unsigned long __RV_KSLRA32(unsigned long a, int b)
15490 {
15491  unsigned long result;
15492  __ASM volatile("kslra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15493  return result;
15494 }
15495 /* ===== Inline Function End for 4.18.1. KSLRA32 ===== */
15496 
15497 /* ===== Inline Function Start for 4.18.2. KSLRA32.u ===== */
15555 __STATIC_FORCEINLINE unsigned long __RV_KSLRA32_U(unsigned long a, int b)
15556 {
15557  unsigned long result;
15558  __ASM volatile("kslra32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15559  return result;
15560 }
15561 /* ===== Inline Function End for 4.18.2. KSLRA32.u ===== */
15562 
15563 /* ===== Inline Function Start for 4.19. KSTAS32 ===== */
15607 __STATIC_FORCEINLINE unsigned long __RV_KSTAS32(unsigned long a, unsigned long b)
15608 {
15609  unsigned long result;
15610  __ASM volatile("kstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15611  return result;
15612 }
15613 /* ===== Inline Function End for 4.19. KSTAS32 ===== */
15614 
15615 /* ===== Inline Function Start for 4.20. KSTSA32 ===== */
15658 __STATIC_FORCEINLINE unsigned long __RV_KSTSA32(unsigned long a, unsigned long b)
15659 {
15660  unsigned long result;
15661  __ASM volatile("kstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15662  return result;
15663 }
15664 /* ===== Inline Function End for 4.20. KSTSA32 ===== */
15665 
15666 /* ===== Inline Function Start for 4.21. KSUB32 ===== */
15705 __STATIC_FORCEINLINE unsigned long __RV_KSUB32(unsigned long a, unsigned long b)
15706 {
15707  unsigned long result;
15708  __ASM volatile("ksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15709  return result;
15710 }
15711 /* ===== Inline Function End for 4.21. KSUB32 ===== */
15712 
15713 /* ===== Inline Function Start for 4.22.1. PKBB32 ===== */
15753 __STATIC_FORCEINLINE unsigned long __RV_PKBB32(unsigned long a, unsigned long b)
15754 {
15755  unsigned long result;
15756  __ASM volatile("pkbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15757  return result;
15758 }
15759 /* ===== Inline Function End for 4.22.1. PKBB32 ===== */
15760 
15761 /* ===== Inline Function Start for 4.22.2. PKBT32 ===== */
15801 __STATIC_FORCEINLINE unsigned long __RV_PKBT32(unsigned long a, unsigned long b)
15802 {
15803  unsigned long result;
15804  __ASM volatile("pkbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15805  return result;
15806 }
15807 /* ===== Inline Function End for 4.22.2. PKBT32 ===== */
15808 
15809 /* ===== Inline Function Start for 4.22.3. PKTT32 ===== */
15849 __STATIC_FORCEINLINE unsigned long __RV_PKTT32(unsigned long a, unsigned long b)
15850 {
15851  unsigned long result;
15852  __ASM volatile("pktt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15853  return result;
15854 }
15855 /* ===== Inline Function End for 4.22.3. PKTT32 ===== */
15856 
15857 /* ===== Inline Function Start for 4.22.4. PKTB32 ===== */
15897 __STATIC_FORCEINLINE unsigned long __RV_PKTB32(unsigned long a, unsigned long b)
15898 {
15899  unsigned long result;
15900  __ASM volatile("pktb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15901  return result;
15902 }
15903 /* ===== Inline Function End for 4.22.4. PKTB32 ===== */
15904 
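/* Usage sketch (RV64 only): the PKxx32 intrinsics splice the bottom (B)
 * or top (T) 32-bit word of each operand, e.g. PKBB32 returns
 * {a.W[0], b.W[0]}; handy for re-interleaving after a per-word SIMD step:
 *
 *   unsigned long lows  = __RV_PKBB32(x, y); // {x.W[0], y.W[0]}
 *   unsigned long highs = __RV_PKTT32(x, y); // {x.W[1], y.W[1]}
 */
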
15905 /* ===== Inline Function Start for 4.23. RADD32 ===== */
15943 __STATIC_FORCEINLINE unsigned long __RV_RADD32(unsigned long a, unsigned long b)
15944 {
15945  unsigned long result;
15946  __ASM volatile("radd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15947  return result;
15948 }
15949 /* ===== Inline Function End for 4.23. RADD32 ===== */
15950 
15951 /* ===== Inline Function Start for 4.24. RCRAS32 ===== */
15990 __STATIC_FORCEINLINE unsigned long __RV_RCRAS32(unsigned long a, unsigned long b)
15991 {
15992  unsigned long result;
15993  __ASM volatile("rcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
15994  return result;
15995 }
15996 /* ===== Inline Function End for 4.24. RCRAS32 ===== */
15997 
15998 /* ===== Inline Function Start for 4.25. RCRSA32 ===== */
16037 __STATIC_FORCEINLINE unsigned long __RV_RCRSA32(unsigned long a, unsigned long b)
16038 {
16039  unsigned long result;
16040  __ASM volatile("rcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16041  return result;
16042 }
16043 /* ===== Inline Function End for 4.25. RCRSA32 ===== */
16044 
16045 /* ===== Inline Function Start for 4.26. RSTAS32 ===== */
16084 __STATIC_FORCEINLINE unsigned long __RV_RSTAS32(unsigned long a, unsigned long b)
16085 {
16086  unsigned long result;
16087  __ASM volatile("rstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16088  return result;
16089 }
16090 /* ===== Inline Function End for 4.26. RSTAS32 ===== */
16091 
16092 /* ===== Inline Function Start for 4.27. RSTSA32 ===== */
16130 __STATIC_FORCEINLINE unsigned long __RV_RSTSA32(unsigned long a, unsigned long b)
16131 {
16132  unsigned long result;
16133  __ASM volatile("rstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16134  return result;
16135 }
16136 /* ===== Inline Function End for 4.27. RSTSA32 ===== */
16137 
16138 /* ===== Inline Function Start for 4.28. RSUB32 ===== */
16176 __STATIC_FORCEINLINE unsigned long __RV_RSUB32(unsigned long a, unsigned long b)
16177 {
16178  unsigned long result;
16179  __ASM volatile("rsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16180  return result;
16181 }
16182 /* ===== Inline Function End for 4.28. RSUB32 ===== */
16183 
16184 /* ===== Inline Function Start for 4.29. SLL32 ===== */
16216 __STATIC_FORCEINLINE unsigned long __RV_SLL32(unsigned long a, unsigned int b)
16217 {
16218  unsigned long result;
16219  __ASM volatile("sll32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16220  return result;
16221 }
16222 /* ===== Inline Function End for 4.29. SLL32 ===== */
16223 
16224 /* ===== Inline Function Start for 4.30. SLLI32 ===== */
16255 #define __RV_SLLI32(a, b) \
16256  ({ \
16257  unsigned long __res; \
16258  unsigned long __a = (unsigned long)(a); \
16259  __ASM volatile("slli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16260  __res; \
16261  })
16262 /* ===== Inline Function End for 4.30. SLLI32 ===== */
16263 
16264 /* ===== Inline Function Start for 4.31. SMAX32 ===== */
16294 __STATIC_FORCEINLINE unsigned long __RV_SMAX32(unsigned long a, unsigned long b)
16295 {
16296  unsigned long result;
16297  __ASM volatile("smax32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16298  return result;
16299 }
16300 /* ===== Inline Function End for 4.31. SMAX32 ===== */
16301 
16302 /* ===== Inline Function Start for 4.32.1. SMBB32 ===== */
16343 __STATIC_FORCEINLINE long __RV_SMBB32(unsigned long a, unsigned long b)
16344 {
16345  long result;
16346  __ASM volatile("smbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16347  return result;
16348 }
16349 /* ===== Inline Function End for 4.32.1. SMBB32 ===== */
16350 
16351 /* ===== Inline Function Start for 4.32.2. SMBT32 ===== */
16392 __STATIC_FORCEINLINE long __RV_SMBT32(unsigned long a, unsigned long b)
16393 {
16394  long result;
16395  __ASM volatile("smbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16396  return result;
16397 }
16398 /* ===== Inline Function End for 4.32.2. SMBT32 ===== */
16399 
16400 /* ===== Inline Function Start for 4.32.3. SMTT32 ===== */
16441 __STATIC_FORCEINLINE long __RV_SMTT32(unsigned long a, unsigned long b)
16442 {
16443  long result;
16444  __ASM volatile("smtt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16445  return result;
16446 }
16447 /* ===== Inline Function End for 4.32.3. SMTT32 ===== */
16448 
16449 /* ===== Inline Function Start for 4.33.1. SMDS32 ===== */
16494 __STATIC_FORCEINLINE long __RV_SMDS32(unsigned long a, unsigned long b)
16495 {
16496  long result;
16497  __ASM volatile("smds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16498  return result;
16499 }
16500 /* ===== Inline Function End for 4.33.1. SMDS32 ===== */
16501 
16502 /* ===== Inline Function Start for 4.33.2. SMDRS32 ===== */
16547 __STATIC_FORCEINLINE long __RV_SMDRS32(unsigned long a, unsigned long b)
16548 {
16549  long result;
16550  __ASM volatile("smdrs32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16551  return result;
16552 }
16553 /* ===== Inline Function End for 4.33.2. SMDRS32 ===== */
16554 
16555 /* ===== Inline Function Start for 4.33.3. SMXDS32 ===== */
16600 __STATIC_FORCEINLINE long __RV_SMXDS32(unsigned long a, unsigned long b)
16601 {
16602  long result;
16603  __ASM volatile("smxds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16604  return result;
16605 }
16606 /* ===== Inline Function End for 4.33.3. SMXDS32 ===== */
16607 
16608 /* ===== Inline Function Start for 4.34. SMIN32 ===== */
16638 __STATIC_FORCEINLINE unsigned long __RV_SMIN32(unsigned long a, unsigned long b)
16639 {
16640  unsigned long result;
16641  __ASM volatile("smin32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16642  return result;
16643 }
16644 /* ===== Inline Function End for 4.34. SMIN32 ===== */
16645 
16646 /* ===== Inline Function Start for 4.35.1. SRA32 ===== */
16691 __STATIC_FORCEINLINE unsigned long __RV_SRA32(unsigned long a, unsigned int b)
16692 {
16693  unsigned long result;
16694  __ASM volatile("sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16695  return result;
16696 }
16697 /* ===== Inline Function End for 4.35.1. SRA32 ===== */
16698 
16699 /* ===== Inline Function Start for 4.35.2. SRA32.u ===== */
16744 __STATIC_FORCEINLINE unsigned long __RV_SRA32_U(unsigned long a, unsigned int b)
16745 {
16746  unsigned long result;
16747  __ASM volatile("sra32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16748  return result;
16749 }
16750 /* ===== Inline Function End for 4.35.2. SRA32.u ===== */
16751 
16752 /* ===== Inline Function Start for 4.36.1. SRAI32 ===== */
16797 #define __RV_SRAI32(a, b) \
16798  ({ \
16799  unsigned long __res; \
16800  unsigned long __a = (unsigned long)(a); \
16801  __ASM volatile("srai32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16802  __res; \
16803  })
16804 /* ===== Inline Function End for 4.36.1. SRAI32 ===== */
16805 
16806 /* ===== Inline Function Start for 4.36.2. SRAI32.u ===== */
16851 #define __RV_SRAI32_U(a, b) \
16852  ({ \
16853  unsigned long __res; \
16854  unsigned long __a = (unsigned long)(a); \
16855  __ASM volatile("srai32.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16856  __res; \
16857  })
16858 /* ===== Inline Function End for 4.36.2. SRAI32.u ===== */
16859 
16860 /* ===== Inline Function Start for 4.37. SRAIW.u ===== */
16897 #define __RV_SRAIW_U(a, b) \
16898  ({ \
16899  long __res; \
16900  int __a = (int)(a); \
16901  __ASM volatile("sraiw.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16902  __res; \
16903  })
16904 /* ===== Inline Function End for 4.37. SRAIW.u ===== */
16905 
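/* Usage sketch (RV64 only): SRAIW.u rounds, arithmetically shifts the
 * low 32-bit word, and sign-extends the result to 64 bits, mirroring the
 * scalar sraiw:
 *
 *   long q15 = __RV_SRAIW_U(q30_word, 15); // rounded Q30 -> Q15, sign-extended
 */
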
16906 /* ===== Inline Function Start for 4.38.1. SRL32 ===== */
16951 __STATIC_FORCEINLINE unsigned long __RV_SRL32(unsigned long a, unsigned int b)
16952 {
16953  unsigned long result;
16954  __ASM volatile("srl32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
16955  return result;
16956 }
16957 /* ===== Inline Function End for 4.38.1. SRL32 ===== */
16958 
16959 /* ===== Inline Function Start for 4.38.2. SRL32.u ===== */
17004 __STATIC_FORCEINLINE unsigned long __RV_SRL32_U(unsigned long a, unsigned int b)
17005 {
17006  unsigned long result;
17007  __ASM volatile("srl32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17008  return result;
17009 }
17010 /* ===== Inline Function End for 4.38.2. SRL32.u ===== */
17011 
17012 /* ===== Inline Function Start for 4.39.1. SRLI32 ===== */
17055 #define __RV_SRLI32(a, b) \
17056  ({ \
17057  unsigned long __res; \
17058  unsigned long __a = (unsigned long)(a); \
17059  __ASM volatile("srli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
17060  __res; \
17061  })
17062 /* ===== Inline Function End for 4.39.1. SRLI32 ===== */
17063 
17064 /* ===== Inline Function Start for 4.39.2. SRLI32.u ===== */
17107 #define __RV_SRLI32_U(a, b) \
17108  ({ \
17109  unsigned long __res; \
17110  unsigned long __a = (unsigned long)(a); \
17111  __ASM volatile("srli32.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
17112  __res; \
17113  })
17114 /* ===== Inline Function End for 4.39.2. SRLI32.u ===== */
17115 
17116 /* ===== Inline Function Start for 4.40. STAS32 ===== */
17151 __STATIC_FORCEINLINE unsigned long __RV_STAS32(unsigned long a, unsigned long b)
17152 {
17153  unsigned long result;
17154  __ASM volatile("stas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17155  return result;
17156 }
17157 /* ===== Inline Function End for 4.40. STAS32 ===== */
17158 
17159 /* ===== Inline Function Start for 4.41. STSA32 ===== */
17193 __STATIC_FORCEINLINE unsigned long __RV_STSA32(unsigned long a, unsigned long b)
17194 {
17195  unsigned long result;
17196  __ASM volatile("stsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17197  return result;
17198 }
17199 /* ===== Inline Function End for 4.41. STSA32 ===== */
17200 
17201 /* ===== Inline Function Start for 4.42. SUB32 ===== */
17233 __STATIC_FORCEINLINE unsigned long __RV_SUB32(unsigned long a, unsigned long b)
17234 {
17235  unsigned long result;
17236  __ASM volatile("sub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17237  return result;
17238 }
17239 /* ===== Inline Function End for 4.42. SUB32 ===== */
17240 
17241 /* ===== Inline Function Start for 4.43. UKADD32 ===== */
17277 __STATIC_FORCEINLINE unsigned long __RV_UKADD32(unsigned long a, unsigned long b)
17278 {
17279  unsigned long result;
17280  __ASM volatile("ukadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17281  return result;
17282 }
17283 /* ===== Inline Function End for 4.43. UKADD32 ===== */
17284 
17285 /* ===== Inline Function Start for 4.44. UKCRAS32 ===== */
17330 __STATIC_FORCEINLINE unsigned long __RV_UKCRAS32(unsigned long a, unsigned long b)
17331 {
17332  unsigned long result;
17333  __ASM volatile("ukcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17334  return result;
17335 }
17336 /* ===== Inline Function End for 4.44. UKCRAS32 ===== */
17337 
17338 /* ===== Inline Function Start for 4.45. UKCRSA32 ===== */
17382 __STATIC_FORCEINLINE unsigned long __RV_UKCRSA32(unsigned long a, unsigned long b)
17383 {
17384  unsigned long result;
17385  __ASM volatile("ukcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17386  return result;
17387 }
17388 /* ===== Inline Function End for 4.45. UKCRSA32 ===== */
17389 
17390 /* ===== Inline Function Start for 4.46. UKSTAS32 ===== */
17435 __STATIC_FORCEINLINE unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b)
17436 {
17437  unsigned long result;
17438  __ASM volatile("ukstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17439  return result;
17440 }
17441 /* ===== Inline Function End for 4.46. UKSTAS32 ===== */
17442 
17443 /* ===== Inline Function Start for 4.47. UKSTSA32 ===== */
17487 __STATIC_FORCEINLINE unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b)
17488 {
17489  unsigned long result;
17490  __ASM volatile("ukstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17491  return result;
17492 }
17493 /* ===== Inline Function End for 4.47. UKSTSA32 ===== */
17494 
17495 /* ===== Inline Function Start for 4.48. UKSUB32 ===== */
17531 __STATIC_FORCEINLINE unsigned long __RV_UKSUB32(unsigned long a, unsigned long b)
17532 {
17533  unsigned long result;
17534  __ASM volatile("uksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17535  return result;
17536 }
17537 /* ===== Inline Function End for 4.48. UKSUB32 ===== */
17538 
17539 /* ===== Inline Function Start for 4.49. UMAX32 ===== */
17569 __STATIC_FORCEINLINE unsigned long __RV_UMAX32(unsigned long a, unsigned long b)
17570 {
17571  unsigned long result;
17572  __ASM volatile("umax32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17573  return result;
17574 }
17575 /* ===== Inline Function End for 4.49. UMAX32 ===== */
17576 
17577 /* ===== Inline Function Start for 4.50. UMIN32 ===== */
17607 __STATIC_FORCEINLINE unsigned long __RV_UMIN32(unsigned long a, unsigned long b)
17608 {
17609  unsigned long result;
17610  __ASM volatile("umin32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17611  return result;
17612 }
17613 /* ===== Inline Function End for 4.50. UMIN32 ===== */
17614 
17615 /* ===== Inline Function Start for 4.51. URADD32 ===== */
17653 __STATIC_FORCEINLINE unsigned long __RV_URADD32(unsigned long a, unsigned long b)
17654 {
17655  unsigned long result;
17656  __ASM volatile("uradd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17657  return result;
17658 }
17659 /* ===== Inline Function End for 4.51. URADD32 ===== */
17660 
17661 /* ===== Inline Function Start for 4.52. URCRAS32 ===== */
17700 __STATIC_FORCEINLINE unsigned long __RV_URCRAS32(unsigned long a, unsigned long b)
17701 {
17702  unsigned long result;
17703  __ASM volatile("urcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17704  return result;
17705 }
17706 /* ===== Inline Function End for 4.52. URCRAS32 ===== */
17707 
17708 /* ===== Inline Function Start for 4.53. URCRSA32 ===== */
17747 __STATIC_FORCEINLINE unsigned long __RV_URCRSA32(unsigned long a, unsigned long b)
17748 {
17749  unsigned long result;
17750  __ASM volatile("urcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17751  return result;
17752 }
17753 /* ===== Inline Function End for 4.53. URCRSA32 ===== */
17754 
17755 /* ===== Inline Function Start for 4.54. URSTAS32 ===== */
17794 __STATIC_FORCEINLINE unsigned long __RV_URSTAS32(unsigned long a, unsigned long b)
17795 {
17796  unsigned long result;
17797  __ASM volatile("urstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17798  return result;
17799 }
17800 /* ===== Inline Function End for 4.54. URSTAS32 ===== */
17801 
17802 /* ===== Inline Function Start for 4.55. URSTSA32 ===== */
17841 __STATIC_FORCEINLINE unsigned long __RV_URSTSA32(unsigned long a, unsigned long b)
17842 {
17843  unsigned long result;
17844  __ASM volatile("urstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17845  return result;
17846 }
17847 /* ===== Inline Function End for 4.55. URSTSA32 ===== */
17848 
17849 /* ===== Inline Function Start for 4.56. URSUB32 ===== */
17887 __STATIC_FORCEINLINE unsigned long __RV_URSUB32(unsigned long a, unsigned long b)
17888 {
17889  unsigned long result;
17890  __ASM volatile("ursub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
17891  return result;
17892 }
17893 /* ===== Inline Function End for 4.56. URSUB32 ===== */
17894 
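/* Usage sketch (illustrative, hypothetical helper): the ur* forms compute the
 * full-precision result and shift right by one, so uradd32 yields a per-lane
 * unsigned average with no risk of overflow. */
__STATIC_FORCEINLINE unsigned long average_u32x2(unsigned long a, unsigned long b)
{
    return __RV_URADD32(a, b); /* each 32-bit lane becomes (a + b) >> 1 */
}
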
17895 #endif /* __RISCV_XLEN == 64 */
17896 
17904 /* ===== Inline Function Start for EXPD80 ===== */
17932 __STATIC_FORCEINLINE unsigned long __RV_EXPD80(unsigned long a)
17933 {
17934  unsigned long result;
17935  __ASM volatile("expd80 %0, %1" : "=r"(result) : "r"(a));
17936  return result;
17937 }
17938 /* ===== Inline Function End for EXPD80 ===== */
17939 
17940 /* ===== Inline Function Start for EXPD81 ===== */
17967 __STATIC_FORCEINLINE unsigned long __RV_EXPD81(unsigned long a)
17968 {
17969  unsigned long result;
17970  __ASM volatile("expd81 %0, %1" : "=r"(result) : "r"(a));
17971  return result;
17972 }
17973 /* ===== Inline Function End for EXPD81 ===== */
17974 
17975 /* ===== Inline Function Start for EXPD82 ===== */
18002 __STATIC_FORCEINLINE unsigned long __RV_EXPD82(unsigned long a)
18003 {
18004  unsigned long result;
18005  __ASM volatile("expd82 %0, %1" : "=r"(result) : "r"(a));
18006  return result;
18007 }
18008 /* ===== Inline Function End for EXPD82 ===== */
18009 
18010 /* ===== Inline Function Start for EXPD83 ===== */
18037 __STATIC_FORCEINLINE unsigned long __RV_EXPD83(unsigned long a)
18038 {
18039  unsigned long result;
18040  __ASM volatile("expd83 %0, %1" : "=r"(result) : "r"(a));
18041  return result;
18042 }
18043 /* ===== Inline Function End for EXPD83 ===== */
18044 
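/* Usage sketch (illustrative, hypothetical helper): expd8n appears to
 * broadcast byte n of the source across every byte of the result, which is
 * handy for splatting an 8-bit constant before SIMD byte arithmetic. */
__STATIC_FORCEINLINE unsigned long splat_byte0(unsigned long x)
{
    return __RV_EXPD80(x); /* every result byte becomes byte 0 of x */
}
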
18045 #if (__RISCV_XLEN == 64)
18046 /* ===== Inline Function Start for EXPD84 ===== */
18073 __STATIC_FORCEINLINE unsigned long __RV_EXPD84(unsigned long a)
18074 {
18075  unsigned long result;
18076  __ASM volatile("expd84 %0, %1" : "=r"(result) : "r"(a));
18077  return result;
18078 }
18079 /* ===== Inline Function End for EXPD84 ===== */
18080 
18081 /* ===== Inline Function Start for EXPD85 ===== */
18108 __STATIC_FORCEINLINE unsigned long __RV_EXPD85(unsigned long a)
18109 {
18110  unsigned long result;
18111  __ASM volatile("expd85 %0, %1" : "=r"(result) : "r"(a));
18112  return result;
18113 }
18114 /* ===== Inline Function End for EXPD85 ===== */
18115 
18116 /* ===== Inline Function Start for EXPD86 ===== */
18143 __STATIC_FORCEINLINE unsigned long __RV_EXPD86(unsigned long a)
18144 {
18145  unsigned long result;
18146  __ASM volatile("expd86 %0, %1" : "=r"(result) : "r"(a));
18147  return result;
18148 }
18149 /* ===== Inline Function End for EXPD86 ===== */
18150 
18151 /* ===== Inline Function Start for EXPD87 ===== */
18178 __STATIC_FORCEINLINE unsigned long __RV_EXPD87(unsigned long a)
18179 {
18180  unsigned long result;
18181  __ASM volatile("expd87 %0, %1" : "=r"(result) : "r"(a));
18182  return result;
18183 }
18184 /* ===== Inline Function End for EXPD87 ===== */
18185 #endif /* __RISCV_XLEN == 64 */
18186 
18187 #if (__RISCV_XLEN == 32) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)
18188 /* XXXXX Nuclei Extended DSP Instructions for RV32 XXXXX */
18189 
18209 /* ===== Inline Function Start for DKHM8 ===== */
18255 __STATIC_FORCEINLINE unsigned long long __RV_DKHM8(unsigned long long a, unsigned long long b)
18256 {
18257  unsigned long long result;
18258  __ASM volatile("dkhm8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18259  return result;
18260 }
18261 /* ===== Inline Function End for DKHM8 ===== */
18262 
18263 /* ===== Inline Function Start for DKHM16 ===== */
18310 __STATIC_FORCEINLINE unsigned long long __RV_DKHM16(unsigned long long a, unsigned long long b)
18311 {
18312  unsigned long long result;
18313  __ASM volatile("dkhm16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18314  return result;
18315 }
18316 /* ===== Inline Function End for DKHM16 ===== */
18317 
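/* Usage sketch (illustrative, hypothetical helper): dkhm16 multiplies the four
 * Q15 lanes of two 64-bit operands, i.e. (a*b) >> 15 per lane, saturating the
 * single overflow case 0x8000 * 0x8000 to 0x7FFF; dkhm8 is the Q7 analogue on
 * eight byte lanes. */
__STATIC_FORCEINLINE unsigned long long q15_scale_x4(unsigned long long samples,
                                                     unsigned long long gains)
{
    return __RV_DKHM16(samples, gains); /* four saturating Q15 products */
}
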
18318 /* ===== Inline Function Start for DKABS8 ===== */
18355 __STATIC_FORCEINLINE unsigned long long __RV_DKABS8(unsigned long long a)
18356 {
18357  unsigned long long result;
18358  __ASM volatile("dkabs8 %0, %1" : "=r"(result) : "r"(a));
18359  return result;
18360 }
18361 /* ===== Inline Function End for DKABS8 ===== */
18362 
18363 /* ===== Inline Function Start for DKABS16 ===== */
18400 __STATIC_FORCEINLINE unsigned long long __RV_DKABS16(unsigned long long a)
18401 {
18402  unsigned long long result;
18403  __ASM volatile("dkabs16 %0, %1" : "=r"(result) : "r"(a));
18404  return result;
18405 }
18406 /* ===== Inline Function End for DKABS16 ===== */
18407 
18408 /* ===== Inline Function Start for DKSLRA8 ===== */
18458 __STATIC_FORCEINLINE unsigned long long __RV_DKSLRA8(unsigned long long a, int b)
18459 {
18460  unsigned long long result;
18461  __ASM volatile("dkslra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18462  return result;
18463 }
18464 /* ===== Inline Function End for DKSLRA8 ===== */
18465 
18466 /* ===== Inline Function Start for DKSLRA16 ===== */
18517 __STATIC_FORCEINLINE unsigned long long __RV_DKSLRA16(unsigned long long a, int b)
18518 {
18519  unsigned long long result;
18520  __ASM volatile("dkslra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18521  return result;
18522 }
18523 /* ===== Inline Function End for DKSLRA16 ===== */
18524 
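/* Usage sketch (illustrative, hypothetical helper): dkslra16 shifts all four
 * 16-bit lanes by the signed amount in b: b >= 0 shifts left with saturation,
 * b < 0 shifts right arithmetically, so one intrinsic covers both gain and
 * attenuation by a power of two. */
__STATIC_FORCEINLINE unsigned long long rescale_q15_x4(unsigned long long v, int exponent)
{
    return __RV_DKSLRA16(v, exponent); /* negative exponent -> arithmetic right shift */
}
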
18525 /* ===== Inline Function Start for DKADD8 ===== */
18564 __STATIC_FORCEINLINE unsigned long long __RV_DKADD8(unsigned long long a, unsigned long long b)
18565 {
18566  unsigned long long result;
18567  __ASM volatile("dkadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18568  return result;
18569 }
18570 /* ===== Inline Function End for DKADD8 ===== */
18571 
18572 /* ===== Inline Function Start for DKADD16 ===== */
18611 __STATIC_FORCEINLINE unsigned long long __RV_DKADD16(unsigned long long a, unsigned long long b)
18612 {
18613  unsigned long long result;
18614  __ASM volatile("dkadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18615  return result;
18616 }
18617 /* ===== Inline Function End for DKADD16 ===== */
18618 
18619 /* ===== Inline Function Start for DKSUB8 ===== */
18658 __STATIC_FORCEINLINE unsigned long long __RV_DKSUB8(unsigned long long a, unsigned long long b)
18659 {
18660  unsigned long long result;
18661  __ASM volatile("dksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18662  return result;
18663 }
18664 /* ===== Inline Function End for DKSUB8 ===== */
18665 
18666 /* ===== Inline Function Start for DKSUB16 ===== */
18706 __STATIC_FORCEINLINE unsigned long long __RV_DKSUB16(unsigned long long a, unsigned long long b)
18707 {
18708  unsigned long long result;
18709  __ASM volatile("dksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18710  return result;
18711 }
18712 /* ===== Inline Function End for DKSUB16 ===== */
18713 
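/* Usage sketch (illustrative, hypothetical helper): dkadd16/dksub16 are the
 * saturating counterparts of plain lane-wise add/sub, clamping each signed
 * 16-bit lane to [-32768, 32767]. */
__STATIC_FORCEINLINE void sum_diff_q15_x4(unsigned long long x, unsigned long long y,
                                          unsigned long long *sum, unsigned long long *diff)
{
    *sum  = __RV_DKADD16(x, y); /* each lane clamps instead of wrapping */
    *diff = __RV_DKSUB16(x, y);
}
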
18714 /* ===== Inline Function Start for DKHMX8 ===== */
18759 __STATIC_FORCEINLINE unsigned long long __RV_DKHMX8(unsigned long long a, unsigned long long b)
18760 {
18761  unsigned long long result;
18762  __ASM volatile("dkhmx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18763  return result;
18764 }
18765 /* ===== Inline Function End for DKHMX8 ===== */
18766 
18767 /* ===== Inline Function Start for DKHMX16 ===== */
18812 __STATIC_FORCEINLINE unsigned long long __RV_DKHMX16(unsigned long long a, unsigned long long b)
18813 {
18814  unsigned long long result;
18815  __ASM volatile("dkhmx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18816  return result;
18817 }
18818 /* ===== Inline Function End for DKHMX16 ===== */
18819 
18820 /* ===== Inline Function Start for DSMMUL ===== */
18857 __STATIC_FORCEINLINE unsigned long long __RV_DSMMUL(unsigned long long a, unsigned long long b)
18858 {
18859  unsigned long long result;
18860  __ASM volatile("dsmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18861  return result;
18862 }
18863 /* ===== Inline Function End for DSMMUL ===== */
18864 
18865 /* ===== Inline Function Start for DSMMUL.u ===== */
18902 __STATIC_FORCEINLINE unsigned long long __RV_DSMMUL_U(unsigned long long a, unsigned long long b)
18903 {
18904  unsigned long long result;
18905  __ASM volatile("dsmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18906  return result;
18907 }
18908 /* ===== Inline Function End for DSMMUL.u ===== */
18909 
18910 /* ===== Inline Function Start for DKWMMUL ===== */
18949 __STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL(unsigned long long a, unsigned long long b)
18950 {
18951  unsigned long long result;
18952  __ASM volatile("dkwmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
18953  return result;
18954 }
18955 /* ===== Inline Function End for DKWMMUL ===== */
18956 
18957 /* ===== Inline Function Start for DKWMMUL.u ===== */
18996 __STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL_U(unsigned long long a, unsigned long long b)
18997 {
18998  unsigned long long result;
18999  __ASM volatile("dkwmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19000  return result;
19001 }
19002 /* ===== Inline Function End for DKWMMUL.u ===== */
19003 
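/* Usage sketch (illustrative, hypothetical helper): per 32-bit lane, dsmmul
 * returns the most significant word of the 64-bit signed product
 * ((a*b) >> 32), while dkwmmul doubles the product first ((a*b) << 1 >> 32),
 * which is the usual Q31 fractional multiply; the .u forms round the
 * discarded bits instead of truncating. */
__STATIC_FORCEINLINE unsigned long long q31_mul_x2(unsigned long long a, unsigned long long b)
{
    return __RV_DKWMMUL_U(a, b); /* rounded, saturating Q31 multiply per lane */
}
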
19004 /* ===== Inline Function Start for DKABS32 ===== */
19041 __STATIC_FORCEINLINE unsigned long long __RV_DKABS32(unsigned long long a)
19042 {
19043  unsigned long long result;
19044  __ASM volatile("dkabs32 %0, %1" : "=r"(result) : "r"(a));
19045  return result;
19046 }
19047 /* ===== Inline Function End for DKABS32 ===== */
19048 
19049 /* ===== Inline Function Start for DKSLRA32 ===== */
19094 __STATIC_FORCEINLINE unsigned long long __RV_DKSLRA32(unsigned long long a, int b)
19095 {
19096  unsigned long long result;
19097  __ASM volatile("dkslra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19098  return result;
19099 }
19100 /* ===== Inline Function End for DKSLRA32 ===== */
19101 
19102 /* ===== Inline Function Start for DKADD32 ===== */
19141 __STATIC_FORCEINLINE unsigned long long __RV_DKADD32(unsigned long long a, unsigned long long b)
19142 {
19143  unsigned long long result;
19144  __ASM volatile("dkadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19145  return result;
19146 }
19147 /* ===== Inline Function End for DKADD32 ===== */
19148 
19149 /* ===== Inline Function Start for DKSUB32 ===== */
19188 __STATIC_FORCEINLINE unsigned long long __RV_DKSUB32(unsigned long long a, unsigned long long b)
19189 {
19190  unsigned long long result;
19191  __ASM volatile("dksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19192  return result;
19193 }
19194 /* ===== Inline Function End for DKSUB32 ===== */
19195 
19196 /* ===== Inline Function Start for DRADD16 ===== */
19226 __STATIC_FORCEINLINE unsigned long long __RV_DRADD16(unsigned long long a, unsigned long long b)
19227 {
19228  unsigned long long result;
19229  __ASM volatile("dradd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19230  return result;
19231 }
19232 /* ===== Inline Function End for DRADD16 ===== */
19233 
19234 /* ===== Inline Function Start for DSUB16 ===== */
19264 __STATIC_FORCEINLINE unsigned long long __RV_DSUB16(unsigned long long a, unsigned long long b)
19265 {
19266  unsigned long long result;
19267  __ASM volatile("dsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19268  return result;
19269 }
19270 /* ===== Inline Function End for DSUB16 ===== */
19271 
19272 /* ===== Inline Function Start for DRADD32 ===== */
19302 __STATIC_FORCEINLINE unsigned long long __RV_DRADD32(unsigned long long a, unsigned long long b)
19303 {
19304  unsigned long long result;
19305  __ASM volatile("dradd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19306  return result;
19307 }
19308 /* ===== Inline Function End for DRADD32 ===== */
19309 
19310 /* ===== Inline Function Start for DSUB32 ===== */
19340 __STATIC_FORCEINLINE unsigned long long __RV_DSUB32(unsigned long long a, unsigned long long b)
19341 {
19342  unsigned long long result;
19343  __ASM volatile("dsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19344  return result;
19345 }
19346 /* ===== Inline Function End for DSUB32 ===== */
19347 
19348 /* ===== Inline Function Start for DMSR16 ===== */
19385 __STATIC_FORCEINLINE unsigned long long __RV_DMSR16(unsigned long a, unsigned long b)
19386 {
19387  unsigned long long result;
19388  __ASM volatile("dmsr16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19389  return result;
19390 }
19391 /* ===== Inline Function End for DMSR16 ===== */
19392 
19393 /* ===== Inline Function Start for DMSR17 ===== */
19431 __STATIC_FORCEINLINE unsigned long long __RV_DMSR17(unsigned long a, unsigned long b)
19432 {
19433  unsigned long long result;
19434  __ASM volatile("dmsr17 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19435  return result;
19436 }
19437 /* ===== Inline Function End for DMSR17 ===== */
19438 
19439 /* ===== Inline Function Start for DMSR33 ===== */
19472 __STATIC_FORCEINLINE unsigned long long __RV_DMSR33(unsigned long long a, unsigned long long b)
19473 {
19474  unsigned long long result;
19475  __ASM volatile("dmsr33 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19476  return result;
19477 }
19478 /* ===== Inline Function End for DMSR33 ===== */
19479 
19480 /* ===== Inline Function Start for DMXSR33 ===== */
19513 __STATIC_FORCEINLINE unsigned long long __RV_DMXSR33(unsigned long long a, unsigned long long b)
19514 {
19515  unsigned long long result;
19516  __ASM volatile("dmxsr33 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19517  return result;
19518 }
19519 /* ===== Inline Function End for DMXSR33 ===== */
19520 
19521 /* ===== Inline Function Start for DREDAS16 ===== */
19551 __STATIC_FORCEINLINE unsigned long __RV_DREDAS16(unsigned long long a)
19552 {
19553  unsigned long result;
19554  __ASM volatile("dredas16 %0, %1" : "=r"(result) : "r"(a));
19555  return result;
19556 }
19557 /* ===== Inline Function End for DREDAS16 ===== */
19558 
19559 /* ===== Inline Function Start for DREDSA16 ===== */
19587 __STATIC_FORCEINLINE unsigned long __RV_DREDSA16(unsigned long long a)
19588 {
19589  unsigned long result;
19590  __ASM volatile("dredsa16 %0, %1" : "=r"(result) : "r"(a));
19591  return result;
19592 }
19593 /* ===== Inline Function End for DREDSA16 ===== */
19594 
19595 /* ===== Inline Function Start for DKCLIP64 ===== */
19634 __STATIC_FORCEINLINE int16_t __RV_DKCLIP64(unsigned long long a)
19635 {
19636  int16_t result;
19637  __ASM volatile("dkclip64 %0, %1" : "=r"(result) : "r"(a));
19638  return result;
19639 }
19640 /* ===== Inline Function End for DKCLIP64 ===== */
19641 
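/* Usage sketch (illustrative, hypothetical helper): judging by the prototype,
 * dkclip64 saturates a signed 64-bit accumulator down to a signed 16-bit
 * result, the typical final step after a long Q15 MAC chain. */
__STATIC_FORCEINLINE int16_t finish_q15_acc(unsigned long long acc64)
{
    return __RV_DKCLIP64(acc64); /* clamps to [-32768, 32767] */
}
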
19642 /* ===== Inline Function Start for DKMDA ===== */
19681 __STATIC_FORCEINLINE unsigned long long __RV_DKMDA(unsigned long long a, unsigned long long b)
19682 {
19683  unsigned long long result;
19684  __ASM volatile("dkmda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19685  return result;
19686 }
19687 /* ===== Inline Function End for DKMDA ===== */
19688 
19689 /* ===== Inline Function Start for DKMXDA ===== */
19728 __STATIC_FORCEINLINE unsigned long long __RV_DKMXDA(unsigned long long a, unsigned long long b)
19729 {
19730  unsigned long long result;
19731  __ASM volatile("dkmxda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19732  return result;
19733 }
19734 /* ===== Inline Function End for DKMXDA ===== */
19735 
19736 /* ===== Inline Function Start for DSMDRS ===== */
19769 __STATIC_FORCEINLINE unsigned long long __RV_DSMDRS(unsigned long long a, unsigned long long b)
19770 {
19771  unsigned long long result;
19772  __ASM volatile("dsmdrs %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19773  return result;
19774 }
19775 /* ===== Inline Function End for DSMDRS ===== */
19776 
19777 /* ===== Inline Function Start for DSMXDS ===== */
19810 __STATIC_FORCEINLINE unsigned long long __RV_DSMXDS(unsigned long long a, unsigned long long b)
19811 {
19812  unsigned long long result;
19813  __ASM volatile("dsmxds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19814  return result;
19815 }
19816 /* ===== Inline Function End for DSMXDS ===== */
19817 
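/* Usage sketch (illustrative, hypothetical helper): these follow the classic
 * dual-16-bit multiply family: kmda sums the two products of paired 16-bit
 * lanes (with saturation), smdrs subtracts them (bottom product minus top
 * product), and smxds takes the crossed difference; the d-prefixed forms
 * apply that per 32-bit half of the 64-bit operands. */
__STATIC_FORCEINLINE unsigned long long cplx_dot_step(unsigned long long a,
                                                      unsigned long long b)
{
    return __RV_DKMDA(a, b); /* per half: a.top*b.top + a.bot*b.bot, saturated */
}
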
19818 /* ===== Inline Function Start for DSMBB32 ===== */
19848 __STATIC_FORCEINLINE long long __RV_DSMBB32(unsigned long long a, unsigned long long b)
19849 {
19850  long long result;
19851  __ASM volatile("dsmbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19852  return result;
19853 }
19854 /* ===== Inline Function End for DSMBB32 ===== */
19855 
19856 /* ===== Inline Function Start for DSMBB32.sra14 ===== */
19887 __STATIC_FORCEINLINE long long __RV_DSMBB32_SRA14(unsigned long long a, unsigned long long b)
19888 {
19889  long long result;
19890  __ASM volatile("dsmbb32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19891  return result;
19892 }
19893 /* ===== Inline Function End for DSMBB32.sra14 ===== */
19894 
19895 /* ===== Inline Function Start for DSMBB32.sra32 ===== */
19927 __STATIC_FORCEINLINE long long __RV_DSMBB32_SRA32(unsigned long long a, unsigned long long b)
19928 {
19929  long long result;
19930  __ASM volatile("dsmbb32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19931  return result;
19932 }
19933 /* ===== Inline Function End for DSMBB32.sra32 ===== */
19934 
19935 /* ===== Inline Function Start for DSMBT32 ===== */
19967 __STATIC_FORCEINLINE long long __RV_DSMBT32(unsigned long long a, unsigned long long b)
19968 {
19969  long long result;
19970  __ASM volatile("dsmbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
19971  return result;
19972 }
19973 /* ===== Inline Function End for DSMBT32 ===== */
19974 
19975 /* ===== Inline Function Start for DSMBT32.sra14 ===== */
20007 __STATIC_FORCEINLINE long long __RV_DSMBT32_SRA14(unsigned long long a, unsigned long long b)
20008 {
20009  long long result;
20010  __ASM volatile("dsmbt32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20011  return result;
20012 }
20013 /* ===== Inline Function End for DSMBT32.sra14 ===== */
20014 
20015 /* ===== Inline Function Start for DSMBT32.sra32 ===== */
20047 __STATIC_FORCEINLINE long long __RV_DSMBT32_SRA32(unsigned long long a, unsigned long long b)
20048 {
20049  long long result;
20050  __ASM volatile("dsmbt32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20051  return result;
20052 }
20053 /* ===== Inline Function End for DSMBT32.sra32 ===== */
20054 
20055 /* ===== Inline Function Start for DSMTT32 ===== */
20087 __STATIC_FORCEINLINE long long __RV_DSMTT32(unsigned long long a, unsigned long long b)
20088 {
20089  long long result;
20090  __ASM volatile("dsmtt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20091  return result;
20092 }
20093 /* ===== Inline Function End for DSMTT32 ===== */
20094 
20095 /* ===== Inline Function Start for DSMTT32.sra14 ===== */
20127 __STATIC_FORCEINLINE long long __RV_DSMTT32_SRA14(unsigned long long a, unsigned long long b)
20128 {
20129  long long result;
20130  __ASM volatile("dsmtt32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20131  return result;
20132 }
20133 /* ===== Inline Function End for DSMTT32.sra14 ===== */
20134 
20135 /* ===== Inline Function Start for DSMTT32.sra32 ===== */
20167 __STATIC_FORCEINLINE long long __RV_DSMTT32_SRA32(unsigned long long a, unsigned long long b)
20168 {
20169  long long result;
20170  __ASM volatile("dsmtt32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20171  return result;
20172 }
20173 /* ===== Inline Function End for DSMTT32.sra32 ===== */
20174 
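/* Usage sketch (illustrative, hypothetical helper): dsmbb32/dsmbt32/dsmtt32
 * pick one 32-bit half from each operand (b = bottom, t = top) and return the
 * full 64-bit signed product; the .sra14/.sra32 forms arithmetic-shift that
 * product right by 14 or 32 bits, matching common fixed-point
 * renormalisation steps. */
__STATIC_FORCEINLINE long long q31_bb_product(unsigned long long a, unsigned long long b)
{
    return __RV_DSMBB32_SRA32(a, b); /* (a.bot * b.bot) >> 32 */
}
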
20175 /* ===== Inline Function Start for DPKBB32 ===== */
20204 __STATIC_FORCEINLINE unsigned long long __RV_DPKBB32(unsigned long long a, unsigned long long b)
20205 {
20206  unsigned long long result;
20207  __ASM volatile("dpkbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20208  return result;
20209 }
20210 /* ===== Inline Function End for DPKBB32 ===== */
20211 
20212 /* ===== Inline Function Start for DPKBT32 ===== */
20241 __STATIC_FORCEINLINE unsigned long long __RV_DPKBT32(unsigned long long a, unsigned long long b)
20242 {
20243  unsigned long long result;
20244  __ASM volatile("dpkbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20245  return result;
20246 }
20247 /* ===== Inline Function End for DPKBT32 ===== */
20248 
20249 /* ===== Inline Function Start for DPKTT32 ===== */
20278 __STATIC_FORCEINLINE unsigned long long __RV_DPKTT32(unsigned long long a, unsigned long long b)
20279 {
20280  unsigned long long result;
20281  __ASM volatile("dpktt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20282  return result;
20283 }
20284 /* ===== Inline Function End for DPKTT32 ===== */
20285 
20286 /* ===== Inline Function Start for DPKTB32 ===== */
20315 __STATIC_FORCEINLINE unsigned long long __RV_DPKTB32(unsigned long long a, unsigned long long b)
20316 {
20317  unsigned long long result;
20318  __ASM volatile("dpktb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20319  return result;
20320 }
20321 /* ===== Inline Function End for DPKTB32 ===== */
20322 
20323 /* ===== Inline Function Start for DPKTB16 ===== */
20353 __STATIC_FORCEINLINE unsigned long long __RV_DPKTB16(unsigned long long a, unsigned long long b)
20354 {
20355  unsigned long long result;
20356  __ASM volatile("dpktb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20357  return result;
20358 }
20359 /* ===== Inline Function End for DPKTB16 ===== */
20360 
20361 /* ===== Inline Function Start for DPKBB16 ===== */
20391 __STATIC_FORCEINLINE unsigned long long __RV_DPKBB16(unsigned long long a, unsigned long long b)
20392 {
20393  unsigned long long result;
20394  __ASM volatile("dpkbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20395  return result;
20396 }
20397 /* ===== Inline Function End for DPKBB16 ===== */
20398 
20399 /* ===== Inline Function Start for DPKBT16 ===== */
20429 __STATIC_FORCEINLINE unsigned long long __RV_DPKBT16(unsigned long long a, unsigned long long b)
20430 {
20431  unsigned long long result;
20432  __ASM volatile("dpkbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20433  return result;
20434 }
20435 /* ===== Inline Function End for DPKBT16 ===== */
20436 
20437 /* ===== Inline Function Start for DPKTT16 ===== */
20467 __STATIC_FORCEINLINE unsigned long long __RV_DPKTT16(unsigned long long a, unsigned long long b)
20468 {
20469  unsigned long long result;
20470  __ASM volatile("dpktt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20471  return result;
20472 }
20473 /* ===== Inline Function End for DPKTT16 ===== */
20474 
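/* Usage sketch (illustrative, hypothetical helper): the dpk* family packs
 * selected halves of the two operands, the first letter naming the part taken
 * from a and the second the part taken from b (b = bottom, t = top). */
__STATIC_FORCEINLINE unsigned long long pack_low_words(unsigned long long a,
                                                       unsigned long long b)
{
    return __RV_DPKBB32(a, b); /* result = { a[31:0], b[31:0] } */
}
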
20475 /* ===== Inline Function Start for DSRA16 ===== */
20512 __STATIC_FORCEINLINE unsigned long long __RV_DSRA16(unsigned long long a, unsigned long b)
20513 {
20514  unsigned long long result;
20515  __ASM volatile("dsra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20516  return result;
20517 }
20518 /* ===== Inline Function End for DSRA16 ===== */
20519 
20520 /* ===== Inline Function Start for DADD16 ===== */
20550 __STATIC_FORCEINLINE unsigned long long __RV_DADD16(unsigned long long a, unsigned long long b)
20551 {
20552  unsigned long long result;
20553  __ASM volatile("dadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20554  return result;
20555 }
20556 /* ===== Inline Function End for DADD16 ===== */
20557 
20558 /* ===== Inline Function Start for DADD32 ===== */
20588 __STATIC_FORCEINLINE unsigned long long __RV_DADD32(unsigned long long a, unsigned long long b)
20589 {
20590  unsigned long long result;
20591  __ASM volatile("dadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20592  return result;
20593 }
20594 /* ===== Inline Function End for DADD32 ===== */
20595 
20596 /* ===== Inline Function Start for DSMBB16 ===== */
20629 __STATIC_FORCEINLINE unsigned long long __RV_DSMBB16(unsigned long long a, unsigned long long b)

20630 {
20631  unsigned long long result;
20632  __ASM volatile("dsmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20633  return result;
20634 }
20635 /* ===== Inline Function End for DSMBB16 ===== */
20636 
20637 /* ===== Inline Function Start for DSMBT16 ===== */
20670 __STATIC_FORCEINLINE unsigned long long __RV_DSMBT16(unsigned long long a, unsigned long long b)
20671 {
20672  unsigned long long result;
20673  __ASM volatile("dsmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20674  return result;
20675 }
20676 /* ===== Inline Function End for DSMBT16 ===== */
20677 
20678 /* ===== Inline Function Start for DSMTT16 ===== */
20711 __STATIC_FORCEINLINE unsigned long long __RV_DSMTT16(unsigned long long a, unsigned long long b)
20712 {
20713  unsigned long long result;
20714  __ASM volatile("dsmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20715  return result;
20716 }
20717 /* ===== Inline Function End for DSMTT16 ===== */
20718 
20719 /* ===== Inline Function Start for DRCRSA16 ===== */
20753 __STATIC_FORCEINLINE unsigned long long __RV_DRCRSA16(unsigned long long a, unsigned long long b)
20754 {
20755  unsigned long long result;
20756  __ASM volatile("drcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20757  return result;
20758 }
20759 /* ===== Inline Function End for DRCRSA16 ===== */
20760 
20761 /* ===== Inline Function Start for DRCRSA32 ===== */
20794 __STATIC_FORCEINLINE unsigned long long __RV_DRCRSA32(unsigned long long a, unsigned long long b)
20795 {
20796  unsigned long long result;
20797  __ASM volatile("drcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20798  return result;
20799 }
20800 /* ===== Inline Function End for DRCRSA32 ===== */
20801 
20802 /* ===== Inline Function Start for DRCRAS16 ===== */
20836 __STATIC_FORCEINLINE unsigned long long __RV_DRCRAS16(unsigned long long a, unsigned long long b)
20837 {
20838  unsigned long long result;
20839  __ASM volatile("drcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20840  return result;
20841 }
20842 /* ===== Inline Function End for DRCRAS16 ===== */
20843 
20844 /* ===== Inline Function Start for DRCRAS32 ===== */
20877 __STATIC_FORCEINLINE unsigned long long __RV_DRCRAS32(unsigned long long a, unsigned long long b)
20878 {
20879  unsigned long long result;
20880  __ASM volatile("drcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20881  return result;
20882 }
20883 /* ===== Inline Function End for DRCRAS32 ===== */
20884 
20885 /* ===== Inline Function Start for DKCRAS16 ===== */
20932 __STATIC_FORCEINLINE unsigned long long __RV_DKCRAS16(unsigned long long a, unsigned long long b)
20933 {
20934  unsigned long long result;
20935  __ASM volatile("dkcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20936  return result;
20937 }
20938 /* ===== Inline Function End for DKCRAS16 ===== */
20939 
20940 /* ===== Inline Function Start for DKCRSA16 ===== */
20987 __STATIC_FORCEINLINE unsigned long long __RV_DKCRSA16(unsigned long long a, unsigned long long b)
20988 {
20989  unsigned long long result;
20990  __ASM volatile("dkcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
20991  return result;
20992 }
20993 /* ===== Inline Function End for DKCRSA16 ===== */
20994 
20995 /* ===== Inline Function Start for DRSUB16 ===== */
21025 __STATIC_FORCEINLINE unsigned long long __RV_DRSUB16(unsigned long long a, unsigned long long b)
21026 {
21027  unsigned long long result;
21028  __ASM volatile("drsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21029  return result;
21030 }
21031 /* ===== Inline Function End for DRSUB16 ===== */
21032 
21033 /* ===== Inline Function Start for DSTSA32 ===== */
21065 __STATIC_FORCEINLINE unsigned long long __RV_DSTSA32(unsigned long long a, unsigned long long b)
21066 {
21067  unsigned long long result;
21068  __ASM volatile("dstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21069  return result;
21070 }
21071 /* ===== Inline Function End for DSTSA32 ===== */
21072 
21073 /* ===== Inline Function Start for DSTAS32 ===== */
21105 __STATIC_FORCEINLINE unsigned long long __RV_DSTAS32(unsigned long long a, unsigned long long b)
21106 {
21107  unsigned long long result;
21108  __ASM volatile("dstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21109  return result;
21110 }
21111 /* ===== Inline Function End for DSTAS32 ===== */
21112 
21113 /* ===== Inline Function Start for DKCRSA32 ===== */
21155 __STATIC_FORCEINLINE unsigned long long __RV_DKCRSA32(unsigned long long a, unsigned long long b)
21156 {
21157  unsigned long long result;
21158  __ASM volatile("dkcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21159  return result;
21160 }
21161 /* ===== Inline Function End for DKCRSA32 ===== */
21162 
21163 /* ===== Inline Function Start for DKCRAS32 ===== */
21205 __STATIC_FORCEINLINE unsigned long long __RV_DKCRAS32(unsigned long long a, unsigned long long b)
21206 {
21207  unsigned long long result;
21208  __ASM volatile("dkcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21209  return result;
21210 }
21211 /* ===== Inline Function End for DKCRAS32 ===== */
21212 
21213 /* ===== Inline Function Start for DCRSA32 ===== */
21245 __STATIC_FORCEINLINE unsigned long long __RV_DCRSA32(unsigned long long a, unsigned long long b)
21246 {
21247  unsigned long long result;
21248  __ASM volatile("dcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21249  return result;
21250 }
21251 /* ===== Inline Function End for DCRSA32 ===== */
21252 
21253 /* ===== Inline Function Start for DCRAS32 ===== */
21285 __STATIC_FORCEINLINE unsigned long long __RV_DCRAS32(unsigned long long a, unsigned long long b)
21286 {
21287  unsigned long long result;
21288  __ASM volatile("dcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21289  return result;
21290 }
21291 /* ===== Inline Function End for DCRAS32 ===== */
21292 
21293 /* ===== Inline Function Start for DKSTSA16 ===== */
21340 __STATIC_FORCEINLINE unsigned long long __RV_DKSTSA16(unsigned long long a, unsigned long long b)
21341 {
21342  unsigned long long result;
21343  __ASM volatile("dkstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21344  return result;
21345 }
21346 /* ===== Inline Function End for DKSTSA16 ===== */
21347 
21348 /* ===== Inline Function Start for DKSTAS16 ===== */
21395 __STATIC_FORCEINLINE unsigned long long __RV_DKSTAS16(unsigned long long a, unsigned long long b)
21396 {
21397  unsigned long long result;
21398  __ASM volatile("dkstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21399  return result;
21400 }
21401 /* ===== Inline Function End for DKSTAS16 ===== */
21402 
21403 /* ===== Inline Function Start for DSCLIP8 ===== */
21442 #define __RV_DSCLIP8(a, b) \
21443  ({ \
21444  unsigned long long __res; \
21445  unsigned long long __a = (unsigned long long)(a); \
21446  __ASM volatile("dsclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21447  __res; \
21448  })
21449 /* ===== Inline Function End for DSCLIP8 ===== */
21450 
21451 /* ===== Inline Function Start for DSCLIP16 ===== */
21490 #define __RV_DSCLIP16(a, b) \
21491  ({ \
21492  unsigned long long __res; \
21493  unsigned long long __a = (unsigned long long)(a); \
21494  __ASM volatile("dsclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21495  __res; \
21496  })
21497 /* ===== Inline Function End for DSCLIP16 ===== */
21498 
21499 /* ===== Inline Function Start for DSCLIP32 ===== */
21538 #define __RV_DSCLIP32(a, b) \
21539  ({ \
21540  unsigned long long __res; \
21541  unsigned long long __a = (unsigned long long)(a); \
21542  __ASM volatile("dsclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21543  __res; \
21544  })
21545 /* ===== Inline Function End for DSCLIP32 ===== */
21546 
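/* Usage sketch (illustrative, hypothetical helper): the dsclip* operations
 * take their clip width as an instruction immediate, which is why they are
 * macros with a "K" constraint rather than inline functions - the second
 * argument must be a compile-time constant. Each signed lane is clamped to
 * [-2^b, 2^b - 1]. */
__STATIC_FORCEINLINE unsigned long long clip_to_q7_x4(unsigned long long v)
{
    return __RV_DSCLIP16(v, 7); /* every 16-bit lane confined to [-128, 127] */
}
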
21547 /* ===== Inline Function Start for DRSUB32 ===== */
21577 __STATIC_FORCEINLINE unsigned long long __RV_DRSUB32(unsigned long long a, unsigned long long b)
21578 {
21579  unsigned long long result;
21580  __ASM volatile("drsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21581  return result;
21582 }
21583 /* ===== Inline Function End for DRSUB32 ===== */
21584 
21585 /* ===== Inline Function Start for DPACK32 ===== */
21613 __STATIC_FORCEINLINE unsigned long long __RV_DPACK32(signed long a, signed long b)
21614 {
21615  unsigned long long result;
21616  __ASM volatile("dpack32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
21617  return result;
21618 }
21619 /* ===== Inline Function End for DPACK32 ===== */
21620 
21621 /* ===== Inline Function Start for DSUNPKD810 ===== */
21650 __STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD810(unsigned long long a)
21651 {
21652  unsigned long long result;
21653  __ASM volatile("dsunpkd810 %0, %1" : "=r"(result) : "r"(a));
21654  return result;
21655 }
21656 /* ===== Inline Function End for DSUNPKD810 ===== */
21657 
21658 /* ===== Inline Function Start for DSUNPKD820 ===== */
21687 __STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD820(unsigned long long a)
21688 {
21689  unsigned long long result;
21690  __ASM volatile("dsunpkd820 %0, %1" : "=r"(result) : "r"(a));
21691  return result;
21692 }
21693 /* ===== Inline Function End for DSUNPKD820 ===== */
21694 
21695 /* ===== Inline Function Start for DSUNPKD830 ===== */
21724 __STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD830(unsigned long long a)
21725 {
21726  unsigned long long result;
21727  __ASM volatile("dsunpkd830 %0, %1" : "=r"(result) : "r"(a));
21728  return result;
21729 }
21730 /* ===== Inline Function End for DSUNPKD830 ===== */
21731 
21732 /* ===== Inline Function Start for DSUNPKD831 ===== */
21761 __STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD831(unsigned long long a)
21762 {
21763  unsigned long long result;
21764  __ASM volatile("dsunpkd831 %0, %1" : "=r"(result) : "r"(a));
21765  return result;
21766 }
21767 /* ===== Inline Function End for DSUNPKD831 ===== */
21768 
21769 /* ===== Inline Function Start for DSUNPKD832 ===== */
21798 __STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD832(unsigned long long a)
21799 {
21800  unsigned long long result;
21801  __ASM volatile("dsunpkd832 %0, %1" : "=r"(result) : "r"(a));
21802  return result;
21803 }
21804 /* ===== Inline Function End for DSUNPKD832 ===== */
21805 
21806 /* ===== Inline Function Start for DZUNPKD810 ===== */
21835 __STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD810(unsigned long long a)
21836 {
21837  unsigned long long result;
21838  __ASM volatile("dzunpkd810 %0, %1" : "=r"(result) : "r"(a));
21839  return result;
21840 }
21841 /* ===== Inline Function End for DZUNPKD810 ===== */
21842 
21843 /* ===== Inline Function Start for DZUNPKD820 ===== */
21872 __STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD820(unsigned long long a)
21873 {
21874  unsigned long long result;
21875  __ASM volatile("dzunpkd820 %0, %1" : "=r"(result) : "r"(a));
21876  return result;
21877 }
21878 /* ===== Inline Function End for DZUNPKD820 ===== */
21879 
21880 /* ===== Inline Function Start for DZUNPKD830 ===== */
21909 __STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD830(unsigned long long a)
21910 {
21911  unsigned long long result;
21912  __ASM volatile("dzunpkd830 %0, %1" : "=r"(result) : "r"(a));
21913  return result;
21914 }
21915 /* ===== Inline Function End for DZUNPKD830 ===== */
21916 
21917 /* ===== Inline Function Start for DZUNPKD831 ===== */
21946 __STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD831(unsigned long long a)
21947 {
21948  unsigned long long result;
21949  __ASM volatile("dzunpkd831 %0, %1" : "=r"(result) : "r"(a));
21950  return result;
21951 }
21952 /* ===== Inline Function End for DZUNPKD831 ===== */
21953 
21954 /* ===== Inline Function Start for DZUNPKD832 ===== */
21983 __STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD832(unsigned long long a)
21984 {
21985  unsigned long long result;
21986  __ASM volatile("dzunpkd832 %0, %1" : "=r"(result) : "r"(a));
21987  return result;
21988 }
21989 /* ===== Inline Function End for DZUNPKD832 ===== */
21990 
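/* Usage sketch (illustrative, hypothetical helper): dsunpkd8xy sign-extends
 * bytes x and y of each 32-bit word into two 16-bit lanes, and dzunpkd8xy
 * zero-extends them, so a block of int8 samples can be widened to 16-bit
 * lanes in a single instruction. */
__STATIC_FORCEINLINE unsigned long long widen_s8_pairs(unsigned long long packed)
{
    return __RV_DSUNPKD810(packed); /* per word: { sext(byte1), sext(byte0) } */
}
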
21991 /* ===== Inline Function Start for DKMMAC ===== */
22031 __STATIC_FORCEINLINE unsigned long long __RV_DKMMAC(unsigned long long t, unsigned long long a, unsigned long long b)
22032 {
22033  __ASM volatile("dkmmac %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22034  return t;
22035 }
22036 /* ===== Inline Function End for DKMMAC ===== */
22037 
22038 /* ===== Inline Function Start for DKMMAC.u ===== */
22078 __STATIC_FORCEINLINE unsigned long long __RV_DKMMAC_U(unsigned long long t, unsigned long long a, unsigned long long b)
22079 {
22080  __ASM volatile("dkmmac.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22081  return t;
22082 }
22083 /* ===== Inline Function End for DKMMAC.u ===== */
22084 
22085 /* ===== Inline Function Start for DKMMSB ===== */
22125 __STATIC_FORCEINLINE unsigned long long __RV_DKMMSB(unsigned long long t, unsigned long long a, unsigned long long b)
22126 {
22127  __ASM volatile("dkmmsb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22128  return t;
22129 }
22130 /* ===== Inline Function End for DKMMSB ===== */
22131 
22132 /* ===== Inline Function Start for DKMMSB.u ===== */
22172 __STATIC_FORCEINLINE unsigned long long __RV_DKMMSB_U(unsigned long long t, unsigned long long a, unsigned long long b)
22173 {
22174  __ASM volatile("dkmmsb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22175  return t;
22176 }
22177 /* ===== Inline Function End for DKMMSB.u ===== */
22178 
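/* Usage sketch (illustrative, hypothetical helper): dkmmac/dkmmsb accumulate
 * (or subtract) the most significant word of each lane's 32x32 product into t
 * with Q31 saturation; the .u forms round the discarded bits. Note the
 * accumulator is passed in and returned, mirroring the instruction's
 * read-modify-write destination register. */
__STATIC_FORCEINLINE unsigned long long q31_mac_x2(unsigned long long acc,
                                                   unsigned long long a,
                                                   unsigned long long b)
{
    return __RV_DKMMAC_U(acc, a, b); /* per lane: acc += round((a*b) >> 32) */
}
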
22179 /* ===== Inline Function Start for DKMADA ===== */
22219 __STATIC_FORCEINLINE unsigned long long __RV_DKMADA(unsigned long long t, unsigned long long a, unsigned long long b)
22220 {
22221  __ASM volatile("dkmada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22222  return t;
22223 }
22224 /* ===== Inline Function End for DKMADA ===== */
22225 
22226 /* ===== Inline Function Start for DKMAXDA ===== */
22265 __STATIC_FORCEINLINE unsigned long long __RV_DKMAXDA(unsigned long long t, unsigned long long a, unsigned long long b)
22266 {
22267  __ASM volatile("dkmaxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22268  return t;
22269 }
22270 /* ===== Inline Function End for DKMAXDA ===== */
22271 
22272 /* ===== Inline Function Start for DKMADS ===== */
22312 __STATIC_FORCEINLINE unsigned long long __RV_DKMADS(unsigned long long t, unsigned long long a, unsigned long long b)
22313 {
22314  __ASM volatile("dkmads %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22315  return t;
22316 }
22317 /* ===== Inline Function End for DKMADS ===== */
22318 
22319 /* ===== Inline Function Start for DKMADRS ===== */
22359 __STATIC_FORCEINLINE unsigned long long __RV_DKMADRS(unsigned long long t, unsigned long long a, unsigned long long b)
22360 {
22361  __ASM volatile("dkmadrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22362  return t;
22363 }
22364 /* ===== Inline Function End for DKMADRS ===== */
22365 
22366 /* ===== Inline Function Start for DKMAXDS ===== */
22405 __STATIC_FORCEINLINE unsigned long long __RV_DKMAXDS(unsigned long long t, unsigned long long a, unsigned long long b)
22406 {
22407  __ASM volatile("dkmaxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22408  return t;
22409 }
22410 /* ===== Inline Function End for DKMAXDS ===== */
22411 
22412 /* ===== Inline Function Start for DKMSDA ===== */
22451 __STATIC_FORCEINLINE unsigned long long __RV_DKMSDA(unsigned long long t, unsigned long long a, unsigned long long b)
22452 {
22453  __ASM volatile("dkmsda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22454  return t;
22455 }
22456 /* ===== Inline Function End for DKMSDA ===== */
22457 
22458 /* ===== Inline Function Start for DKMSXDA ===== */
22497 __STATIC_FORCEINLINE unsigned long long __RV_DKMSXDA(unsigned long long t, unsigned long long a, unsigned long long b)
22498 {
22499  __ASM volatile("dkmsxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22500  return t;
22501 }
22502 /* ===== Inline Function End for DKMSXDA ===== */
22503 
22504 /* ===== Inline Function Start for DSMAQA ===== */
22546 __STATIC_FORCEINLINE unsigned long long __RV_DSMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
22547 {
22548  __ASM volatile("dsmaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22549  return t;
22550 }
22551 /* ===== Inline Function End for DSMAQA ===== */
22552 
22553 /* ===== Inline Function Start for DSMAQA.SU ===== */
22595 __STATIC_FORCEINLINE unsigned long long __RV_DSMAQA_SU(unsigned long long t, unsigned long long a, unsigned long long b)
22596 {
22597  __ASM volatile("dsmaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22598  return t;
22599 }
22600 /* ===== Inline Function End for DSMAQA.SU ===== */
22601 
22602 /* ===== Inline Function Start for DUMAQA ===== */
22644 __STATIC_FORCEINLINE unsigned long long __RV_DUMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
22645 {
22646  __ASM volatile("dumaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22647  return t;
22648 }
22649 /* ===== Inline Function End for DUMAQA ===== */
22650 
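/* Usage sketch (illustrative, hypothetical helper): dsmaqa accumulates four
 * signed 8x8 products per 32-bit lane into that lane of t (dumaqa is the
 * unsigned form, .su mixes signed a with unsigned b), giving an eight-tap
 * int8 multiply-accumulate per call across the 64-bit operands. */
__STATIC_FORCEINLINE unsigned long long dot_s8_step(unsigned long long acc,
                                                    unsigned long long x,
                                                    unsigned long long w)
{
    return __RV_DSMAQA(acc, x, w); /* per lane: acc += x0*w0 + x1*w1 + x2*w2 + x3*w3 */
}
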
22651 /* ===== Inline Function Start for DKMDA32 ===== */
22685 __STATIC_FORCEINLINE long long __RV_DKMDA32(unsigned long long a, unsigned long long b)
22686 {
22687  long long result;
22688  __ASM volatile("dkmda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
22689  return result;
22690 }
22691 /* ===== Inline Function End for DKMDA32 ===== */
22692 
22693 /* ===== Inline Function Start for DKMXDA32 ===== */
22727 __STATIC_FORCEINLINE long long __RV_DKMXDA32(unsigned long long a, unsigned long long b)
22728 {
22729  long long result;
22730  __ASM volatile("dkmxda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
22731  return result;
22732 }
22733 /* ===== Inline Function End for DKMXDA32 ===== */
22734 
22735 /* ===== Inline Function Start for DKMADA32 ===== */
22770 __STATIC_FORCEINLINE long long __RV_DKMADA32(long long t, unsigned long long a, unsigned long long b)
22771 {
22772  __ASM volatile("dkmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22773  return t;
22774 }
22775 /* ===== Inline Function End for DKMADA32 ===== */
22776 
22777 /* ===== Inline Function Start for DKMAXDA32 ===== */
22813 __STATIC_FORCEINLINE long long __RV_DKMAXDA32(long long t, unsigned long long a, unsigned long long b)
22814 {
22815  __ASM volatile("dkmaxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22816  return t;
22817 }
22818 /* ===== Inline Function End for DKMAXDA32 ===== */
22819 
22820 /* ===== Inline Function Start for DKMADS32 ===== */
22857 __STATIC_FORCEINLINE long long __RV_DKMADS32(long long t, unsigned long long a, unsigned long long b)
22858 {
22859  __ASM volatile("dkmads32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22860  return t;
22861 }
22862 /* ===== Inline Function End for DKMADS32 ===== */
22863 
22864 /* ===== Inline Function Start for DKMADRS32 ===== */
22901 __STATIC_FORCEINLINE long long __RV_DKMADRS32(long long t, unsigned long long a, unsigned long long b)
22902 {
22903  __ASM volatile("dkmadrs32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22904  return t;
22905 }
22906 /* ===== Inline Function End for DKMADRS32 ===== */
22907 
22908 /* ===== Inline Function Start for DKMAXDS32 ===== */
22945 __STATIC_FORCEINLINE long long __RV_DKMAXDS32(long long t, unsigned long long a, unsigned long long b)
22946 {
22947  __ASM volatile("dkmaxds32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22948  return t;
22949 }
22950 /* ===== Inline Function End for DKMAXDS32 ===== */
22951 
22952 /* ===== Inline Function Start for DKMSDA32 ===== */
22988 __STATIC_FORCEINLINE long long __RV_DKMSDA32(long long t, unsigned long long a, unsigned long long b)
22989 {
22990  __ASM volatile("dkmsda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
22991  return t;
22992 }
22993 /* ===== Inline Function End for DKMSDA32 ===== */
22994 
22995 /* ===== Inline Function Start for DKMSXDA32 ===== */
23031 __STATIC_FORCEINLINE long long __RV_DKMSXDA32(long long t, unsigned long long a, unsigned long long b)
23032 {
23033  __ASM volatile("dkmsxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23034  return t;
23035 }
23036 /* ===== Inline Function End for DKMSXDA32 ===== */
23037 
23038 /* ===== Inline Function Start for DSMDS32 ===== */
23074 __STATIC_FORCEINLINE long long __RV_DSMDS32(unsigned long long a, unsigned long long b)
23075 {
23076  long long result;
23077  __ASM volatile("dsmds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23078  return result;
23079 }
23080 /* ===== Inline Function End for DSMDS32 ===== */
23081 
23082 /* ===== Inline Function Start for DSMDRS32 ===== */
23117 __STATIC_FORCEINLINE long long __RV_DSMDRS32(unsigned long long a, unsigned long long b)
23118 {
23119  long long result;
23120  __ASM volatile("dsmdrs32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23121  return result;
23122 }
23123 /* ===== Inline Function End for DSMDRS32 ===== */
23124 
23125 /* ===== Inline Function Start for DSMXDS32 ===== */
23161 __STATIC_FORCEINLINE long long __RV_DSMXDS32(unsigned long long a, unsigned long long b)
23162 {
23163  long long result;
23164  __ASM volatile("dsmxds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23165  return result;
23166 }
23167 /* ===== Inline Function End for DSMXDS32 ===== */
23168 
23169 /* ===== Inline Function Start for DSMALDA ===== */
23208 __STATIC_FORCEINLINE long long __RV_DSMALDA(long long t, unsigned long long a, unsigned long long b)
23209 {
23210  __ASM volatile("dsmalda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23211  return t;
23212 }
23213 /* ===== Inline Function End for DSMALDA ===== */
23214 
23215 /* ===== Inline Function Start for DSMALXDA ===== */
23254 __STATIC_FORCEINLINE long long __RV_DSMALXDA(long long t, unsigned long long a, unsigned long long b)
23255 {
23256  __ASM volatile("dsmalxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23257  return t;
23258 }
23259 /* ===== Inline Function End for DSMALXDA ===== */
23260 
23261 /* ===== Inline Function Start for DSMALDS ===== */
23300 __STATIC_FORCEINLINE long long __RV_DSMALDS(long long t, unsigned long long a, unsigned long long b)
23301 {
23302  __ASM volatile("dsmalds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23303  return t;
23304 }
23305 /* ===== Inline Function End for DSMALDS ===== */
23306 
23307 /* ===== Inline Function Start for DSMALDRS ===== */
23346 __STATIC_FORCEINLINE long long __RV_DSMALDRS(long long t, unsigned long long a, unsigned long long b)
23347 {
23348  __ASM volatile("dsmaldrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23349  return t;
23350 }
23351 /* ===== Inline Function End for DSMALDRS ===== */
23352 
23353 /* ===== Inline Function Start for DSMALXDS ===== */
23392 __STATIC_FORCEINLINE long long __RV_DSMALXDS(long long t, unsigned long long a, unsigned long long b)
23393 {
23394  __ASM volatile("dsmalxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23395  return t;
23396 }
23397 /* ===== Inline Function End for DSMALXDS ===== */
23398 
23399 /* ===== Inline Function Start for DSMSLDA ===== */
23437 __STATIC_FORCEINLINE long long __RV_DSMSLDA(long long t, unsigned long long a, unsigned long long b)
23438 {
23439  __ASM volatile("dsmslda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23440  return t;
23441 }
23442 /* ===== Inline Function End for DSMSLDA ===== */
23443 
23444 /* ===== Inline Function Start for DSMSLXDA ===== */
23482 __STATIC_FORCEINLINE long long __RV_DSMSLXDA(long long t, unsigned long long a, unsigned long long b)
23483 {
23484  __ASM volatile("dsmslxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23485  return t;
23486 }
23487 /* ===== Inline Function End for DSMSLXDA ===== */
23488 
23489 /* ===== Inline Function Start for DDSMAQA ===== */
23533 __STATIC_FORCEINLINE long long __RV_DDSMAQA(long long t, unsigned long long a, unsigned long long b)
23534 {
23535  __ASM volatile("ddsmaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23536  return t;
23537 }
23538 /* ===== Inline Function End for DDSMAQA ===== */
23539 
23540 /* ===== Inline Function Start for DDSMAQA.SU ===== */
23584 __STATIC_FORCEINLINE long long __RV_DDSMAQA_SU(long long t, unsigned long long a, unsigned long long b)
23585 {
23586  __ASM volatile("ddsmaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23587  return t;
23588 }
23589 /* ===== Inline Function End for DDSMAQA.SU ===== */
23590 
23591 /* ===== Inline Function Start for DDUMAQA ===== */
23635 __STATIC_FORCEINLINE long long __RV_DDUMAQA(long long t, unsigned long long a, unsigned long long b)
23636 {
23637  __ASM volatile("ddumaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23638  return t;
23639 }
23640 /* ===== Inline Function End for DDUMAQA ===== */
23641 
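/* Usage sketch (illustrative, hypothetical helper): the dd* forms differ from
 * dsmaqa/dumaqa in that all eight 8x8 products appear to be summed into one
 * 64-bit accumulator rather than per 32-bit lane, so a whole int8 dot product
 * can stay in a single register pair. */
__STATIC_FORCEINLINE long long dot_s8x8(long long acc, unsigned long long x,
                                        unsigned long long w)
{
    return __RV_DDSMAQA(acc, x, w); /* acc += sum of eight signed byte products */
}
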
23642 /* ===== Inline Function Start for DSMA32.u ===== */
23675 __STATIC_FORCEINLINE long __RV_DSMA32_U(unsigned long long a, unsigned long long b)
23676 {
23677  long result;
23678  __ASM volatile("dsma32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23679  return result;
23680 }
23681 /* ===== Inline Function End for DSMA32.u ===== */
23682 
23683 /* ===== Inline Function Start for DSMXS32.u ===== */
23716 __STATIC_FORCEINLINE long __RV_DSMXS32_U(unsigned long long a, unsigned long long b)
23717 {
23718  long result;
23719  __ASM volatile("dsmxs32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23720  return result;
23721 }
23722 /* ===== Inline Function End for DSMXS32.u ===== */
23723 
23724 /* ===== Inline Function Start for DSMXA32.u ===== */
23757 __STATIC_FORCEINLINE long __RV_DSMXA32_U(unsigned long long a, unsigned long long b)
23758 {
23759  long result;
23760  __ASM volatile("dsmxa32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23761  return result;
23762 }
23763 /* ===== Inline Function End for DSMXA32.u ===== */
23764 
23765 /* ===== Inline Function Start for DSMS32.u ===== */
23798 __STATIC_FORCEINLINE long __RV_DSMS32_U(unsigned long long a, unsigned long long b)
23799 {
23800  long result;
23801  __ASM volatile("dsms32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
23802  return result;
23803 }
23804 /* ===== Inline Function End for DSMS32.u ===== */
23805 
23806 /* ===== Inline Function Start for DSMADA16 ===== */
23843 __STATIC_FORCEINLINE long __RV_DSMADA16(long long t, unsigned long long a, unsigned long long b)
23844 {
23845  __ASM volatile("dsmada16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23846  return (long)t;
23847 }
23848 /* ===== Inline Function End for DSMADA16 ===== */
23849 
23850 /* ===== Inline Function Start for DSMAXDA16 ===== */
23887 __STATIC_FORCEINLINE long __RV_DSMAXDA16(long long t, unsigned long long a, unsigned long long b)
23888 {
23889  __ASM volatile("dsmaxda16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23890  return (long)t;
23891 }
23892 /* ===== Inline Function End for DSMAXDA16 ===== */
23893 
23894 /* ===== Inline Function Start for DKSMS32.u ===== */
23926 __STATIC_FORCEINLINE unsigned long long __RV_DKSMS32_U(unsigned long long t, unsigned long long a, unsigned long long b)
23927 {
23928  __ASM volatile("dksms32.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23929  return t;
23930 }
23931 /* ===== Inline Function End for DKSMS32.u ===== */
23932 
23933 /* ===== Inline Function Start for DMADA32 ===== */
23965 __STATIC_FORCEINLINE long __RV_DMADA32(long long t, unsigned long long a, unsigned long long b)
23966 {
23967  __ASM volatile("dmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
23968  return (long)t;
23969 }
23970 /* ===== Inline Function End for DMADA32 ===== */
23971 
23972 /* ===== Inline Function Start for DSMALBB ===== */
24006 __STATIC_FORCEINLINE long long __RV_DSMALBB(long long t, unsigned long long a, unsigned long long b)
24007 {
24008  __ASM volatile("dsmalbb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24009  return t;
24010 }
24011 /* ===== Inline Function End for DSMALBB ===== */
24012 
24013 /* ===== Inline Function Start for DSMALBT ===== */
24049 __STATIC_FORCEINLINE long long __RV_DSMALBT(long long t, unsigned long long a, unsigned long long b)
24050 {
24051  __ASM volatile("dsmalbt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24052  return t;
24053 }
24054 /* ===== Inline Function End for DSMALBT ===== */
24055 
24056 /* ===== Inline Function Start for DSMALTT ===== */
24092 __STATIC_FORCEINLINE long long __RV_DSMALTT(long long t, unsigned long long a, unsigned long long b)
24093 {
24094  __ASM volatile("dsmaltt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24095  return t;
24096 }
24097 /* ===== Inline Function End for DSMALTT ===== */
24098 
24099 /* ===== Inline Function Start for DKMABB32 ===== */
24140 __STATIC_FORCEINLINE long long __RV_DKMABB32(long long t, unsigned long long a, unsigned long long b)
24141 {
24142  __ASM volatile("dkmabb32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24143  return t;
24144 }
24145 /* ===== Inline Function End for DKMABB32 ===== */
24146 
24147 /* ===== Inline Function Start for DKMABT32 ===== */
24188 __STATIC_FORCEINLINE long long __RV_DKMABT32(long long t, unsigned long long a, unsigned long long b)
24189 {
24190  __ASM volatile("dkmabt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24191  return t;
24192 }
24193 /* ===== Inline Function End for DKMABT32 ===== */
24194 
24195 /* ===== Inline Function Start for DKMATT32 ===== */
24236 __STATIC_FORCEINLINE long long __RV_DKMATT32(long long t, unsigned long long a, unsigned long long b)
24237 {
24238  __ASM volatile("dkmatt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
24239  return t;
24240 }
24241 /* ===== Inline Function End for DKMATT32 ===== */
24242 #endif /* __RISCV_XLEN == 32 */
24243 
24244 #elif defined (__ICCRISCV__)
24245 
24246 #if __riscv_xlen == 32
24247 #include "iar_nds32_intrinsic.h"
24248 #elif __riscv_xlen == 64
24249 #include "iar_nds64_intrinsic.h"
24250 #else
24251 #error "Unexpected RISC-V XLEN size."
24252 #endif /* __riscv_xlen == 32 */
24253 
24254 #pragma language=save
24255 #pragma language=extended
24256 
24257 // Map the NMSIS-Core intrinsic names onto the compatible intrinsics supplied by IAR
24258 #define __RV_CLROV __nds__clrov
24259 #define __RV_RDOV __nds__rdov
24260 #define __RV_ADD8 __nds__add8
24261 #define __RV_SUB8 __nds__sub8
24262 #define __RV_ADD16 __nds__add16
24263 #define __RV_SUB16 __nds__sub16
24264 #define __RV_ADD64 __nds__add64
24265 #define __RV_SUB64 __nds__sub64
24266 #define __RV_RADD8 __nds__radd8
24267 #define __RV_RSUB8 __nds__rsub8
24268 #define __RV_RADD16 __nds__radd16
24269 #define __RV_RSUB16 __nds__rsub16
24270 #define __RV_RADD64 __nds__radd64
24271 #define __RV_RSUB64 __nds__rsub64
24272 #define __RV_RADDW __nds__raddw
24273 #define __RV_RSUBW __nds__rsubw
24274 #define __RV_URADD8 __nds__uradd8
24275 #define __RV_URSUB8 __nds__ursub8
24276 #define __RV_URADD16 __nds__uradd16
24277 #define __RV_URSUB16 __nds__ursub16
24278 #define __RV_URADD64 __nds__uradd64
24279 #define __RV_URSUB64 __nds__ursub64
24280 #define __RV_URADDW __nds__uraddw
24281 #define __RV_URSUBW __nds__ursubw
24282 #define __RV_KADD8 __nds__kadd8
24283 #define __RV_KSUB8 __nds__ksub8
24284 #define __RV_KADD16 __nds__kadd16
24285 #define __RV_KSUB16 __nds__ksub16
24286 #define __RV_KADD64 __nds__kadd64
24287 #define __RV_KSUB64 __nds__ksub64
24288 #define __RV_KADDH __nds__kaddh
24289 #define __RV_KSUBH __nds__ksubh
24290 #define __RV_KADDW __nds__kaddw
24291 #define __RV_KSUBW __nds__ksubw
24292 #define __RV_UKADD8 __nds__ukadd8
24293 #define __RV_UKSUB8 __nds__uksub8
24294 #define __RV_UKADD16 __nds__ukadd16
24295 #define __RV_UKSUB16 __nds__uksub16
24296 #define __RV_UKADD64 __nds__ukadd64
24297 #define __RV_UKSUB64 __nds__uksub64
24298 #define __RV_UKADDH __nds__ukaddh
24299 #define __RV_UKSUBH __nds__uksubh
24300 #define __RV_UKADDW __nds__ukaddw
24301 #define __RV_UKSUBW __nds__uksubw
24302 #define __RV_CRAS16 __nds__cras16
24303 #define __RV_CRSA16 __nds__crsa16
24304 #define __RV_RCRAS16 __nds__rcras16
24305 #define __RV_RCRSA16 __nds__rcrsa16
24306 #define __RV_URCRAS16 __nds__urcras16
24307 #define __RV_URCRSA16 __nds__urcrsa16
24308 #define __RV_KCRAS16 __nds__kcras16
24309 #define __RV_KCRSA16 __nds__kcrsa16
24310 #define __RV_UKCRAS16 __nds__ukcras16
24311 #define __RV_UKCRSA16 __nds__ukcrsa16
24312 #define __RV_SRA8 __nds__sra8
24313 #define __RV_SRAI8 __nds__sra8
24314 #define __RV_SRA16 __nds__sra16
24315 #define __RV_SRAI16 __nds__sra16
24316 #define __RV_SRL8 __nds__srl8
24317 #define __RV_SRL16 __nds__srl16
24318 #define __RV_SLL8 __nds__sll8
24319 #define __RV_SLL16 __nds__sll16
24320 #define __RV_SRA_U __nds__sra_u
24321 #define __RV_SRA8_U __nds__sra8_u
24322 #define __RV_SRA16_U __nds__sra16_u
24323 #define __RV_SRL8_U __nds__srl8_u
24324 #define __RV_SRL16_U __nds__srl16_u
24325 #define __RV_KSLL8 __nds__ksll8
24326 #define __RV_KSLL16 __nds__ksll16
24327 #define __RV_KSLLW __nds__ksllw
24328 #define __RV_KSLRA8 __nds__kslra8
24329 #define __RV_KSLRA8_U __nds__kslra8_u
24330 #define __RV_KSLRA16 __nds__kslra16
24331 #define __RV_KSLRA16_U __nds__kslra16_u
24332 #define __RV_KSLRAW __nds__kslraw
24333 #define __RV_KSLRAW_U __nds__kslraw_u
24334 #define __RV_CMPEQ8 __nds__cmpeq8
24335 #define __RV_CMPEQ16 __nds__cmpeq16
24336 #define __RV_SCMPLE8 __nds__scmple8
24337 #define __RV_SCMPLE16 __nds__scmple16
24338 #define __RV_SCMPLT8 __nds__scmplt8
24339 #define __RV_SCMPLT16 __nds__scmplt16
24340 #define __RV_UCMPLE8 __nds__ucmple8
24341 #define __RV_UCMPLE16 __nds__ucmple16
24342 #define __RV_UCMPLT8 __nds__ucmplt8
24343 #define __RV_UCMPLT16 __nds__ucmplt16
24344 #define __RV_SMUL8 __nds__smul8
24345 #define __RV_UMUL8 __nds__umul8
24346 #define __RV_SMUL16 __nds__smul16
24347 #define __RV_UMUL16 __nds__umul16
24348 #define __RV_SMULX8 __nds__smulx8
24349 #define __RV_UMULX8 __nds__umulx8
24350 #define __RV_SMULX16 __nds__smulx16
24351 #define __RV_UMULX16 __nds__umulx16
24352 #define __RV_KHM8 __nds__khm8
24353 #define __RV_KHMX8 __nds__khmx8
24354 #define __RV_KHM16 __nds__khm16
24355 #define __RV_KHMX16 __nds__khmx16
24356 #define __RV_MULR64 __nds__mulr64
24357 #define __RV_MULSR64 __nds__mulsr64
24358 #define __RV_SMMUL __nds__smmul
24359 #define __RV_SMMUL_U __nds__smmul_u
24360 #define __RV_WEXT __nds__wext
24361 #define __RV_SUNPKD810 __nds__sunpkd810
24362 #define __RV_SUNPKD820 __nds__sunpkd820
24363 #define __RV_SUNPKD830 __nds__sunpkd830
24364 #define __RV_SUNPKD831 __nds__sunpkd831
24365 #define __RV_SUNPKD832 __nds__sunpkd832
24366 #define __RV_ZUNPKD810 __nds__zunpkd810
24367 #define __RV_ZUNPKD820 __nds__zunpkd820
24368 #define __RV_ZUNPKD830 __nds__zunpkd830
24369 #define __RV_ZUNPKD831 __nds__zunpkd831
24370 #define __RV_ZUNPKD832 __nds__zunpkd832
24371 #define __RV_PKBB16 __nds__pkbb16
24372 #define __RV_PKBT16 __nds__pkbt16
24373 #define __RV_PKTT16 __nds__pktt16
24374 #define __RV_PKTB16 __nds__pktb16
24375 #define __RV_KMMAC __nds__kmmac
24376 #define __RV_KMMAC_U __nds__kmmac_u
24377 #define __RV_KMMSB __nds__kmmsb
24378 #define __RV_KMMSB_U __nds__kmmsb_u
24379 #define __RV_KWMMUL __nds__kwmmul
24380 #define __RV_KWMMUL_U __nds__kwmmul_u
24381 #define __RV_SMMWB __nds__smmwb
24382 #define __RV_SMMWB_U __nds__smmwb_u
24383 #define __RV_SMMWT __nds__smmwt
24384 #define __RV_SMMWT_U __nds__smmwt_u
24385 #define __RV_KMMAWB __nds__kmmawb
24386 #define __RV_KMMAWB_U __nds__kmmawb_u
24387 #define __RV_KMMAWT __nds__kmmawt
24388 #define __RV_KMMAWT_U __nds__kmmawt_u
24389 #define __RV_KMMWB2 __nds__kmmwb2
24390 #define __RV_KMMWB2_U __nds__kmmwb2_u
24391 #define __RV_KMMWT2 __nds__kmmwt2
24392 #define __RV_KMMWT2_U __nds__kmmwt2_u
24393 #define __RV_KMMAWB2 __nds__kmmawb2
24394 #define __RV_KMMAWB2_U __nds__kmmawb2_u
24395 #define __RV_KMMAWT2 __nds__kmmawt2
24396 #define __RV_KMMAWT2_U __nds__kmmawt2_u
24397 #define __RV_SMBB16 __nds__smbb16
24398 #define __RV_SMBT16 __nds__smbt16
24399 #define __RV_SMTT16 __nds__smtt16
24400 #define __RV_KMDA __nds__kmda
24401 #define __RV_KMXDA __nds__kmxda
24402 #define __RV_SMDS __nds__smds
24403 #define __RV_SMDRS __nds__smdrs
24404 #define __RV_SMXDS __nds__smxds
24405 #define __RV_KMABB __nds__kmabb
24406 #define __RV_KMABT __nds__kmabt
24407 #define __RV_KMATT __nds__kmatt
24408 #define __RV_KMADA __nds__kmada
24409 #define __RV_KMAXDA __nds__kmaxda
24410 #define __RV_KMADS __nds__kmads
24411 #define __RV_KMADRS __nds__kmadrs
24412 #define __RV_KMAXDS __nds__kmaxds
24413 #define __RV_KMSDA __nds__kmsda
24414 #define __RV_KMSXDA __nds__kmsxda
24415 #define __RV_SMAL __nds__smal
24416 #define __RV_SMAQA __nds__smaqa
24417 #define __RV_UMAQA __nds__umaqa
24418 #define __RV_SMAQA_SU __nds__smaqa_su
24419 #define __RV_SMAR64 __nds__smar64
24420 #define __RV_SMSR64 __nds__smsr64
24421 #define __RV_UMAR64 __nds__umar64
24422 #define __RV_UMSR64 __nds__umsr64
24423 #define __RV_KMAR64 __nds__kmar64
24424 #define __RV_KMSR64 __nds__kmsr64
24425 #define __RV_UKMAR64 __nds__ukmar64
24426 #define __RV_UKMSR64 __nds__ukmsr64
24427 #define __RV_SMALBB __nds__smalbb
24428 #define __RV_SMALBT __nds__smalbt
24429 #define __RV_SMALTT __nds__smaltt
24430 #define __RV_SMALDA __nds__smalda
24431 #define __RV_SMALXDA __nds__smalxda
24432 #define __RV_SMALDS __nds__smalds
24433 #define __RV_SMALDRS __nds__smaldrs
24434 #define __RV_SMALXDS __nds__smalxds
24435 #define __RV_SMSLDA __nds__smslda
24436 #define __RV_SMSLXDA __nds__smslxda
24437 #define __RV_MINW __nds__minw
24438 #define __RV_MAXW __nds__maxw
24439 #define __RV_SMIN8 __nds__smin8
24440 #define __RV_SMAX8 __nds__smax8
24441 #define __RV_SMIN16 __nds__smin16
24442 #define __RV_SMAX16 __nds__smax16
24443 #define __RV_UMIN8 __nds__umin8
24444 #define __RV_UMAX8 __nds__umax8
24445 #define __RV_UMIN16 __nds__umin16
24446 #define __RV_UMAX16 __nds__umax16
24447 #define __RV_KABS8 __nds__kabs8
24448 #define __RV_KABS16 __nds__kabs16
24449 #define __RV_KABSW __nds__kabsw
24450 #define __RV_SCLIP8 __nds__sclip8
24451 #define __RV_SCLIP16 __nds__sclip16
24452 #define __RV_SCLIP32 __nds__sclip32
24453 #define __RV_UCLIP8 __nds__uclip8
24454 #define __RV_UCLIP16 __nds__uclip16
24455 #define __RV_UCLIP32 __nds__uclip32
24456 #define __RV_CLO8 __nds__clo8
24457 #define __RV_CLO16 __nds__clo16
24458 #define __RV_CLO32 __nds__clo32
24459 #define __RV_CLZ8 __nds__clz8
24460 #define __RV_CLZ16 __nds__clz16
24461 #define __RV_CLZ32 __nds__clz32
24462 #define __RV_CLRS8 __nds__clrs8
24463 #define __RV_CLRS16 __nds__clrs16
24464 #define __RV_CLRS32 __nds__clrs32
24465 #define __RV_SWAP8 __nds__swap8
24466 #define __RV_SWAP16 __nds__swap16
24467 #define __RV_KHMBB __nds__khmbb
24468 #define __RV_KHMBT __nds__khmbt
24469 #define __RV_KHMTT __nds__khmtt
24470 #define __RV_KDMBB __nds__kdmbb
24471 #define __RV_KDMBT __nds__kdmbt
24472 #define __RV_KDMTT __nds__kdmtt
24473 #define __RV_KDMABB __nds__kdmabb
24474 #define __RV_KDMABT __nds__kdmabt
24475 #define __RV_KDMATT __nds__kdmatt
24476 #define __RV_MADDR32 __nds__maddr32
24477 #define __RV_MSUBR32 __nds__msubr32
24478 #define __RV_PBSAD __nds__pbsad
24479 #define __RV_PBSADA __nds__pbsada
24480 #define __RV_AVE __nds__ave
24481 #define __RV_BITREV __nds__bitrev
24482 #define __RV_INSB __nds__insb
24483 
24484 #if (__riscv_xlen == 64)
24485 #define __RV_ADD32 __nds__add32
24486 #define __RV_SUB32 __nds__sub32
24487 #define __RV_RADD32 __nds__radd32
24488 #define __RV_RSUB32 __nds__rsub32
24489 #define __RV_URADD32 __nds__uradd32
24490 #define __RV_URSUB32 __nds__ursub32
24491 #define __RV_KADD32 __nds__kadd32
24492 #define __RV_KSUB32 __nds__ksub32
24493 #define __RV_UKADD32 __nds__ukadd32
24494 #define __RV_UKSUB32 __nds__uksub32
24495 #define __RV_CRAS32 __nds__cras32
24496 #define __RV_CRSA32 __nds__crsa32
24497 #define __RV_RCRAS32 __nds__rcras32
24498 #define __RV_RCRSA32 __nds__rcrsa32
24499 #define __RV_URCRAS32 __nds__urcras32
24500 #define __RV_URCRSA32 __nds__urcrsa32
24501 #define __RV_KCRAS32 __nds__kcras32
24502 #define __RV_KCRSA32 __nds__kcrsa32
24503 #define __RV_UKCRAS32 __nds__ukcras32
24504 #define __RV_UKCRSA32 __nds__ukcrsa32
24505 #define __RV_SRA32 __nds__sra32
24506 #define __RV_SRAI32 __nds__sra32
24507 #define __RV_SRL32 __nds__srl32
24508 #define __RV_SLL32 __nds__sll32
24509 #define __RV_SLLI32 __nds__sll32
24510 #define __RV_SRAW_U __nds__sraw_u
24511 #define __RV_SRA32_U __nds__sra32_u
24512 #define __RV_SRL32_U __nds__srl32_u
24513 #define __RV_KSLL32 __nds__ksll32
24514 #define __RV_KSLRA32 __nds__kslra32
24515 #define __RV_KSLRA32_U __nds__kslra32_u
24516 #define __RV_SMBB32 __nds__smbb32
24517 #define __RV_SMBT32 __nds__smbt32
24518 #define __RV_SMTT32 __nds__smtt32
24519 #define __RV_PKBB32 __nds__pkbb32
24520 #define __RV_PKBT32 __nds__pkbt32
24521 #define __RV_PKTT32 __nds__pktt32
24522 #define __RV_PKTB32 __nds__pktb32
24523 #define __RV_SMIN32 __nds__smin32
24524 #define __RV_SMAX32 __nds__smax32
24525 #define __RV_UMIN32 __nds__umin32
24526 #define __RV_UMAX32 __nds__umax32
24527 #define __RV_KABS32 __nds__kabs32
24528 #define __RV_KHMBB16 __nds__khmbb16
24529 #define __RV_KHMBT16 __nds__khmbt16
24530 #define __RV_KHMTT16 __nds__khmtt16
24531 #define __RV_KDMBB16 __nds__kdmbb16
24532 #define __RV_KDMBT16 __nds__kdmbt16
24533 #define __RV_KDMTT16 __nds__kdmtt16
24534 #define __RV_KDMABB16 __nds__kdmabb16
24535 #define __RV_KDMABT16 __nds__kdmabt16
24536 #define __RV_KDMATT16 __nds__kdmatt16
24537 #define __RV_KMABB32 __nds__kmabb32
24538 #define __RV_KMABT32 __nds__kmabt32
24539 #define __RV_KMATT32 __nds__kmatt32
24540 #define __RV_KMDA32 __nds__kmda32
24541 #define __RV_KMXDA32 __nds__kmxda32
24542 #define __RV_KMADA32 __nds__kmada32
24543 #define __RV_KMAXDA32 __nds__kmaxda32
24544 #define __RV_KMADS32 __nds__kmads32
24545 #define __RV_KMADRS32 __nds__kmadrs32
24546 #define __RV_KMAXDS32 __nds__kmaxds32
24547 #define __RV_KMSDA32 __nds__kmsda32
24548 #define __RV_KMSXDA32 __nds__kmsxda32
24549 #define __RV_SMDS32 __nds__smds32
24550 #define __RV_SMDRS32 __nds__smdrs32
24551 #define __RV_SMXDS32 __nds__smxds32
24552 #endif /* __riscv_xlen == 64 */
24553 
24554 // For now, the P-extension version supported by the IAR toolchain is 0.5.0, while
24555 // Nuclei supports 0.5.4, so Nuclei supplies a workaround that adds the custom
24556 // instructions not natively supported by the IAR assembler. Only the Nuclei Xxldsp
24557 // custom instruction subset is implemented here: __RV_BPICK remains to be implemented,
24558 // and the EXPD8x intrinsics are implemented in C rather than via .insn variants.
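// A minimal sketch of the encoding used below (assuming the standard RISC-V
// ".insn r" operand order, which the IAR assembler accepts here):
//   .insn r opcode, funct3, funct7, rd, rs1, rs2
// so ".insn r 0x7F, 0x2, 0x7A, %0,%1,%2" hand-assembles an R-type custom
// instruction with major opcode 0x7F, funct3 0x2 and funct7 0x7A (STAS16);
// only the funct7 field varies across the wrappers that follow.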
24559 
24560 #pragma inline=forced_no_body
24561 unsigned long __RV_STAS16(unsigned long a, unsigned long b) {
24562  unsigned long r;
24563  __asm(".insn r 0x7F, 0x2, 0x7A, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24564  return r;
24565 }
24566 
24567 #pragma inline=forced_no_body
24568 unsigned long __RV_RSTAS16(unsigned long a, unsigned long b) {
24569  unsigned long r;
24570  __asm(".insn r 0x7F, 0x2, 0x5A, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24571  return r;
24572 }
24573 
24574 #pragma inline=forced_no_body
24575 unsigned long __RV_KSTAS16(unsigned long a, unsigned long b) {
24576  unsigned long r;
24577  __asm(".insn r 0x7F, 0x2, 0x62, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24578  return r;
24579 }
24580 
24581 #pragma inline=forced_no_body
24582 unsigned long __RV_URSTAS16(unsigned long a, unsigned long b) {
24583  unsigned long r;
24584  __asm(".insn r 0x7F, 0x2, 0x6A, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24585  return r;
24586 }
24587 
24588 #pragma inline=forced_no_body
24589 unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b) {
24590  unsigned long r;
24591  __asm(".insn r 0x7F, 0x2, 0x72, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24592  return r;
24593 }
24594 
24595 #pragma inline=forced_no_body
24596 unsigned long __RV_STSA16(unsigned long a, unsigned long b) {
24597  unsigned long r;
24598  __asm(".insn r 0x7F, 0x2, 0x7B, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24599  return r;
24600 }
24601 
24602 #pragma inline=forced_no_body
24603 unsigned long __RV_RSTSA16(unsigned long a, unsigned long b) {
24604  unsigned long r;
24605  __asm(".insn r 0x7F, 0x2, 0x5B, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24606  return r;
24607 }
24608 
24609 #pragma inline=forced_no_body
24610 unsigned long __RV_KSTSA16(unsigned long a, unsigned long b) {
24611  unsigned long r;
24612  __asm(".insn r 0x7F, 0x2, 0x63, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24613  return r;
24614 }
24615 
24616 #pragma inline=forced_no_body
24617 unsigned long __RV_URSTSA16(unsigned long a, unsigned long b) {
24618  unsigned long r;
24619  __asm(".insn r 0x7F, 0x2, 0x6B, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24620  return r;
24621 }
24622 
24623 #pragma inline=forced_no_body
24624 unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b) {
24625  unsigned long r;
24626  __asm(".insn r 0x7F, 0x2, 0x73, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24627  return r;
24628 }
24629 
24630 // #pragma inline=forced_no_body
24631 // unsigned long __RV_BPICK(unsigned long a, unsigned long b, unsigned long c) {
24632  // TODO: remains to be done
24633 // }
24634 
24635 // RV64 only
24636 #pragma inline=forced_no_body
24637 unsigned long __RV_STAS32(unsigned long a, unsigned long b) {
24638  unsigned long r;
24639  __asm(".insn r 0x7F, 0x2, 0x78, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24640  return r;
24641 }
24642 
24643 #pragma inline=forced_no_body
24644 unsigned long __RV_RSTAS32(unsigned long a, unsigned long b) {
24645  unsigned long r;
24646  __asm(".insn r 0x7F, 0x2, 0x58, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24647  return r;
24648 }
24649 
24650 #pragma inline=forced_no_body
24651 unsigned long __RV_KSTAS32(unsigned long a, unsigned long b) {
24652  unsigned long r;
24653  __asm(".insn r 0x7F, 0x2, 0x60, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24654  return r;
24655 }
24656 
24657 #pragma inline=forced_no_body
24658 unsigned long __RV_URSTAS32(unsigned long a, unsigned long b) {
24659  unsigned long r;
24660  __asm(".insn r 0x7F, 0x2, 0x68, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24661  return r;
24662 }
24663 
24664 #pragma inline=forced_no_body
24665 unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b) {
24666  unsigned long r;
24667  __asm(".insn r 0x7F, 0x2, 0x70, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24668  return r;
24669 }
24670 
24671 #pragma inline=forced_no_body
24672 unsigned long __RV_STSA32(unsigned long a, unsigned long b) {
24673  unsigned long r;
24674  __asm(".insn r 0x7F, 0x2, 0x79, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24675  return r;
24676 }
24677 
24678 #pragma inline=forced_no_body
24679 unsigned long __RV_RSTSA32(unsigned long a, unsigned long b) {
24680  unsigned long r;
24681  __asm(".insn r 0x7F, 0x2, 0x59, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24682  return r;
24683 }
24684 
24685 #pragma inline=forced_no_body
24686 unsigned long __RV_KSTSA32(unsigned long a, unsigned long b) {
24687  unsigned long r;
24688  __asm(".insn r 0x7F, 0x2, 0x61, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24689  return r;
24690 }
24691 
24692 #pragma inline=forced_no_body
24693 unsigned long __RV_URSTSA32(unsigned long a, unsigned long b) {
24694  unsigned long r;
24695  __asm(".insn r 0x7F, 0x2, 0x69, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24696  return r;
24697 }
24698 
24699 #pragma inline=forced_no_body
24700 unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b) {
24701  unsigned long r;
24702  __asm(".insn r 0x7F, 0x2, 0x71, %0,%1,%2":"=r"(r) : "r"(a), "r"(b) );
24703  return r;
24704 }
24705 
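// EXPD8n: expand and copy byte n of a across every byte of the result
// (32-bit wide on RV32, 64-bit wide on RV64); implemented in plain C on
// top of __EXPD_BYTE rather than via .insn, as noted above.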
24706 #pragma inline=forced_no_body
24707 unsigned long __RV_EXPD80(unsigned long a)
24708 {
24709  return __EXPD_BYTE((uint8_t)(a & 0xff));
24710 }
24711 
24712 #pragma inline=forced_no_body
24713 unsigned long __RV_EXPD81(unsigned long a)
24714 {
24715  return __EXPD_BYTE((uint8_t)((a >> 8) & 0xff));
24716 }
24717 
24718 #pragma inline=forced_no_body
24719 unsigned long __RV_EXPD82(unsigned long a)
24720 {
24721  return __EXPD_BYTE((uint8_t)((a >> 16) & 0xff));
24722 }
24723 
24724 #pragma inline=forced_no_body
24725 unsigned long __RV_EXPD83(unsigned long a)
24726 {
24727  return __EXPD_BYTE((uint8_t)((a >> 24) & 0xff));
24728 }
24729 
24730 #if __RISCV_XLEN == 64
24731 // RV64 only
24732 #pragma inline=forced_no_body
24733 unsigned long __RV_EXPD84(unsigned long a)
24734 {
24735  return __EXPD_BYTE((uint8_t)((a >> 32) & 0xff));
24736 }
24737 
24738 #pragma inline=forced_no_body
24739 unsigned long __RV_EXPD85(unsigned long a)
24740 {
24741  return __EXPD_BYTE((uint8_t)((a >> 40) & 0xff));
24742 }
24743 
24744 #pragma inline=forced_no_body
24745 unsigned long __RV_EXPD86(unsigned long a)
24746 {
24747  return __EXPD_BYTE((uint8_t)((a >> 48) & 0xff));
24748 }
24749 
24750 #pragma inline=forced_no_body
24751 unsigned long __RV_EXPD87(unsigned long a)
24752 {
24753  return __EXPD_BYTE((uint8_t)((a >> 56) & 0xff));
24754 }
24755 #endif
24756 #pragma language=restore
24757 
24758 #else
24759  #error Unknown compiler
24760 #endif /* __ICCRISCV__ */
24761 
24762 
24763 /* ===== ARM Compatible SIMD API ===== */
24765 #define __QADD8(x, y) __RV_KADD8((x), (y))
24767 #define __QSUB8(x, y) __RV_KSUB8((x), (y))
24769 #define __QADD16(x, y) __RV_KADD16((x), (y))
24771 #define __SHADD16(x, y) __RV_RADD16((x), (y))
24773 #define __QSUB16(x, y) __RV_KSUB16((x), (y))
24775 #define __SHSUB16(x, y) __RV_RSUB16((x), (y))
24777 #define __QASX(x, y) __RV_KCRAS16((x), (y))
24779 #define __SHASX(x, y) __RV_RCRAS16((x), (y))
24781 #define __QSAX(x, y) __RV_KCRSA16((x), (y))
24783 #define __SHSAX(x, y) __RV_RCRSA16((x), (y))
24785 #define __SMUSDX(x, y) __RV_SMXDS((y), (x))
24787 __STATIC_FORCEINLINE long __SMUADX(unsigned long op1, unsigned long op2)
24788 {
24789  return __RV_KMXDA(op1, op2);
24790 }
24792 #define __QADD(x, y) __RV_KADDW((x), (y))
24794 #define __QSUB(x, y) __RV_KSUBW((x), (y))
24796 __STATIC_FORCEINLINE long __SMLAD(unsigned long op1, unsigned long op2, long acc)
24797 {
24798  return __RV_KMADA(acc, op1, op2);
24799 }
24801 __STATIC_FORCEINLINE long __SMLADX(unsigned long op1, unsigned long op2, long acc)
24802 {
24803  return __RV_KMAXDA(acc, op1, op2);
24804 }
24806 __STATIC_FORCEINLINE long __SMLSDX(unsigned long op1, unsigned long op2, long acc)
24807 {
24808  return (acc - __RV_SMXDS(op1, op2));
24809 }
24811 __STATIC_FORCEINLINE long long __SMLALD(unsigned long op1, unsigned long op2, long long acc)
24812 {
24813  return __RV_SMALDA(acc, op1, op2);
24814 }
24816 __STATIC_FORCEINLINE long long __SMLALDX(unsigned long op1, unsigned long op2, long long acc)
24817 {
24818  return __RV_SMALXDA(acc, op1, op2);
24819 }
24821 __STATIC_FORCEINLINE long __SMUAD(unsigned long op1, unsigned long op2)
24822 {
24823  return __RV_KMDA(op1, op2);
24824 }
24826 __STATIC_FORCEINLINE long __SMUSD(unsigned long op1, unsigned long op2)
24827 {
24828  return __RV_SMDRS(op1, op2);
24829 }
24831 #define __SXTB16(x) __RV_SUNPKD820(x)
24833 __STATIC_FORCEINLINE unsigned long __SXTAB16(unsigned long op1, unsigned long op2)
24834 {
24835  return __RV_ADD16(op1, __RV_SUNPKD820(op2));
24836 }
24837 #define __SXTAB16_RORn(ARG1, ARG2, ROTATE) __SXTAB16(ARG1, __ROR(ARG2, ROTATE))
24838 
24840 __STATIC_FORCEINLINE long __SMMLA(long op1, long op2, long acc)
24841 {
24842  long mul;
24843  mul = __RV_SMMUL(op1, op2);
24844  return (acc + mul);
24845 }
24846 #define __DKHM8 __RV_DKHM8
24847 #define __DKHM16 __RV_DKHM16
24848 #define __DKSUB16 __RV_DKSUB16
24849 #define __SMAQA __RV_SMAQA
24850 #define __MULSR64 __RV_MULSR64
24851 #define __DQADD8 __RV_DKADD8
24852 #define __DQSUB8 __RV_DKSUB8
24853 #define __DKADD16 __RV_DKADD16
24854 #define __PKBB16 __RV_PKBB16
24855 #define __DKSLRA16 __RV_DKSLRA16
24856 #define __DKSLRA8 __RV_DKSLRA8
24857 #define __KABSW __RV_KABSW
24858 #define __DKABS8 __RV_DKABS8
24859 #define __DKABS16 __RV_DKABS16
24860 #define __SMALDA __RV_SMALDA
24861 #define __SMSLDA __RV_SMSLDA
24862 #define __SMALBB __RV_SMALBB
24863 #define __SUB64 __RV_SUB64
24864 #define __ADD64 __RV_ADD64
24865 #define __SMBB16 __RV_SMBB16
24866 #define __SMBT16 __RV_SMBT16
24867 #define __SMTT16 __RV_SMTT16
24868 #define __EXPD80 __RV_EXPD80
24869 #define __SMAX8 __RV_SMAX8
24870 #define __SMAX16 __RV_SMAX16
24871 #define __PKTT16 __RV_PKTT16
24872 #define __KADD16 __RV_KADD16
24873 #define __SADD16 __RV_ADD16
24874 #define __SSUB8 __RV_KSUB8
24875 #define __SADD8 __RV_KADD8
24876 #define __USAT16 __RV_UCLIP16
24877 #define __SMALTT __RV_SMALTT
24878 
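/* ARM-style pack-halfword helpers (note added for clarity, derivable from the
 * macros below): __PKHBT(x, y, n) keeps the bottom halfword of x and fills the
 * top halfword from (y << n); __PKHTB(x, y, n) keeps the top halfword of x and
 * fills the bottom halfword from (y >> n). The n == 0 and n == 16 cases
 * collapse to single pack instructions. */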
24880 #define __PKHBT(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
24881  (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
24882  (((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
24883  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)))
24884 
24886 #define __PKHTB(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
24887  (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
24888  (((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
24889  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)))
24890 
24891 #if __RISCV_XLEN == 64
24894 #define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
24895  (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
24896  ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
24897  ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
24898  ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
24899  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))
24900 
24903 #define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
24904  (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
24905  ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
24906  ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
24907  ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
24908  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
24909 #else
24912 #define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG2, ARG1) : \
24913  (ARG3 == 16) ? __RV_DPKBB16(ARG2, ARG1) : \
24914  ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
24915  ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
24916  ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
24917  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))
24918 
24921 #define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG1, ARG2) : \
24922  (ARG3 == 16) ? __RV_DPKTT16(ARG1, ARG2) : \
24923  ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
24924  ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
24925  ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
24926  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
24927 #endif /* __RISCV_XLEN == 64 */
24928 
24930 #define __SXTB16_RORn(ARG1, ARG2) __RV_SUNPKD820(__ROR(ARG1, ARG2))
24931 
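/* Usage sketch (illustrative, not part of the original header): code written
 * against the ARM CMSIS SIMD intrinsics ports unchanged, e.g. a dual 16-bit
 * multiply-accumulate over packed q15 sample pairs pa/pb of length n/2:
 *
 *   long acc = 0;
 *   for (size_t i = 0; i < n / 2; i++) {
 *       acc = __SMLAD(pa[i], pb[i], acc);  // lowers to kmada on RISC-V DSP
 *   }
 */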
24932 #endif /* defined(__DSP_PRESENT) && (__DSP_PRESENT == 1) */
24933 
24934 #ifdef __cplusplus
24935 }
24936 #endif
24937 
24938 #endif /* __CORE_FEATURE_DSP__ */
#define __EXPD_BYTE(x)
Expand byte to unsigned long value.
#define __ASM
Pass information from the compiler to the assembler.
Definition: nmsis_gcc.h:55
#define __STATIC_FORCEINLINE
Define a static function that should be always inlined by the compiler.
Definition: nmsis_gcc.h:70
__STATIC_FORCEINLINE long __RV_RADDW(int a, int b)
RADDW (32-bit Signed Halving Addition)
__STATIC_FORCEINLINE long __RV_MINW(int a, int b)
MINW (32-bit Signed Word Minimum)
__STATIC_FORCEINLINE long __RV_MAXW(int a, int b)
MAXW (32-bit Signed Word Maximum)
__STATIC_FORCEINLINE long long __RV_MULSR64(long a, long b)
MULSR64 (Multiply Word Signed to 64-bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_MULR64(unsigned long a, unsigned long b)
MULR64 (Multiply Word Unsigned to 64-bit Data)
__STATIC_FORCEINLINE long __RV_RSUBW(int a, int b)
RSUBW (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADDW(unsigned int a, unsigned int b)
URADDW (32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSUBW(unsigned int a, unsigned int b)
URSUBW (32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_UMSR64(unsigned long long t, unsigned long a, unsigned long b)
UMSR64 (Unsigned Multiply and Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UKMSR64(unsigned long long t, unsigned long a, unsigned long b)
UKMSR64 (Unsigned Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_SMAR64(long long t, long a, long b)
SMAR64 (Signed Multiply and Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_KMAR64(long long t, long a, long b)
KMAR64 (Signed Multiply and Saturating Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_SMSR64(long long t, long a, long b)
SMSR64 (Signed Multiply and Subtract from 64- Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UKMAR64(unsigned long long t, unsigned long a, unsigned long b)
UKMAR64 (Unsigned Multiply and Saturating Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_KMSR64(long long t, long a, long b)
KMSR64 (Signed Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UMAR64(unsigned long long t, unsigned long a, unsigned long b)
UMAR64 (Unsigned Multiply and Add to 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_SUB64(unsigned long long a, unsigned long long b)
SUB64 (64-bit Subtraction)
__STATIC_FORCEINLINE long long __RV_RADD64(long long a, long long b)
RADD64 (64-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_URADD64(unsigned long long a, unsigned long long b)
URADD64 (64-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_ADD64(unsigned long long a, unsigned long long b)
ADD64 (64-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UKSUB64(unsigned long long a, unsigned long long b)
UKSUB64 (64-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_UKADD64(unsigned long long a, unsigned long long b)
UKADD64 (64-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_URSUB64(unsigned long long a, unsigned long long b)
URSUB64 (64-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE long long __RV_KADD64(long long a, long long b)
KADD64 (64-bit Signed Saturating Addition)
__STATIC_FORCEINLINE long long __RV_KSUB64(long long a, long long b)
KSUB64 (64-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE long long __RV_RSUB64(long long a, long long b)
RSUB64 (64-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE long __RV_SMAQA(long t, unsigned long a, unsigned long b)
SMAQA (Signed Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE long __RV_SMAQA_SU(long t, unsigned long a, unsigned long b)
SMAQA.SU (Signed and Unsigned Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_UMAQA(unsigned long t, unsigned long a, unsigned long b)
UMAQA (Unsigned Multiply Four Bytes with 32- bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_BPICK(unsigned long a, unsigned long b, unsigned long c)
BPICK (Bit-wise Pick)
__STATIC_FORCEINLINE unsigned long __RV_MADDR32(unsigned long t, unsigned long a, unsigned long b)
MADDR32 (Multiply and Add to 32-Bit Word)
__STATIC_FORCEINLINE long __RV_AVE(long a, long b)
AVE (Average with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_BITREV(unsigned long a, unsigned long b)
BITREV (Bit Reverse)
__STATIC_FORCEINLINE unsigned long __RV_MSUBR32(unsigned long t, unsigned long a, unsigned long b)
MSUBR32 (Multiply and Subtract from 32-Bit Word)
__STATIC_FORCEINLINE unsigned long __RV_WEXT(long long a, unsigned int b)
WEXT (Extract Word from 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_SWAP8(unsigned long a)
SWAP8 (Swap Byte within Halfword)
__STATIC_FORCEINLINE long __RV_SRA_U(long a, unsigned int b)
SRA.u (Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SWAP16(unsigned long a)
SWAP16 (Swap Halfword within Word)
__STATIC_FORCEINLINE long __RV_KHMTT(unsigned int a, unsigned int b)
KHMTT (Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE long __RV_KHMBB(unsigned int a, unsigned int b)
KHMBB (Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE long __RV_KADDH(int a, int b)
KADDH (Signed Addition with Q15 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBH(unsigned int a, unsigned int b)
UKSUBH (Unsigned Subtraction with U16 Saturation)
__STATIC_FORCEINLINE long __RV_KHMBT(unsigned int a, unsigned int b)
KHMBT (Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE long __RV_KSUBH(int a, int b)
KSUBH (Signed Subtraction with Q15 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_UKADDH(unsigned int a, unsigned int b)
UKADDH (Unsigned Addition with U16 Saturation)
__STATIC_FORCEINLINE long __RV_KDMBT(unsigned int a, unsigned int b)
KDMBT (Signed Saturating Double Multiply B16 x T16)
__STATIC_FORCEINLINE long __RV_KDMABT(long t, unsigned int a, unsigned int b)
KDMABT (Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KABSW(signed long a)
KABSW (Scalar 32-bit Absolute Value with Saturation)
__STATIC_FORCEINLINE long __RV_KDMABB(long t, unsigned int a, unsigned int b)
KDMABB (Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE long __RV_KSUBW(int a, int b)
KSUBW (Signed Subtraction with Q31 Saturation)
__STATIC_FORCEINLINE long __RV_KSLRAW(int a, int b)
KSLRAW (Shift Left Logical with Q31 Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KDMATT(long t, unsigned int a, unsigned int b)
KDMATT (Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE long __RV_KDMBB(unsigned int a, unsigned int b)
KDMBB (Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_UKADDW(unsigned int a, unsigned int b)
UKADDW (Unsigned Addition with U32 Saturation)
__STATIC_FORCEINLINE long __RV_KSLRAW_U(int a, int b)
KSLRAW.u (Shift Left Logical with Q31 Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KADDW(int a, int b)
KADDW (Signed Addition with Q31 Saturation)
__STATIC_FORCEINLINE long __RV_KSLLW(long a, unsigned int b)
KSLLW (Saturating Shift Left Logical for Word)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBW(unsigned int a, unsigned int b)
UKSUBW (Unsigned Subtraction with U32 Saturation)
__STATIC_FORCEINLINE long __RV_KDMTT(unsigned int a, unsigned int b)
KDMTT (Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_EXPD80(unsigned long a)
EXPD80 (Expand and Copy Byte 0 to 32bit(when rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD83(unsigned long a)
EXPD83 (Expand and Copy Byte 3 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD81(unsigned long a)
EXPD81 (Expand and Copy Byte 1 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD82(unsigned long a)
EXPD82 (Expand and Copy Byte 2 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long long __RV_DKHM8(unsigned long long a, unsigned long long b)
DKHM8 (64-bit SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA8(unsigned long long a, int b)
DKSLRA8 (64-bit SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA16(unsigned long long a, int b)
DKSLRA16 (64-bit SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB8(unsigned long long a, unsigned long long b)
DKSUB8 (64-bit SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS8(unsigned long long a)
DKABS8 (64-bit SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS16(unsigned long long a)
DKABS16 (64-bit SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD8(unsigned long long a, unsigned long long b)
DKADD8 (64-bit SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB16(unsigned long long a, unsigned long long b)
DKSUB16 (64-bit SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKHM16(unsigned long long a, unsigned long long b)
DKHM16 (64-bit SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD16(unsigned long long a, unsigned long long b)
DKADD16 (64-bit SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL(unsigned long long a, unsigned long long b)
DSMMUL (64-bit MSW 32x32 Signed Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKMDA(unsigned long long a, unsigned long long b)
DKMDA (Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTSA16(unsigned long long a, unsigned long long b)
DKSTSA16 (16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA32(unsigned long long a, unsigned long long b)
DSMTT32.sra32 (Signed Multiply Top Word & Top Word with Right Shift 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR16(unsigned long a, unsigned long b)
DMSR16 (Signed Multiply Halfs with Right Shift 16-bit and Cross Multiply Halfs with Right Shift 16-bi...
__STATIC_FORCEINLINE unsigned long __RV_DREDSA16(unsigned long long a)
DREDSA16 (Reduced Subtraction and Reduced Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX16(unsigned long long a, unsigned long long b)
DKHMX16 (64-bit SIMD Signed Crossed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS16(unsigned long long a, unsigned long long b)
DKCRAS16 (16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT32(unsigned long long a, unsigned long long b)
DPKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA32(unsigned long long a, unsigned long long b)
DSMBT32.sra32 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 32)
__STATIC_FORCEINLINE unsigned long long __RV_DSMTT16(unsigned long long a, unsigned long long b)
DSMTT16 (Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA16(unsigned long long a, unsigned long long b)
DRCRSA16 (16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_DSMBB32(unsigned long long a, unsigned long long b)
DSMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBT16(unsigned long long a, unsigned long long b)
DSMBT16 (Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD820(unsigned long long a)
DSUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DCRAS32(unsigned long long a, unsigned long long b)
DCRAS32 (32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS32(unsigned long long a, unsigned long long b)
DKCRAS32 (32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMDRS(unsigned long long a, unsigned long long b)
DSMDRS (Signed Multiply Two Halfs and Reverse Subtract)
__STATIC_FORCEINLINE long long __RV_DSMBT32(unsigned long long a, unsigned long long b)
SMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB16(unsigned long long a, unsigned long long b)
DSUB16 (64-bit SIMD 16-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSTSA32(unsigned long long a, unsigned long long b)
DSTSA32 (32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB32(unsigned long long a, unsigned long long b)
DSUB32 (64-bit SIMD 32-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB16(unsigned long long a, unsigned long long b)
DPKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA16(unsigned long long a, unsigned long long b)
DKCRSA16 (16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS32(unsigned long long a, unsigned long long b)
DRCRAS32 (32-bit Signed Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD832(unsigned long long a)
DZUNPKD832 (UnSigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long long __RV_DADD32(unsigned long long a, unsigned long long b)
DADD32 (32-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD810(unsigned long long a)
DZUNPKD810 (UnSigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD32(unsigned long long a, unsigned long long b)
DRADD32 (64-bit SIMD 32-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB16(unsigned long long a, unsigned long long b)
DPKTB16 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX8(unsigned long long a, unsigned long long b)
DKHMX8 (64-bit SIMD Signed Crossed Saturating Q7 Multiply)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA32(unsigned long long a, unsigned long long b)
DSMBB32.sra32 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 32)
__STATIC_FORCEINLINE unsigned long long __RV_DADD16(unsigned long long a, unsigned long long b)
DADD16 (16-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD831(unsigned long long a)
DZUNPKD831 (UnSigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR33(unsigned long long a, unsigned long long b)
DMSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA14(unsigned long long a, unsigned long long b)
DSMBB32.sra14 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 14)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB16(unsigned long long a, unsigned long long b)
DRSUB16 (16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB32(unsigned long long a, unsigned long long b)
DPKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA32(unsigned long long a, unsigned long long b)
DKCRSA32 (32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD830(unsigned long long a)
DSUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD32(unsigned long long a, unsigned long long b)
DKADD32(64-bit SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE long long __RV_DSMTT32(unsigned long long a, unsigned long long b)
DSMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD830(unsigned long long a)
DZUNPKD830 (UnSigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DSMXDS(unsigned long long a, unsigned long long b)
DSMXDS (Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBB16(unsigned long long a, unsigned long long b)
DSMBB16 (Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD832(unsigned long long a)
DSUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT16(unsigned long long a, unsigned long long b)
DPKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB32(unsigned long long a, unsigned long long b)
DPKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR17(unsigned long a, unsigned long b)
DMSR17 (Signed Multiply Halfs with Right Shift 17-bit and Cross Multiply Halfs with Right Shift 17-bi...
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL(unsigned long long a, unsigned long long b)
DKWMMUL (64-bit MSW 32x32 Signed Multiply & Double)
__STATIC_FORCEINLINE unsigned long long __RV_DPACK32(signed long a, signed long b)
DPACK32 (SIMD Pack Two 32-bit Data To 64-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD820(unsigned long long a)
DZUNPKD820 (UnSigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD16(unsigned long long a, unsigned long long b)
DRADD16 (64-bit SIMD 16-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT32(unsigned long long a, unsigned long long b)
DPKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD810(unsigned long long a)
DSUNPKD810 (Signed Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS32(unsigned long long a)
DKABS32 (64-bit SIMD 32-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTAS16(unsigned long long a, unsigned long long b)
DKSTAS16 (16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DCRSA32(unsigned long long a, unsigned long long b)
DCRSA32 (32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS16(unsigned long long a, unsigned long long b)
DRCRAS16 (16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA32(unsigned long long a, unsigned long long b)
DRCRSA32 (32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE int16_t __RV_DKCLIP64(unsigned long long a)
DKCLIP64 (64-bit Clipped to 16-bit Saturation Value)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT16(unsigned long long a, unsigned long long b)
DPKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD831(unsigned long long a)
DSUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DSRA16(unsigned long long a, unsigned long b)
DSRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA32(unsigned long long a, int b)
DKSLRA32 (64-bit SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB32(unsigned long long a, unsigned long long b)
DRSUB32 (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL_U(unsigned long long a, unsigned long long b)
DSMMUL.u (64-bit MSW 32x32 Unsigned Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DSTAS32(unsigned long long a, unsigned long long b)
DSTAS32 (SIMD 32-bit Straight Addition & Subtractionn)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA14(unsigned long long a, unsigned long long b)
DSMBT32.sra14 (Signed Multiply Bottom Word & Top Word with Right Shift 14)
__STATIC_FORCEINLINE unsigned long __RV_DREDAS16(unsigned long long a)
DREDAS16 (Reduced Addition and Reduced Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKMXDA(unsigned long long a, unsigned long long b)
DKMXDA (Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB32(unsigned long long a, unsigned long long b)
DKSUB32 (64-bit SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL_U(unsigned long long a, unsigned long long b)
DKWMMUL.u (64-bit MSW 32x32 Unsigned Multiply & Double)
__STATIC_FORCEINLINE unsigned long long __RV_DMXSR33(unsigned long long a, unsigned long long b)
DMXSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA14(unsigned long long a, unsigned long long b)
DSMTT32.sra14 (Signed Multiply Top Word & Top Word with Right Shift 14-bit)
__STATIC_FORCEINLINE long long __RV_DKMXDA32(unsigned long long a, unsigned long long b)
DKMXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADS (Two 16x16 with 32-bit Signed Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DUMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DUMAQA (Four Unsigned 8x8 with 32-bit Unsigned Add)
__STATIC_FORCEINLINE long long __RV_DSMALXDA(long long t, unsigned long long a, unsigned long long b)
DSMALXDA (Four Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA (Four Signed 8x8 with 32-bit Signed Add)
__STATIC_FORCEINLINE long long __RV_DKMSXDA32(long long t, unsigned long long a, unsigned long long b)
DKMSXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE long long __RV_DSMALXDS(long long t, unsigned long long a, unsigned long long b)
DSMALXDS (Four Cross Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE long long __RV_DSMALDA(long long t, unsigned long long a, unsigned long long b)
DSMALDA (Four Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADRS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADRS (Two 16x16 with 32-bit Signed Add and Reversed Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADA (Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA_SU(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA.SU (Four Signed 8 x Unsigned 8 with 32-bit Signed Add)
__STATIC_FORCEINLINE long long __RV_DSMALDRS(long long t, unsigned long long a, unsigned long long b)
DSMALDRS (Four Signed 16x16 with 64-bit Add and Revered Sub)
__STATIC_FORCEINLINE long __RV_DSMA32_U(unsigned long long a, unsigned long long b)
DSMA32.u (64-bit SIMD 32-bit Signed Multiply Addition With Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DSMDRS32(unsigned long long a, unsigned long long b)
DSMDRS32 (Two Signed 32x32 with 64-bit Revered Sub)
__STATIC_FORCEINLINE long __RV_DSMXS32_U(unsigned long long a, unsigned long long b)
DSMXS32.u (64-bit SIMD 32-bit Signed Multiply Cross Subtraction With Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DKMSDA32(long long t, unsigned long long a, unsigned long long b)
DKMSDA32 (Two Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKSMS32_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKSMS32.u (Two Signed Multiply Shift-clip and Saturation with Rounding)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB (64-bit MSW 32x32 Signed Multiply and Saturating Sub)
__STATIC_FORCEINLINE long long __RV_DDSMAQA_SU(long long t, unsigned long long a, unsigned long long b)
DDSMAQA.SU (Eight Signed 8 x Unsigned 8 with 64-bit Add)
__STATIC_FORCEINLINE long long __RV_DSMALTT(long long t, unsigned long long a, unsigned long long b)
DSMALTT (Signed Multiply Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DSMDS32(unsigned long long a, unsigned long long b)
DSMDS32 (Two Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_DSMSLDA(long long t, unsigned long long a, unsigned long long b)
DSMSLDA (Four Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE long __RV_DMADA32(long long t, unsigned long long a, unsigned long long b)
DMADA32 ((Two Cross Signed 32x32 with 64-bit Add and Clip to 32-bit)
__STATIC_FORCEINLINE long long __RV_DSMALDS(long long t, unsigned long long a, unsigned long long b)
DSMALDS (Four Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE long long __RV_DKMAXDS32(long long t, unsigned long long a, unsigned long long b)
DKMAXDS32 (Two Cross Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC (64-bit MSW 32x32 Signed Multiply and Saturating Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMSXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSXDA (Two Cross 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE long long __RV_DKMATT32(long long t, unsigned long long a, unsigned long long b)
DKMATT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long long __RV_DKMAXDA32(long long t, unsigned long long a, unsigned long long b)
DKMAXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE long long __RV_DDSMAQA(long long t, unsigned long long a, unsigned long long b)
DDSMAQA (Eight Signed 8x8 with 64-bit Add)
__STATIC_FORCEINLINE long __RV_DSMXA32_U(unsigned long long a, unsigned long long b)
DSMXA32.u (64-bit SIMD 32-bit Signed Cross Multiply Addition with Rounding and Clip)
__STATIC_FORCEINLINE unsigned long long __RV_DKMSDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSDA (Two 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE long long __RV_DSMXDS32(unsigned long long a, unsigned long long b)
DSMXDS32 (Two Cross Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long __RV_DSMS32_U(unsigned long long a, unsigned long long b)
DSMS32.u (64-bit SIMD 32-bit Signed Multiply Subtraction with Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DSMALBT(long long t, unsigned long long a, unsigned long long b)
DSMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DSMALBB(long long t, unsigned long long a, unsigned long long b)
DSMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DDUMAQA(long long t, unsigned long long a, unsigned long long b)
DDUMAQA (Eight Unsigned 8x8 with 64-bit Unsigned Add)
__STATIC_FORCEINLINE long long __RV_DKMADS32(long long t, unsigned long long a, unsigned long long b)
DKMADS32 (Two Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE long long __RV_DKMABB32(long long t, unsigned long long a, unsigned long long b)
DKMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long long __RV_DKMADRS32(long long t, unsigned long long a, unsigned long long b)
DKMADRS32 (Two Signed 32x32 with 64-bit Saturation Revered Add and Sub)
__STATIC_FORCEINLINE long __RV_DSMADA16(long long t, unsigned long long a, unsigned long long b)
DSMADA16 (Signed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDA (Two Cross 16x16 with 32-bit Signed Double Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC.u (64-bit MSW 32x32 Unsigned Multiply and Saturating Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB.u (64-bit MSW 32x32 Unsigned Multiply and Saturating Sub)
__STATIC_FORCEINLINE long __RV_DSMAXDA16(long long t, unsigned long long a, unsigned long long b)
DSMAXDA16 (Signed Crossed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDS (Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long long __RV_DKMDA32(unsigned long long a, unsigned long long b)
DKMDA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE long long __RV_DKMABT32(long long t, unsigned long long a, unsigned long long b)
DKMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long long __RV_DSMSLXDA(long long t, unsigned long long a, unsigned long long b)
DSMSLXDA (Four Cross Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_DKMADA32(long long t, unsigned long long a, unsigned long long b)
DKMADA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE void __RV_CLROV(void)
CLROV (Clear OV flag)
__STATIC_FORCEINLINE unsigned long __RV_RDOV(void)
RDOV (Read OV flag)
__STATIC_FORCEINLINE unsigned long __RV_PBSAD(unsigned long a, unsigned long b)
PBSAD (Parallel Byte Sum of Absolute Difference)
__STATIC_FORCEINLINE unsigned long __RV_CLRS32(unsigned long a)
CLRS32 (SIMD 32-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_CLZ32(unsigned long a)
CLZ32 (SIMD 32-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_CLO32(unsigned long a)
CLO32 (SIMD 32-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_PBSADA(unsigned long t, unsigned long a, unsigned long b)
PBSADA (Parallel Byte Sum of Absolute Difference Accum)
__STATIC_FORCEINLINE long __RV_KMABT32(long t, unsigned long a, unsigned long b)
KMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long __RV_KMABB32(long t, unsigned long a, unsigned long b)
KMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long __RV_KMATT32(long t, unsigned long a, unsigned long b)
KMATT32 (Saturating Signed Multiply Top Words & Add)
__STATIC_FORCEINLINE long __RV_SMTT32(unsigned long a, unsigned long b)
SMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE long __RV_SMBB32(unsigned long a, unsigned long b)
SMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE long __RV_SMBT32(unsigned long a, unsigned long b)
SMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE unsigned long __RV_PKBT32(unsigned long a, unsigned long b)
PKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBB32(unsigned long a, unsigned long b)
PKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTT32(unsigned long a, unsigned long b)
PKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTB32(unsigned long a, unsigned long b)
PKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE long __RV_KMDA32(unsigned long a, unsigned long b)
KMDA32 (Signed Multiply Two Words and Add)
__STATIC_FORCEINLINE long __RV_KMADS32(long t, unsigned long a, unsigned long b)
KMADS32 (Saturating Signed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMADRS32(long t, unsigned long a, unsigned long b)
KMADRS32 (Saturating Signed Multiply Two Words & Reverse Subtract & Add)
__STATIC_FORCEINLINE long __RV_SMDS32(unsigned long a, unsigned long b)
SMDS32 (Signed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE long __RV_KMADA32(long t, unsigned long a, unsigned long b)
KMADA32 (Saturating Signed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSDA32(long t, unsigned long a, unsigned long b)
KMSDA32 (Saturating Signed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE long __RV_SMXDS32(unsigned long a, unsigned long b)
SMXDS32 (Signed Crossed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE long __RV_SMDRS32(unsigned long a, unsigned long b)
SMDRS32 (Signed Multiply Two Words and Reverse Subtract)
__STATIC_FORCEINLINE long __RV_KMXDA32(unsigned long a, unsigned long b)
KMXDA32 (Signed Crossed Multiply Two Words and Add)
__STATIC_FORCEINLINE long __RV_KMAXDS32(long t, unsigned long a, unsigned long b)
KMAXDS32 (Saturating Signed Crossed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMAXDA32(long t, unsigned long a, unsigned long b)
KMAXDA32 (Saturating Signed Crossed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSXDA32(long t, unsigned long a, unsigned long b)
KMSXDA32 (Saturating Signed Crossed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b)
UKSTAS32 (SIMD 32-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKADD32(unsigned long a, unsigned long b)
UKADD32 (SIMD 32-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB32(unsigned long a, unsigned long b)
UKSUB32 (SIMD 32-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRSA32(unsigned long a, unsigned long b)
CRSA32 (SIMD 32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSUB32(unsigned long a, unsigned long b)
KSUB32 (SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS32(unsigned long a, unsigned long b)
RCRAS32 (SIMD 32-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA32(unsigned long a, unsigned long b)
UKCRSA32 (SIMD 32-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA32(unsigned long a, unsigned long b)
RCRSA32 (SIMD 32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_CRAS32(unsigned long a, unsigned long b)
CRAS32 (SIMD 32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA32(unsigned long a, unsigned long b)
KCRSA32 (SIMD 32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS32(unsigned long a, unsigned long b)
KCRAS32 (SIMD 32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSTAS32(unsigned long a, unsigned long b)
URSTAS32 (SIMD 32-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB32(unsigned long a, unsigned long b)
SUB32 (SIMD 32-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADD32(unsigned long a, unsigned long b)
URADD32 (SIMD 32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSUB32(unsigned long a, unsigned long b)
RSUB32 (SIMD 32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_STSA32(unsigned long a, unsigned long b)
STSA32 (SIMD 32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA32(unsigned long a, unsigned long b)
RSTSA32 (SIMD 32-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA32(unsigned long a, unsigned long b)
URSTSA32 (SIMD 32-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KADD32(unsigned long a, unsigned long b)
KADD32 (SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA32(unsigned long a, unsigned long b)
URCRSA32 (SIMD 32-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSUB32(unsigned long a, unsigned long b)
URSUB32 (SIMD 32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS32(unsigned long a, unsigned long b)
KSTAS32 (SIMD 32-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_ADD32(unsigned long a, unsigned long b)
ADD32 (SIMD 32-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS32(unsigned long a, unsigned long b)
RSTAS32 (SIMD 32-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS32(unsigned long a, unsigned long b)
URCRAS32 (SIMD 32-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS32(unsigned long a, unsigned long b)
UKCRAS32 (SIMD 32-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_STAS32(unsigned long a, unsigned long b)
STAS32 (SIMD 32-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RADD32(unsigned long a, unsigned long b)
RADD32 (SIMD 32-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b)
UKSTSA32 (SIMD 32-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA32(unsigned long a, unsigned long b)
KSTSA32 (SIMD 32-bit Signed Saturating Straight Subtraction & Addition)
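
Across this add/subtract family the naming is systematic: a K prefix saturates signed lanes, UK saturates unsigned lanes, R halves signed results, and UR halves unsigned results; STAS/STSA pair a straight addition with a subtraction across the lanes, while CRAS/CRSA pair them crosswise. A minimal RV64-only sketch of the straight form, assuming W1 is the upper word; butterfly32 is an illustrative name:

#include "core_feature_dsp.h"

#if defined(__riscv_xlen) && (__riscv_xlen == 64)
/* Sketch only: STAS32 computes {a.W1 + b.W1, a.W0 - b.W0} in one
 * instruction.  With both lanes of each operand holding the same sample,
 * that is a radix-2 butterfly: sum in the top lane, difference in the
 * bottom lane.  KSTAS32 is the same dataflow with saturation. */
__STATIC_FORCEINLINE unsigned long butterfly32(unsigned int x, unsigned int y)
{
    unsigned long a = ((unsigned long)x << 32) | x; /* {x, x} */
    unsigned long b = ((unsigned long)y << 32) | y; /* {y, y} */
    return __RV_STAS32(a, b); /* {x + y, x - y} */
}
#endif
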
__STATIC_FORCEINLINE unsigned long __RV_UMAX32(unsigned long a, unsigned long b)
UMAX32 (SIMD 32-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMIN32(unsigned long a, unsigned long b)
UMIN32 (SIMD 32-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long __RV_SMAX32(unsigned long a, unsigned long b)
SMAX32 (SIMD 32-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_KABS32(unsigned long a)
KABS32 (SIMD 32-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_SMIN32(unsigned long a, unsigned long b)
SMIN32 (SIMD 32-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_SRA32(unsigned long a, unsigned int b)
SRA32 (SIMD 32-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL32_U(unsigned long a, unsigned int b)
SRL32.u (SIMD 32-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLL32(unsigned long a, unsigned int b)
KSLL32 (SIMD 32-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRL32(unsigned long a, unsigned int b)
SRL32 (SIMD 32-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32(unsigned long a, int b)
KSLRA32 (SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL32(unsigned long a, unsigned int b)
SLL32 (SIMD 32-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA32_U(unsigned long a, unsigned int b)
SRA32.u (SIMD 32-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32_U(unsigned long a, int b)
KSLRA32.u (SIMD 32-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
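
The .u shift forms round rather than truncate: conceptually, 1 << (b - 1) is added to each lane before the right shift. A minimal RV64-only sketch (sra32_demo is an illustrative name):

#include "core_feature_dsp.h"

#if defined(__riscv_xlen) && (__riscv_xlen == 64)
/* Sketch only: truncating vs rounding arithmetic right shift per 32-bit
 * lane.  For a lane holding 7 and a shift of 2: SRA32 gives 7 >> 2 = 1,
 * while SRA32.u gives (7 + 2) >> 2 = 2. */
__STATIC_FORCEINLINE void sra32_demo(unsigned long x)
{
    unsigned long trunc = __RV_SRA32(x, 2);
    unsigned long round = __RV_SRA32_U(x, 2);
    (void)trunc; (void)round;
}
#endif
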
__STATIC_FORCEINLINE unsigned long __RV_KDMTT16(unsigned long a, unsigned long b)
KDMTT16 (SIMD Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMABT16(unsigned long t, unsigned long a, unsigned long b)
KDMABT16 (SIMD Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMABB16(unsigned long t, unsigned long a, unsigned long b)
KDMABB16 (SIMD Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KDMBB16(unsigned long a, unsigned long b)
KDMBB16 (SIMD Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KHMBT16(unsigned long a, unsigned long b)
KHMBT16 (SIMD Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KHMBB16(unsigned long a, unsigned long b)
KHMBB16 (SIMD Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KHMTT16(unsigned long a, unsigned long b)
KHMTT16 (SIMD Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMATT16(unsigned long t, unsigned long a, unsigned long b)
KDMATT16 (SIMD Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMBT16(unsigned long a, unsigned long b)
KDMBT16 (SIMD Signed Saturating Double Multiply B16 x T16)
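
These follow the usual Q15 multiply conventions: the KDM forms double the 16x16 product to yield a Q31 word, the KHM forms shift it right by 15 to stay in Q15, and in both cases only the 0x8000 * 0x8000 case (-1.0 * -1.0, whose result +1.0 is unrepresentable) can overflow and is saturated. A minimal sketch under those assumptions, focusing on the low word:

#include "core_feature_dsp.h"

/* Sketch only: 0x8000 is -1.0 in Q15, so both products below hit the one
 * saturating corner case. */
__STATIC_FORCEINLINE void khm_demo(void)
{
    unsigned long a = 0x8000; /* low word, bottom half = -1.0 (Q15) */
    unsigned long q31 = __RV_KDMBB16(a, a); /* low word saturates to 0x7FFFFFFF */
    unsigned long q15 = __RV_KHMBB16(a, a); /* low word saturates to 0x00007FFF */
    (void)q31; (void)q15;
}
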
__STATIC_FORCEINLINE long __RV_SMDS(unsigned long a, unsigned long b)
SMDS (SIMD Signed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE long __RV_KMADRS(long t, unsigned long a, unsigned long b)
KMADRS (SIMD Saturating Signed Multiply Two Halfs & Reverse Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMAXDA(long t, unsigned long a, unsigned long b)
KMAXDA (SIMD Saturating Signed Crossed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE long __RV_KMDA(unsigned long a, unsigned long b)
KMDA (SIMD Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long __RV_SMBT16(unsigned long a, unsigned long b)
SMBT16 (SIMD Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE long __RV_KMXDA(unsigned long a, unsigned long b)
KMXDA (SIMD Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long __RV_KMSDA(long t, unsigned long a, unsigned long b)
KMSDA (SIMD Saturating Signed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE long __RV_KMABB(long t, unsigned long a, unsigned long b)
KMABB (SIMD Saturating Signed Multiply Bottom Halfs & Add)
__STATIC_FORCEINLINE long __RV_KMABT(long t, unsigned long a, unsigned long b)
KMABT (SIMD Saturating Signed Multiply Bottom & Top Halfs & Add)
__STATIC_FORCEINLINE long __RV_SMXDS(unsigned long a, unsigned long b)
SMXDS (SIMD Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE long __RV_KMAXDS(long t, unsigned long a, unsigned long b)
KMAXDS (SIMD Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long __RV_SMBB16(unsigned long a, unsigned long b)
SMBB16 (SIMD Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE long __RV_KMADS(long t, unsigned long a, unsigned long b)
KMADS (SIMD Saturating Signed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMADA(long t, unsigned long a, unsigned long b)
KMADA (SIMD Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSXDA(long t, unsigned long a, unsigned long b)
KMSXDA (SIMD Saturating Signed Crossed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE long __RV_SMTT16(unsigned long a, unsigned long b)
SMTT16 (SIMD Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE long __RV_KMATT(long t, unsigned long a, unsigned long b)
KMATT (SIMD Saturating Signed Multiply Top Halfs & Add)
__STATIC_FORCEINLINE long __RV_SMDRS(unsigned long a, unsigned long b)
SMDRS (SIMD Signed Multiply Two Halfs and Reverse Subtract)
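
Taken together, these "multiply two halfs" instructions map directly onto packed Q15 complex arithmetic. A minimal sketch, assuming an RV32 lane layout with the imaginary part in the top half and the real part in the bottom half (cmplx_mul_q15 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: for c = x * y with Q15 complex samples,
 *   real(c) = xr*yr - xi*yi  -> SMDRS (bottom*bottom minus top*top)
 *   imag(c) = xr*yi + xi*yr  -> KMXDA (crossed products, added)
 * Each result is the sum/difference of two Q15*Q15 products, i.e. Q30. */
__STATIC_FORCEINLINE void cmplx_mul_q15(unsigned long x, unsigned long y,
                                        long *re, long *im)
{
    *re = __RV_SMDRS(x, y);
    *im = __RV_KMXDA(x, y);
}
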
__STATIC_FORCEINLINE long long __RV_SMALXDA(long long t, unsigned long a, unsigned long b)
SMALXDA (Signed Crossed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALTT(long long t, unsigned long a, unsigned long b)
SMALTT (Signed Multiply Top Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMAL(long long a, unsigned long b)
SMAL (Signed Multiply Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDS(long long t, unsigned long a, unsigned long b)
SMALDS (Signed Multiply Two Halfs & Subtract & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALBT(long long t, unsigned long a, unsigned long b)
SMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDA(long long t, unsigned long a, unsigned long b)
SMALDA (Signed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDRS(long long t, unsigned long a, unsigned long b)
SMALDRS (Signed Multiply Two Halfs & Reverse Subtract & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMSLDA(long long t, unsigned long a, unsigned long b)
SMSLDA (Signed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALBB(long long t, unsigned long a, unsigned long b)
SMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMSLXDA(long long t, unsigned long a, unsigned long b)
SMSLXDA (Signed Crossed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALXDS(long long t, unsigned long a, unsigned long b)
SMALXDS (Signed Crossed Multiply Two Halfs & Subtract & Add 64-bit)
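
The SMAL* family accumulates 16x16 products into a 64-bit register, so long dot products need no intermediate saturation or scaling. A minimal sketch, assuming an RV32 target where each unsigned long holds two consecutive Q15 samples (dot_q15 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: SMALDA adds both 16x16 products of each packed pair into
 * the 64-bit accumulator in a single instruction per pair. */
static long long dot_q15(const unsigned long *a, const unsigned long *b,
                         unsigned n_pairs)
{
    long long acc = 0;
    unsigned i;
    for (i = 0; i < n_pairs; i++) {
        acc = __RV_SMALDA(acc, a[i], b[i]);
    }
    return acc; /* sum of 2*n_pairs Q15*Q15 products, Q30-scaled */
}
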
__STATIC_FORCEINLINE long __RV_KMMAWB(long t, unsigned long a, unsigned long b)
KMMAWB (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add)
__STATIC_FORCEINLINE long __RV_KMMAWB2(long t, unsigned long a, unsigned long b)
KMMAWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add)
__STATIC_FORCEINLINE long __RV_KMMWT2_U(long a, unsigned long b)
KMMWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT2_U(long t, unsigned long a, unsigned long b)
KMMAWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT(long t, unsigned long a, unsigned long b)
KMMAWT (SIMD Saturating MSW Signed Multiply Word and Top Half and Add)
__STATIC_FORCEINLINE long __RV_KMMAWT_U(long t, unsigned long a, unsigned long b)
KMMAWT.u (SIMD Saturating MSW Signed Multiply Word and Top Half and Add with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWB_U(long a, unsigned long b)
SMMWB.u (SIMD MSW Signed Multiply Word and Bottom Half with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT2(long t, unsigned long a, unsigned long b)
KMMAWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add)
__STATIC_FORCEINLINE long __RV_KMMAWB_U(long t, unsigned long a, unsigned long b)
KMMAWB.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWT_U(long a, unsigned long b)
SMMWT.u (SIMD MSW Signed Multiply Word and Top Half with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWT(long a, unsigned long b)
SMMWT (SIMD MSW Signed Multiply Word and Top Half)
__STATIC_FORCEINLINE long __RV_SMMWB(long a, unsigned long b)
SMMWB (SIMD MSW Signed Multiply Word and Bottom Half)
__STATIC_FORCEINLINE long __RV_KMMWB2_U(long a, unsigned long b)
KMMWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 with Rounding)
__STATIC_FORCEINLINE long __RV_KMMWT2(long a, unsigned long b)
KMMWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2)
__STATIC_FORCEINLINE long __RV_KMMWB2(long a, unsigned long b)
KMMWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2)
__STATIC_FORCEINLINE long __RV_KMMAWB2_U(long t, unsigned long a, unsigned long b)
KMMAWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMSB(long t, long a, long b)
KMMSB (SIMD Saturating MSW Signed Multiply Word and Subtract)
__STATIC_FORCEINLINE long __RV_SMMUL(long a, long b)
SMMUL (SIMD MSW Signed Multiply Word)
__STATIC_FORCEINLINE long __RV_SMMUL_U(long a, long b)
SMMUL.u (SIMD MSW Signed Multiply Word with Rounding)
__STATIC_FORCEINLINE long __RV_KWMMUL(long a, long b)
KWMMUL (SIMD Saturating MSW Signed Multiply Word & Double)
__STATIC_FORCEINLINE long __RV_KWMMUL_U(long a, long b)
KWMMUL.u (SIMD Saturating MSW Signed Multiply Word & Double with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAC_U(long t, long a, long b)
KMMAC.u (SIMD Saturating MSW Signed Multiply Word and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAC(long t, long a, long b)
KMMAC (SIMD Saturating MSW Signed Multiply Word and Add)
__STATIC_FORCEINLINE long __RV_KMMSB_U(long t, long a, long b)
KMMSB.u (SIMD Saturating MSW Signed Multiply Word and Subtraction with Rounding)
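
The MSW ("most significant word") group implements fractional 32-bit multiplication: SMMUL keeps the top 32 bits of the 64-bit product, i.e. (a*b) >> 32, which is a Q30 result for Q31 inputs; the KWMMUL forms double the product first, restoring Q31, and saturate the single -1.0 * -1.0 overflow case; the .u forms round the discarded bits instead of truncating. A minimal sketch under those assumptions (mul_q31 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: one-instruction Q31 x Q31 -> Q31 multiply with rounding
 * and saturation. */
__STATIC_FORCEINLINE long mul_q31(long a, long b)
{
    return __RV_KWMMUL_U(a, b);
}
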
__STATIC_FORCEINLINE unsigned long __RV_KSUB16(unsigned long a, unsigned long b)
KSUB16 (SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB16(unsigned long a, unsigned long b)
SUB16 (SIMD 16-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSUB16(unsigned long a, unsigned long b)
URSUB16 (SIMD 16-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS16(unsigned long a, unsigned long b)
UKCRAS16 (SIMD 16-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS16(unsigned long a, unsigned long b)
RCRAS16 (SIMD 16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA16(unsigned long a, unsigned long b)
UKCRSA16 (SIMD 16-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URADD16(unsigned long a, unsigned long b)
URADD16 (SIMD 16-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_STSA16(unsigned long a, unsigned long b)
STSA16 (SIMD 16-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS16(unsigned long a, unsigned long b)
URCRAS16 (SIMD 16-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA16(unsigned long a, unsigned long b)
KCRSA16 (SIMD 16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RADD16(unsigned long a, unsigned long b)
RADD16 (SIMD 16-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS16(unsigned long a, unsigned long b)
KSTAS16 (SIMD 16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKADD16(unsigned long a, unsigned long b)
UKADD16 (SIMD 16-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA16(unsigned long a, unsigned long b)
KSTSA16 (SIMD 16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_CRAS16(unsigned long a, unsigned long b)
CRAS16 (SIMD 16-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSTAS16(unsigned long a, unsigned long b)
URSTAS16 (SIMD 16-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA16(unsigned long a, unsigned long b)
RCRSA16 (SIMD 16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSUB16(unsigned long a, unsigned long b)
RSUB16 (SIMD 16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRSA16(unsigned long a, unsigned long b)
CRSA16 (SIMD 16-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS16(unsigned long a, unsigned long b)
KCRAS16 (SIMD 16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b)
UKSTAS16 (SIMD 16-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_ADD16(unsigned long a, unsigned long b)
ADD16 (SIMD 16-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA16(unsigned long a, unsigned long b)
URCRSA16 (SIMD 16-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA16(unsigned long a, unsigned long b)
URSTSA16 (SIMD 16-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB16(unsigned long a, unsigned long b)
UKSUB16 (SIMD 16-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KADD16(unsigned long a, unsigned long b)
KADD16 (SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS16(unsigned long a, unsigned long b)
RSTAS16 (SIMD 16-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b)
UKSTSA16 (SIMD 16-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA16(unsigned long a, unsigned long b)
RSTSA16 (SIMD 16-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_STAS16(unsigned long a, unsigned long b)
STAS16 (SIMD 16-bit Straight Addition & Subtraction)
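
The 16-bit family mirrors the 32-bit one, with the same prefix scheme selecting the overflow policy per lane. A minimal sketch of the three policies on the classic Q15 overflow case, 0x7FFF + 1 in the bottom lane (add16_flavours is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: identical lane inputs under wrap, saturate and halving. */
__STATIC_FORCEINLINE void add16_flavours(void)
{
    unsigned long a = 0x7FFF;
    unsigned long b = 0x0001;
    unsigned long wrap = __RV_ADD16(a, b);  /* lane wraps to 0x8000 (-1.0)   */
    unsigned long sat  = __RV_KADD16(a, b); /* lane saturates at 0x7FFF      */
    unsigned long half = __RV_RADD16(a, b); /* (0x7FFF + 1) >> 1 = 0x4000    */
    (void)wrap; (void)sat; (void)half;
}
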
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT16(unsigned long a, unsigned long b)
SCMPLT16 (SIMD 16-bit Signed Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE16(unsigned long a, unsigned long b)
UCMPLE16 (SIMD 16-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT16(unsigned long a, unsigned long b)
UCMPLT16 (SIMD 16-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ16(unsigned long a, unsigned long b)
CMPEQ16 (SIMD 16-bit Integer Compare Equal)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE16(unsigned long a, unsigned long b)
SCMPLE16 (SIMD 16-bit Signed Compare Less Than & Equal)
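
The compare instructions return per-lane masks (all ones where the predicate holds, all zeros elsewhere), which makes branchless lane selection a two-operation idiom. A minimal sketch (sel_min_u16 is an illustrative name; UMIN16 below does this particular case in one instruction, but the mask pattern generalises to predicates the min/max ops cannot express):

#include "core_feature_dsp.h"

/* Sketch only: per-lane unsigned minimum built from a compare mask. */
__STATIC_FORCEINLINE unsigned long sel_min_u16(unsigned long a, unsigned long b)
{
    unsigned long lt = __RV_UCMPLT16(a, b); /* 0xFFFF in lanes where a < b */
    return (a & lt) | (b & ~lt);
}
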
__STATIC_FORCEINLINE unsigned long __RV_CLO16(unsigned long a)
CLO16 (SIMD 16-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_KABS16(unsigned long a)
KABS16 (SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_CLRS16(unsigned long a)
CLRS16 (SIMD 16-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_SMIN16(unsigned long a, unsigned long b)
SMIN16 (SIMD 16-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLZ16(unsigned long a)
CLZ16 (SIMD 16-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_SMAX16(unsigned long a, unsigned long b)
SMAX16 (SIMD 16-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMAX16(unsigned long a, unsigned long b)
UMAX16 (SIMD 16-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMIN16(unsigned long a, unsigned long b)
UMIN16 (SIMD 16-bit Unsigned Minimum)
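
A common use of the paired min/max instructions is lane-wise clamping. A minimal sketch, assuming the caller has replicated the bounds into every lane (clamp16 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: clamp every signed 16-bit lane of x into [lo, hi]. */
__STATIC_FORCEINLINE unsigned long clamp16(unsigned long x,
                                           unsigned long lo, unsigned long hi)
{
    return __RV_SMIN16(__RV_SMAX16(x, lo), hi);
}
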
__STATIC_FORCEINLINE unsigned long long __RV_UMUL16(unsigned int a, unsigned int b)
UMUL16 (SIMD Unsigned 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX16(unsigned int a, unsigned int b)
SMULX16 (SIMD Signed Crossed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX16(unsigned int a, unsigned int b)
UMULX16 (SIMD Unsigned Crossed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHM16(unsigned long a, unsigned long b)
KHM16 (SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHMX16(unsigned long a, unsigned long b)
KHMX16 (SIMD Signed Saturating Crossed Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL16(unsigned int a, unsigned int b)
SMUL16 (SIMD Signed 16-bit Multiply)
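
Two multiply styles coexist here: the KHM forms stay in Q15 per lane, computing (a*b) >> 15 with saturation of the 0x8000 * 0x8000 case, while the (S/U)MUL forms widen, returning the full 32-bit products of both 16-bit lanes packed into a 64-bit result. A minimal sketch (mul16_demo is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: saturating Q15 multiply vs widening 16x16 multiply. */
__STATIC_FORCEINLINE void mul16_demo(unsigned int a, unsigned int b)
{
    unsigned long q15_lanes      = __RV_KHM16(a, b);  /* Q15 in, Q15 out      */
    unsigned long long wide      = __RV_SMUL16(a, b); /* two 32-bit products  */
    (void)q15_lanes; (void)wide;
}
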
__STATIC_FORCEINLINE unsigned long __RV_PKTT16(unsigned long a, unsigned long b)
PKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBB16(unsigned long a, unsigned long b)
PKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTB16(unsigned long a, unsigned long b)
PKTB16 (Pack Two 16-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBT16(unsigned long a, unsigned long b)
PKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
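
The pack instructions recombine 16-bit halves without any shift-and-mask sequences; BB/TT keep like halves together, BT/TB mix them. A minimal sketch, assuming an RV32 layout written as {top, bottom} (pack_demo is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: with x = {x1, x0} and y = {y1, y0}, gather both bottom
 * halves and both top halves in one instruction each. */
__STATIC_FORCEINLINE void pack_demo(unsigned long x, unsigned long y,
                                    unsigned long *bot, unsigned long *top)
{
    *bot = __RV_PKBB16(x, y); /* {x0, y0} */
    *top = __RV_PKTT16(x, y); /* {x1, y1} */
}
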
__STATIC_FORCEINLINE unsigned long __RV_SRA16_U(unsigned long a, unsigned long b)
SRA16.u (SIMD 16-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL16(unsigned long a, unsigned int b)
SLL16 (SIMD 16-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRL16_U(unsigned long a, unsigned int b)
SRL16.u (SIMD 16-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16(unsigned long a, int b)
KSLRA16 (SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRA16(unsigned long a, unsigned long b)
SRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL16(unsigned long a, unsigned int b)
SRL16 (SIMD 16-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLL16(unsigned long a, unsigned int b)
KSLL16 (SIMD 16-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16_U(unsigned long a, int b)
KSLRA16.u (SIMD 16-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
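
KSLRA16 interprets the sign of its shift amount, which suits block-floating-point scaling where the exponent correction may go either way: a positive count shifts left with saturation, a negative count shifts right arithmetically, and the .u form additionally rounds the right shift. A minimal sketch (scale16 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: scale packed Q15 lanes by 2^exp2 in either direction. */
__STATIC_FORCEINLINE unsigned long scale16(unsigned long lanes, int exp2)
{
    return __RV_KSLRA16_U(lanes, exp2);
}
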
__STATIC_FORCEINLINE unsigned long __RV_RADD8(unsigned long a, unsigned long b)
RADD8 (SIMD 8-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB8(unsigned long a, unsigned long b)
UKSUB8 (SIMD 8-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADD8(unsigned long a, unsigned long b)
URADD8 (SIMD 8-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKADD8(unsigned long a, unsigned long b)
UKADD8 (SIMD 8-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_ADD8(unsigned long a, unsigned long b)
ADD8 (SIMD 8-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_KADD8(unsigned long a, unsigned long b)
KADD8 (SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSUB8(unsigned long a, unsigned long b)
KSUB8 (SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB8(unsigned long a, unsigned long b)
SUB8 (SIMD 8-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSUB8(unsigned long a, unsigned long b)
URSUB8 (SIMD 8-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RSUB8(unsigned long a, unsigned long b)
RSUB8 (SIMD 8-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE8(unsigned long a, unsigned long b)
SCMPLE8 (SIMD 8-bit Signed Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT8(unsigned long a, unsigned long b)
UCMPLT8 (SIMD 8-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT8(unsigned long a, unsigned long b)
SCMPLT8 (SIMD 8-bit Signed Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE8(unsigned long a, unsigned long b)
UCMPLE8 (SIMD 8-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ8(unsigned long a, unsigned long b)
CMPEQ8 (SIMD 8-bit Integer Compare Equal)
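
Because CMPEQ8 yields 0xFF in every matching byte lane, one instruction can test four (RV32) or eight (RV64) bytes of a word against a replicated needle, the core step of a word-at-a-time memchr. A minimal sketch (word_has_byte is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: nonzero iff any byte lane of w equals c.  The 64-bit
 * replication constant truncates cleanly to 0x01010101 on RV32. */
__STATIC_FORCEINLINE int word_has_byte(unsigned long w, unsigned char c)
{
    unsigned long needle = c * (unsigned long)0x0101010101010101ULL;
    return __RV_CMPEQ8(w, needle) != 0;
}
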
__STATIC_FORCEINLINE unsigned long __RV_KABS8(unsigned long a)
KABS8 (SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_SMIN8(unsigned long a, unsigned long b)
SMIN8 (SIMD 8-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLRS8(unsigned long a)
CLRS8 (SIMD 8-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_UMAX8(unsigned long a, unsigned long b)
UMAX8 (SIMD 8-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_CLZ8(unsigned long a)
CLZ8 (SIMD 8-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_UMIN8(unsigned long a, unsigned long b)
UMIN8 (SIMD 8-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLO8(unsigned long a)
CLO8 (SIMD 8-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_SMAX8(unsigned long a, unsigned long b)
SMAX8 (SIMD 8-bit Signed Maximum)
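
The 8-bit min/max forms process a whole word of samples per instruction, which makes running envelopes cheap. A minimal sketch over packed unsigned 8-bit samples (peak_u8 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: running per-byte peak over a buffer, one word at a time. */
static unsigned long peak_u8(const unsigned long *buf, unsigned n_words)
{
    unsigned long peak = 0;
    unsigned i;
    for (i = 0; i < n_words; i++) {
        peak = __RV_UMAX8(peak, buf[i]); /* lane-wise maximum */
    }
    return peak;
}
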
__STATIC_FORCEINLINE unsigned long long __RV_UMUL8(unsigned int a, unsigned int b)
UMUL8 (SIMD Unsigned 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHMX8(unsigned long a, unsigned long b)
KHMX8 (SIMD Signed Saturating Crossed Q7 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHM8(unsigned long a, unsigned long b)
KHM8 (SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX8(unsigned int a, unsigned int b)
SMULX8 (SIMD Signed Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL8(unsigned int a, unsigned int b)
SMUL8 (SIMD Signed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX8(unsigned int a, unsigned int b)
UMULX8 (SIMD Unsigned Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_SRL8(unsigned long a, unsigned int b)
SRL8 (SIMD 8-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8_U(unsigned long a, int b)
KSLRA8.u (SIMD 8-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL8_U(unsigned long a, unsigned int b)
SRL8.u (SIMD 8-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA8(unsigned long a, unsigned int b)
SRA8 (SIMD 8-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8(unsigned long a, int b)
KSLRA8 (SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL8(unsigned long a, unsigned int b)
SLL8 (SIMD 8-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA8_U(unsigned long a, unsigned int b)
SRA8.u (SIMD 8-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLL8(unsigned long a, unsigned int b)
KSLL8 (SIMD 8-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD820(unsigned long a)
ZUNPKD820 (Unsigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD810(unsigned long a)
ZUNPKD810 (Unsigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD830(unsigned long a)
ZUNPKD830 (Unsigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD832(unsigned long a)
ZUNPKD832 (Unsigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD831(unsigned long a)
ZUNPKD831 (Unsigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD832(unsigned long a)
SUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD830(unsigned long a)
SUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD820(unsigned long a)
SUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD831(unsigned long a)
SUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD810(unsigned long a)
SUNPKD810 (Signed Unpacking Bytes 1 & 0)
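
The unpack instructions widen selected bytes into 16-bit halves in one step, e.g. when promoting int8 samples or coefficients before a 16-bit MAC. A minimal sketch, assuming an RV32 layout written as {top, bottom}; the SUNPKD forms sign-extend and the ZUNPKD forms zero-extend (widen_s8 is an illustrative name):

#include "core_feature_dsp.h"

/* Sketch only: widen the four signed bytes of w into two words of
 * sign-extended 16-bit halves. */
__STATIC_FORCEINLINE void widen_s8(unsigned long w,
                                   unsigned long *lo, unsigned long *hi)
{
    *lo = __RV_SUNPKD810(w); /* {sext(byte1), sext(byte0)} */
    *hi = __RV_SUNPKD832(w); /* {sext(byte3), sext(byte2)} */
}
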