18 #ifndef __CORE_FEATURE_DSP__
19 #define __CORE_FEATURE_DSP__
35 #include "core_feature_base.h"
37 #if defined(__DSP_PRESENT) && (__DSP_PRESENT == 1)
39 #if defined(__INC_INTRINSIC_API) && (__INC_INTRINSIC_API == 1)
41 #include <rvp_intrinsic.h>
43 #if !defined(__ICCRISCV__) && !defined(__llvm__)
44 #include <rvp_intrinsic.h>
455 unsigned long result;
456 __ASM volatile(
"add8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
496 unsigned long result;
497 __ASM volatile(
"add16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
549 unsigned long long result;
550 __ASM volatile(
"add64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
589 __ASM volatile(
"ave %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
629 unsigned long result;
630 __ASM volatile(
"bitrev %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
669 #define __RV_BITREVI(a, b) \
671 unsigned long __res; \
672 unsigned long __a = (unsigned long)(a); \
673 __ASM volatile("bitrevi %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
712 unsigned long result;
713 __ASM volatile(
"bpick %0, %1, %2, %3" :
"=r"(result) :
"r"(a),
"r"(b),
"r"(c));
737 __ASM volatile(
"clrov ");
782 unsigned long result;
783 __ASM volatile(
"clrs8 %0, %1" :
"=r"(result) :
"r"(a));
829 unsigned long result;
830 __ASM volatile(
"clrs16 %0, %1" :
"=r"(result) :
"r"(a));
876 unsigned long result;
877 __ASM volatile(
"clrs32 %0, %1" :
"=r"(result) :
"r"(a));
923 unsigned long result;
924 __ASM volatile(
"clo8 %0, %1" :
"=r"(result) :
"r"(a));
970 unsigned long result;
971 __ASM volatile(
"clo16 %0, %1" :
"=r"(result) :
"r"(a));
1017 unsigned long result;
1018 __ASM volatile(
"clo32 %0, %1" :
"=r"(result) :
"r"(a));
1064 unsigned long result;
1065 __ASM volatile(
"clz8 %0, %1" :
"=r"(result) :
"r"(a));
1111 unsigned long result;
1112 __ASM volatile(
"clz16 %0, %1" :
"=r"(result) :
"r"(a));
1158 unsigned long result;
1159 __ASM volatile(
"clz32 %0, %1" :
"=r"(result) :
"r"(a));
1200 unsigned long result;
1201 __ASM volatile(
"cmpeq8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1242 unsigned long result;
1243 __ASM volatile(
"cmpeq16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1288 unsigned long result;
1289 __ASM volatile(
"cras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1334 unsigned long result;
1335 __ASM volatile(
"crsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1372 #define __RV_INSB(t, a, b) \
1374 unsigned long __t = (unsigned long)(t); \
1375 unsigned long __a = (unsigned long)(a); \
1376 __ASM volatile("insb %0, %1, %2" : "+r"(__t) : "r"(__a), "K"(b)); \
1420 unsigned long result;
1421 __ASM volatile(
"kabs8 %0, %1" :
"=r"(result) :
"r"(a));
1465 unsigned long result;
1466 __ASM volatile(
"kabs16 %0, %1" :
"=r"(result) :
"r"(a));
1512 unsigned long result;
1513 __ASM volatile(
"kabsw %0, %1" :
"=r"(result) :
"r"(a));
1559 unsigned long result;
1560 __ASM volatile(
"kadd8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1606 unsigned long result;
1607 __ASM volatile(
"kadd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1673 __ASM volatile(
"kadd64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1720 __ASM volatile(
"kaddh %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1768 __ASM volatile(
"kaddw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1824 unsigned long result;
1825 __ASM volatile(
"kcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1881 unsigned long result;
1882 __ASM volatile(
"kcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1937 __ASM volatile(
"kdmbb %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
1992 __ASM volatile(
"kdmbt %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2047 __ASM volatile(
"kdmtt %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2111 __ASM volatile(
"kdmabb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2175 __ASM volatile(
"kdmabt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2239 __ASM volatile(
"kdmatt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2300 unsigned long result;
2301 __ASM volatile(
"khm8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2362 unsigned long result;
2363 __ASM volatile(
"khmx8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2425 unsigned long result;
2426 __ASM volatile(
"khm16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2488 unsigned long result;
2489 __ASM volatile(
"khmx16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2542 __ASM volatile(
"khmbb %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2595 __ASM volatile(
"khmbt %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2648 __ASM volatile(
"khmtt %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
2711 __ASM volatile(
"kmabb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2774 __ASM volatile(
"kmabt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2837 __ASM volatile(
"kmatt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2901 __ASM volatile(
"kmada %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
2965 __ASM volatile(
"kmaxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3037 __ASM volatile(
"kmads %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3109 __ASM volatile(
"kmadrs %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3181 __ASM volatile(
"kmaxds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3249 __ASM volatile(
"kmar64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3300 __ASM volatile(
"kmda %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
3351 __ASM volatile(
"kmxda %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
3412 __ASM volatile(
"kmmac %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3473 __ASM volatile(
"kmmac.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3535 __ASM volatile(
"kmmawb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3597 __ASM volatile(
"kmmawb.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3665 __ASM volatile(
"kmmawb2 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3733 __ASM volatile(
"kmmawb2.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3795 __ASM volatile(
"kmmawt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3857 __ASM volatile(
"kmmawt.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3925 __ASM volatile(
"kmmawt2 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
3993 __ASM volatile(
"kmmawt2.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4053 __ASM volatile(
"kmmsb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4113 __ASM volatile(
"kmmsb.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4169 __ASM volatile(
"kmmwb2 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4225 __ASM volatile(
"kmmwb2.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4281 __ASM volatile(
"kmmwt2 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4337 __ASM volatile(
"kmmwt2.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4399 __ASM volatile(
"kmsda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4461 __ASM volatile(
"kmsxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4529 __ASM volatile(
"kmsr64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
4577 __ASM volatile(
"ksllw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4621 #define __RV_KSLLIW(a, b) \
4624 long __a = (long)(a); \
4625 __ASM volatile("kslliw %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
4677 unsigned long result;
4678 __ASM volatile(
"ksll8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4727 #define __RV_KSLLI8(a, b) \
4729 unsigned long __res; \
4730 unsigned long __a = (unsigned long)(a); \
4731 __ASM volatile("kslli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
4783 unsigned long result;
4784 __ASM volatile(
"ksll16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4833 #define __RV_KSLLI16(a, b) \
4835 unsigned long __res; \
4836 unsigned long __a = (unsigned long)(a); \
4837 __ASM volatile("kslli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
4903 unsigned long result;
4904 __ASM volatile(
"kslra8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
4970 unsigned long result;
4971 __ASM volatile(
"kslra8.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5037 unsigned long result;
5038 __ASM volatile(
"kslra16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5104 unsigned long result;
5105 __ASM volatile(
"kslra16.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5165 __ASM volatile(
"kslraw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5228 __ASM volatile(
"kslraw.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5284 unsigned long result;
5285 __ASM volatile(
"kstas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5341 unsigned long result;
5342 __ASM volatile(
"kstsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5388 unsigned long result;
5389 __ASM volatile(
"ksub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5436 unsigned long result;
5437 __ASM volatile(
"ksub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5503 __ASM volatile(
"ksub64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5550 __ASM volatile(
"ksubh %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5598 __ASM volatile(
"ksubw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5654 __ASM volatile(
"kwmmul %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5710 __ASM volatile(
"kwmmul.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5755 __ASM volatile(
"maddr32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
5795 __ASM volatile(
"maxw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5831 __ASM volatile(
"minw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5877 __ASM volatile(
"msubr32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
5928 unsigned long long result;
5929 __ASM volatile(
"mulr64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
5981 __ASM volatile(
"mulsr64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6019 unsigned long result;
6020 __ASM volatile(
"pbsad %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6061 __ASM volatile(
"pbsada %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
6111 unsigned long result;
6112 __ASM volatile(
"pkbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6162 unsigned long result;
6163 __ASM volatile(
"pkbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6213 unsigned long result;
6214 __ASM volatile(
"pktt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6264 unsigned long result;
6265 __ASM volatile(
"pktb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6309 unsigned long result;
6310 __ASM volatile(
"radd8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6354 unsigned long result;
6355 __ASM volatile(
"radd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6408 __ASM volatile(
"radd64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6456 __ASM volatile(
"raddw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6504 unsigned long result;
6505 __ASM volatile(
"rcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6553 unsigned long result;
6554 __ASM volatile(
"rcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6580 unsigned long result;
6581 __ASM volatile(
"rdov %0" :
"=r"(result));
6629 unsigned long result;
6630 __ASM volatile(
"rstas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6678 unsigned long result;
6679 __ASM volatile(
"rstsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6725 unsigned long result;
6726 __ASM volatile(
"rsub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6772 unsigned long result;
6773 __ASM volatile(
"rsub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6827 __ASM volatile(
"rsub64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6875 __ASM volatile(
"rsubw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
6920 #define __RV_SCLIP8(a, b) \
6922 unsigned long __res; \
6923 unsigned long __a = (unsigned long)(a); \
6924 __ASM volatile("sclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
6969 #define __RV_SCLIP16(a, b) \
6971 unsigned long __res; \
6972 unsigned long __a = (unsigned long)(a); \
6973 __ASM volatile("sclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
7018 #define __RV_SCLIP32(a, b) \
7021 long __a = (long)(a); \
7022 __ASM volatile("sclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
7061 unsigned long result;
7062 __ASM volatile(
"scmple8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7101 unsigned long result;
7102 __ASM volatile(
"scmple16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7140 unsigned long result;
7141 __ASM volatile(
"scmplt8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7179 unsigned long result;
7180 __ASM volatile(
"scmplt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7220 unsigned long result;
7221 __ASM volatile(
"sll8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7258 #define __RV_SLLI8(a, b) \
7260 unsigned long __res; \
7261 unsigned long __a = (unsigned long)(a); \
7262 __ASM volatile("slli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
7302 unsigned long result;
7303 __ASM volatile(
"sll16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7340 #define __RV_SLLI16(a, b) \
7342 unsigned long __res; \
7343 unsigned long __a = (unsigned long)(a); \
7344 __ASM volatile("slli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
7404 __ASM volatile(
"smal %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
7486 __ASM volatile(
"smalbb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
7568 __ASM volatile(
"smalbt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
7650 __ASM volatile(
"smaltt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
7734 __ASM volatile(
"smalda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
7818 __ASM volatile(
"smalxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
7909 __ASM volatile(
"smalds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8000 __ASM volatile(
"smaldrs %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8091 __ASM volatile(
"smalxds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8143 __ASM volatile(
"smar64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8187 __ASM volatile(
"smaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8232 __ASM volatile(
"smaqa.su %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
8270 unsigned long result;
8271 __ASM volatile(
"smax8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8309 unsigned long result;
8310 __ASM volatile(
"smax16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8362 __ASM volatile(
"smbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8414 __ASM volatile(
"smbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8466 __ASM volatile(
"smtt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8525 __ASM volatile(
"smds %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8584 __ASM volatile(
"smdrs %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8643 __ASM volatile(
"smxds %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8681 unsigned long result;
8682 __ASM volatile(
"smin8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8720 unsigned long result;
8721 __ASM volatile(
"smin16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8772 __ASM volatile(
"smmul %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8823 __ASM volatile(
"smmul.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8873 __ASM volatile(
"smmwb %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8923 __ASM volatile(
"smmwb.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
8973 __ASM volatile(
"smmwt %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9023 __ASM volatile(
"smmwt.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9105 __ASM volatile(
"smslda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
9187 __ASM volatile(
"smslxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
9240 __ASM volatile(
"smsr64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
9322 unsigned long long result;
9323 __ASM volatile(
"smul8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9405 unsigned long long result;
9406 __ASM volatile(
"smulx8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9490 unsigned long long result;
9491 __ASM volatile(
"smul16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9575 unsigned long long result;
9576 __ASM volatile(
"smulx16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9630 __ASM volatile(
"sra.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9682 #define __RV_SRAI_U(a, b) \
9685 long __a = (long)(a); \
9686 __ASM volatile("srai.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9739 unsigned long result;
9740 __ASM volatile(
"sra8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9793 unsigned long result;
9794 __ASM volatile(
"sra8.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
9844 #define __RV_SRAI8(a, b) \
9846 unsigned long __res; \
9847 unsigned long __a = (unsigned long)(a); \
9848 __ASM volatile("srai8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9898 #define __RV_SRAI8_U(a, b) \
9900 unsigned long __res; \
9901 unsigned long __a = (unsigned long)(a); \
9902 __ASM volatile("srai8.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
9955 unsigned long result;
9956 __ASM volatile(
"sra16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10009 unsigned long result;
10010 __ASM volatile(
"sra16.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10061 #define __RV_SRAI16(a, b) \
10063 unsigned long __res; \
10064 unsigned long __a = (unsigned long)(a); \
10065 __ASM volatile("srai16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10116 #define __RV_SRAI16_U(a, b) \
10118 unsigned long __res; \
10119 unsigned long __a = (unsigned long)(a); \
10120 __ASM volatile("srai16.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10172 unsigned long result;
10173 __ASM volatile(
"srl8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10225 unsigned long result;
10226 __ASM volatile(
"srl8.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10275 #define __RV_SRLI8(a, b) \
10277 unsigned long __res; \
10278 unsigned long __a = (unsigned long)(a); \
10279 __ASM volatile("srli8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10328 #define __RV_SRLI8_U(a, b) \
10330 unsigned long __res; \
10331 unsigned long __a = (unsigned long)(a); \
10332 __ASM volatile("srli8.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10383 unsigned long result;
10384 __ASM volatile(
"srl16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10435 unsigned long result;
10436 __ASM volatile(
"srl16.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10485 #define __RV_SRLI16(a, b) \
10487 unsigned long __res; \
10488 unsigned long __a = (unsigned long)(a); \
10489 __ASM volatile("srli16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10538 #define __RV_SRLI16_U(a, b) \
10540 unsigned long __res; \
10541 unsigned long __a = (unsigned long)(a); \
10542 __ASM volatile("srli16.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
10587 unsigned long result;
10588 __ASM volatile(
"stas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10633 unsigned long result;
10634 __ASM volatile(
"stsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10674 unsigned long result;
10675 __ASM volatile(
"sub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10715 unsigned long result;
10716 __ASM volatile(
"sub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10769 unsigned long long result;
10770 __ASM volatile(
"sub64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
10815 unsigned long result;
10816 __ASM volatile(
"sunpkd810 %0, %1" :
"=r"(result) :
"r"(a));
10861 unsigned long result;
10862 __ASM volatile(
"sunpkd820 %0, %1" :
"=r"(result) :
"r"(a));
10907 unsigned long result;
10908 __ASM volatile(
"sunpkd830 %0, %1" :
"=r"(result) :
"r"(a));
10953 unsigned long result;
10954 __ASM volatile(
"sunpkd831 %0, %1" :
"=r"(result) :
"r"(a));
10999 unsigned long result;
11000 __ASM volatile(
"sunpkd832 %0, %1" :
"=r"(result) :
"r"(a));
11036 unsigned long result;
11037 __ASM volatile(
"swap8 %0, %1" :
"=r"(result) :
"r"(a));
11073 unsigned long result;
11074 __ASM volatile(
"swap16 %0, %1" :
"=r"(result) :
"r"(a));
11118 #define __RV_UCLIP8(a, b) \
11120 unsigned long __res; \
11121 unsigned long __a = (unsigned long)(a); \
11122 __ASM volatile("uclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11167 #define __RV_UCLIP16(a, b) \
11169 unsigned long __res; \
11170 unsigned long __a = (unsigned long)(a); \
11171 __ASM volatile("uclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11217 #define __RV_UCLIP32(a, b) \
11219 unsigned long __res; \
11220 unsigned long __a = (unsigned long)(a); \
11221 __ASM volatile("uclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
11260 unsigned long result;
11261 __ASM volatile(
"ucmple8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11300 unsigned long result;
11301 __ASM volatile(
"ucmple16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11339 unsigned long result;
11340 __ASM volatile(
"ucmplt8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11378 unsigned long result;
11379 __ASM volatile(
"ucmplt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11423 unsigned long result;
11424 __ASM volatile(
"ukadd8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11468 unsigned long result;
11469 __ASM volatile(
"ukadd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11530 unsigned long long result;
11531 __ASM volatile(
"ukadd64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11572 unsigned long result;
11573 __ASM volatile(
"ukaddh %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11615 unsigned long result;
11616 __ASM volatile(
"ukaddw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11671 unsigned long result;
11672 __ASM volatile(
"ukcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11726 unsigned long result;
11727 __ASM volatile(
"ukcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11791 __ASM volatile(
"ukmar64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
11856 __ASM volatile(
"ukmsr64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
11911 unsigned long result;
11912 __ASM volatile(
"ukstas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
11966 unsigned long result;
11967 __ASM volatile(
"ukstsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12011 unsigned long result;
12012 __ASM volatile(
"uksub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12056 unsigned long result;
12057 __ASM volatile(
"uksub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12119 unsigned long long result;
12120 __ASM volatile(
"uksub64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12165 unsigned long result;
12166 __ASM volatile(
"uksubh %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12209 unsigned long result;
12210 __ASM volatile(
"uksubw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12263 __ASM volatile(
"umar64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
12307 __ASM volatile(
"umaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
12345 unsigned long result;
12346 __ASM volatile(
"umax8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12384 unsigned long result;
12385 __ASM volatile(
"umax16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12423 unsigned long result;
12424 __ASM volatile(
"umin8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12462 unsigned long result;
12463 __ASM volatile(
"umin16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12516 __ASM volatile(
"umsr64 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
12599 unsigned long long result;
12600 __ASM volatile(
"umul8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12683 unsigned long long result;
12684 __ASM volatile(
"umulx8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12768 unsigned long long result;
12769 __ASM volatile(
"umul16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12853 unsigned long long result;
12854 __ASM volatile(
"umulx16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12900 unsigned long result;
12901 __ASM volatile(
"uradd8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12947 unsigned long result;
12948 __ASM volatile(
"uradd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
12999 unsigned long long result;
13000 __ASM volatile(
"uradd64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13047 unsigned long result;
13048 __ASM volatile(
"uraddw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13096 unsigned long result;
13097 __ASM volatile(
"urcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13145 unsigned long result;
13146 __ASM volatile(
"urcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13194 unsigned long result;
13195 __ASM volatile(
"urstas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13243 unsigned long result;
13244 __ASM volatile(
"urstsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13290 unsigned long result;
13291 __ASM volatile(
"ursub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13337 unsigned long result;
13338 __ASM volatile(
"ursub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13391 unsigned long long result;
13392 __ASM volatile(
"ursub64 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13439 unsigned long result;
13440 __ASM volatile(
"ursubw %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13490 #define __RV_WEXTI(a, b) \
13492 unsigned long __res; \
13493 long long __a = (long long)(a); \
13494 __ASM volatile("wexti %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
13543 unsigned long result;
13544 __ASM volatile(
"wext %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13589 unsigned long result;
13590 __ASM volatile(
"zunpkd810 %0, %1" :
"=r"(result) :
"r"(a));
13635 unsigned long result;
13636 __ASM volatile(
"zunpkd820 %0, %1" :
"=r"(result) :
"r"(a));
13681 unsigned long result;
13682 __ASM volatile(
"zunpkd830 %0, %1" :
"=r"(result) :
"r"(a));
13727 unsigned long result;
13728 __ASM volatile(
"zunpkd831 %0, %1" :
"=r"(result) :
"r"(a));
13773 unsigned long result;
13774 __ASM volatile(
"zunpkd832 %0, %1" :
"=r"(result) :
"r"(a));
13779 #if (__RISCV_XLEN == 64) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)
13815 unsigned long result;
13816 __ASM volatile(
"add32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13858 unsigned long result;
13859 __ASM volatile(
"cras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13900 unsigned long result;
13901 __ASM volatile(
"crsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
13960 unsigned long result;
13961 __ASM volatile(
"kabs32 %0, %1" :
"=r"(result) :
"r"(a));
14006 unsigned long result;
14007 __ASM volatile(
"kadd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14057 unsigned long result;
14058 __ASM volatile(
"kcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14107 unsigned long result;
14108 __ASM volatile(
"kcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14159 unsigned long result;
14160 __ASM volatile(
"kdmbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14211 unsigned long result;
14212 __ASM volatile(
"kdmbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14263 unsigned long result;
14264 __ASM volatile(
"kdmtt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14329 __ASM volatile(
"kdmabb16 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14394 __ASM volatile(
"kdmabt16 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14459 __ASM volatile(
"kdmatt16 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14511 unsigned long result;
14512 __ASM volatile(
"khmbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14564 unsigned long result;
14565 __ASM volatile(
"khmbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14617 unsigned long result;
14618 __ASM volatile(
"khmtt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
14680 __ASM volatile(
"kmabb32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14742 __ASM volatile(
"kmabt32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14804 __ASM volatile(
"kmatt32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14860 __ASM volatile(
"kmada32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14916 __ASM volatile(
"kmaxda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
14968 __ASM volatile(
"kmda32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15020 __ASM volatile(
"kmxda32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15084 __ASM volatile(
"kmads32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
15148 __ASM volatile(
"kmadrs32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
15212 __ASM volatile(
"kmaxds32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
15267 __ASM volatile(
"kmsda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
15322 __ASM volatile(
"kmsxda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
15373 unsigned long result;
15374 __ASM volatile(
"ksll32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15422 #define __RV_KSLLI32(a, b) \
15424 unsigned long __res; \
15425 unsigned long __a = (unsigned long)(a); \
15426 __ASM volatile("kslli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
15491 unsigned long result;
15492 __ASM volatile(
"kslra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15557 unsigned long result;
15558 __ASM volatile(
"kslra32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15609 unsigned long result;
15610 __ASM volatile(
"kstas32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15660 unsigned long result;
15661 __ASM volatile(
"kstsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15707 unsigned long result;
15708 __ASM volatile(
"ksub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15755 unsigned long result;
15756 __ASM volatile(
"pkbb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15803 unsigned long result;
15804 __ASM volatile(
"pkbt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15851 unsigned long result;
15852 __ASM volatile(
"pktt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15899 unsigned long result;
15900 __ASM volatile(
"pktb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15945 unsigned long result;
15946 __ASM volatile(
"radd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
15992 unsigned long result;
15993 __ASM volatile(
"rcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16039 unsigned long result;
16040 __ASM volatile(
"rcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16086 unsigned long result;
16087 __ASM volatile(
"rstas32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16132 unsigned long result;
16133 __ASM volatile(
"rstsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16178 unsigned long result;
16179 __ASM volatile(
"rsub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16218 unsigned long result;
16219 __ASM volatile(
"sll32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16255 #define __RV_SLLI32(a, b) \
16257 unsigned long __res; \
16258 unsigned long __a = (unsigned long)(a); \
16259 __ASM volatile("slli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16296 unsigned long result;
16297 __ASM volatile(
"smax32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16346 __ASM volatile(
"smbb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16395 __ASM volatile(
"smbt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16444 __ASM volatile(
"smtt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16497 __ASM volatile(
"smds32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16550 __ASM volatile(
"smdrs32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16603 __ASM volatile(
"smxds32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16640 unsigned long result;
16641 __ASM volatile(
"smin32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16693 unsigned long result;
16694 __ASM volatile(
"sra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16746 unsigned long result;
16747 __ASM volatile(
"sra32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
16797 #define __RV_SRAI32(a, b) \
16799 unsigned long __res; \
16800 unsigned long __a = (unsigned long)(a); \
16801 __ASM volatile("srai32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16851 #define __RV_SRAI32_U(a, b) \
16853 unsigned long __res; \
16854 unsigned long __a = (unsigned long)(a); \
16855 __ASM volatile("srai32.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16897 #define __RV_SRAIW_U(a, b) \
16900 int __a = (int)(a); \
16901 __ASM volatile("sraiw.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
16953 unsigned long result;
16954 __ASM volatile(
"srl32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17006 unsigned long result;
17007 __ASM volatile(
"srl32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17055 #define __RV_SRLI32(a, b) \
17057 unsigned long __res; \
17058 unsigned long __a = (unsigned long)(a); \
17059 __ASM volatile("srli32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
17107 #define __RV_SRLI32_U(a, b) \
17109 unsigned long __res; \
17110 unsigned long __a = (unsigned long)(a); \
17111 __ASM volatile("srli32.u %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
17153 unsigned long result;
17154 __ASM volatile(
"stas32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17195 unsigned long result;
17196 __ASM volatile(
"stsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17235 unsigned long result;
17236 __ASM volatile(
"sub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17279 unsigned long result;
17280 __ASM volatile(
"ukadd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17332 unsigned long result;
17333 __ASM volatile(
"ukcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17384 unsigned long result;
17385 __ASM volatile(
"ukcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17437 unsigned long result;
17438 __ASM volatile(
"ukstas32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17489 unsigned long result;
17490 __ASM volatile(
"ukstsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17533 unsigned long result;
17534 __ASM volatile(
"uksub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17571 unsigned long result;
17572 __ASM volatile(
"umax32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17609 unsigned long result;
17610 __ASM volatile(
"umin32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17655 unsigned long result;
17656 __ASM volatile(
"uradd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17702 unsigned long result;
17703 __ASM volatile(
"urcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17749 unsigned long result;
17750 __ASM volatile(
"urcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17796 unsigned long result;
17797 __ASM volatile(
"urstas32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17843 unsigned long result;
17844 __ASM volatile(
"urstsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17889 unsigned long result;
17890 __ASM volatile(
"ursub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
17934 unsigned long result;
17935 __ASM volatile(
"expd80 %0, %1" :
"=r"(result) :
"r"(a));
17969 unsigned long result;
17970 __ASM volatile(
"expd81 %0, %1" :
"=r"(result) :
"r"(a));
18004 unsigned long result;
18005 __ASM volatile(
"expd82 %0, %1" :
"=r"(result) :
"r"(a));
18039 unsigned long result;
18040 __ASM volatile(
"expd83 %0, %1" :
"=r"(result) :
"r"(a));
18045 #if (__RISCV_XLEN == 64)
18075 unsigned long result;
18076 __ASM volatile(
"expd84 %0, %1" :
"=r"(result) :
"r"(a));
18110 unsigned long result;
18111 __ASM volatile(
"expd85 %0, %1" :
"=r"(result) :
"r"(a));
18145 unsigned long result;
18146 __ASM volatile(
"expd86 %0, %1" :
"=r"(result) :
"r"(a));
18180 unsigned long result;
18181 __ASM volatile(
"expd87 %0, %1" :
"=r"(result) :
"r"(a));
18187 #if (__RISCV_XLEN == 32) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)
18257 unsigned long long result;
18258 __ASM volatile(
"dkhm8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18312 unsigned long long result;
18313 __ASM volatile(
"dkhm16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18357 unsigned long long result;
18358 __ASM volatile(
"dkabs8 %0, %1" :
"=r"(result) :
"r"(a));
18402 unsigned long long result;
18403 __ASM volatile(
"dkabs16 %0, %1" :
"=r"(result) :
"r"(a));
18460 unsigned long long result;
18461 __ASM volatile(
"dkslra8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18519 unsigned long long result;
18520 __ASM volatile(
"dkslra16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18566 unsigned long long result;
18567 __ASM volatile(
"dkadd8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18613 unsigned long long result;
18614 __ASM volatile(
"dkadd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18660 unsigned long long result;
18661 __ASM volatile(
"dksub8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18708 unsigned long long result;
18709 __ASM volatile(
"dksub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18761 unsigned long long result;
18762 __ASM volatile(
"dkhmx8 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18814 unsigned long long result;
18815 __ASM volatile(
"dkhmx16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18859 unsigned long long result;
18860 __ASM volatile(
"dsmmul %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18904 unsigned long long result;
18905 __ASM volatile(
"dsmmul.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18951 unsigned long long result;
18952 __ASM volatile(
"dkwmmul %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
18998 unsigned long long result;
18999 __ASM volatile(
"dkwmmul.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19043 unsigned long long result;
19044 __ASM volatile(
"dkabs32 %0, %1" :
"=r"(result) :
"r"(a));
19096 unsigned long long result;
19097 __ASM volatile(
"dkslra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19143 unsigned long long result;
19144 __ASM volatile(
"dkadd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19190 unsigned long long result;
19191 __ASM volatile(
"dksub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19228 unsigned long long result;
19229 __ASM volatile(
"dradd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19266 unsigned long long result;
19267 __ASM volatile(
"dsub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19304 unsigned long long result;
19305 __ASM volatile(
"dradd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19342 unsigned long long result;
19343 __ASM volatile(
"dsub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19387 unsigned long long result;
19388 __ASM volatile(
"dmsr16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19433 unsigned long long result;
19434 __ASM volatile(
"dmsr17 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19474 unsigned long long result;
19475 __ASM volatile(
"dmsr33 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19515 unsigned long long result;
19516 __ASM volatile(
"dmxsr33 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19553 unsigned long result;
19554 __ASM volatile(
"dredas16 %0, %1" :
"=r"(result) :
"r"(a));
19589 unsigned long result;
19590 __ASM volatile(
"dredsa16 %0, %1" :
"=r"(result) :
"r"(a));
19637 __ASM volatile(
"dkclip64 %0, %1" :
"=r"(result) :
"r"(a));
19683 unsigned long long result;
19684 __ASM volatile(
"dkmda %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19730 unsigned long long result;
19731 __ASM volatile(
"dkmxda %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19771 unsigned long long result;
19772 __ASM volatile(
"dsmdrs %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19812 unsigned long long result;
19813 __ASM volatile(
"dsmxds %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19851 __ASM volatile(
"dsmbb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19890 __ASM volatile(
"dsmbb32.sra14 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19930 __ASM volatile(
"dsmbb32.sra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
19970 __ASM volatile(
"dsmbt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20010 __ASM volatile(
"dsmbt32.sra14 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20050 __ASM volatile(
"dsmbt32.sra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20090 __ASM volatile(
"dsmtt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20130 __ASM volatile(
"dsmtt32.sra14 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20170 __ASM volatile(
"dsmtt32.sra32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20206 unsigned long long result;
20207 __ASM volatile(
"dpkbb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20243 unsigned long long result;
20244 __ASM volatile(
"dpkbt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20280 unsigned long long result;
20281 __ASM volatile(
"dpktt32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20317 unsigned long long result;
20318 __ASM volatile(
"dpktb32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20355 unsigned long long result;
20356 __ASM volatile(
"dpktb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20393 unsigned long long result;
20394 __ASM volatile(
"dpkbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20431 unsigned long long result;
20432 __ASM volatile(
"dpkbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20469 unsigned long long result;
20470 __ASM volatile(
"dpktt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20514 unsigned long long result;
20515 __ASM volatile(
"dsra16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20552 unsigned long long result;
20553 __ASM volatile(
"dadd16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20590 unsigned long long result;
20591 __ASM volatile(
"dadd32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20631 unsigned long long result;
20632 __ASM volatile(
"dsmbb16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20672 unsigned long long result;
20673 __ASM volatile(
"dsmbt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20713 unsigned long long result;
20714 __ASM volatile(
"dsmtt16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20755 unsigned long long result;
20756 __ASM volatile(
"drcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20796 unsigned long long result;
20797 __ASM volatile(
"drcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20838 unsigned long long result;
20839 __ASM volatile(
"drcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20879 unsigned long long result;
20880 __ASM volatile(
"DRCRAS32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20934 unsigned long long result;
20935 __ASM volatile(
"dkcras16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
20989 unsigned long long result;
20990 __ASM volatile(
"dkcrsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21027 unsigned long long result;
21028 __ASM volatile(
"drsub16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21067 unsigned long long result;
21068 __ASM volatile(
"dstsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21107 unsigned long long result;
21108 __ASM volatile(
"DSTAS32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21157 unsigned long long result;
21158 __ASM volatile(
"dkcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21207 unsigned long long result;
21208 __ASM volatile(
"dkcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21247 unsigned long long result;
21248 __ASM volatile(
"dcrsa32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21287 unsigned long long result;
21288 __ASM volatile(
"dcras32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21342 unsigned long long result;
21343 __ASM volatile(
"dkstsa16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21397 unsigned long long result;
21398 __ASM volatile(
"dkstas16 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21442 #define __RV_DSCLIP8(a, b) \
21444 unsigned long long __res; \
21445 unsigned long long __a = (unsigned long long)(a); \
21446 __ASM volatile("dsclip8 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21490 #define __RV_DSCLIP16(a, b) \
21492 unsigned long long __res; \
21493 unsigned long long __a = (unsigned long long)(a); \
21494 __ASM volatile("dsclip16 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21538 #define __RV_DSCLIP32(a, b) \
21540 unsigned long long __res; \
21541 unsigned long long __a = (unsigned long long)(a); \
21542 __ASM volatile("dsclip32 %0, %1, %2" : "=r"(__res) : "r"(__a), "K"(b)); \
21579 unsigned long long result;
21580 __ASM volatile(
"drsub32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21615 unsigned long long result;
21616 __ASM volatile(
"dpack32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
21652 unsigned long long result;
21653 __ASM volatile(
"dsunpkd810 %0, %1" :
"=r"(result) :
"r"(a));
21689 unsigned long long result;
21690 __ASM volatile(
"dsunpkd820 %0, %1" :
"=r"(result) :
"r"(a));
21726 unsigned long long result;
21727 __ASM volatile(
"dsunpkd830 %0, %1" :
"=r"(result) :
"r"(a));
21763 unsigned long long result;
21764 __ASM volatile(
"dsunpkd831 %0, %1" :
"=r"(result) :
"r"(a));
21800 unsigned long long result;
21801 __ASM volatile(
"dsunpkd832 %0, %1" :
"=r"(result) :
"r"(a));
21837 unsigned long long result;
21838 __ASM volatile(
"dzunpkd810 %0, %1" :
"=r"(result) :
"r"(a));
21874 unsigned long long result;
21875 __ASM volatile(
"dzunpkd820 %0, %1" :
"=r"(result) :
"r"(a));
21911 unsigned long long result;
21912 __ASM volatile(
"dzunpkd830 %0, %1" :
"=r"(result) :
"r"(a));
21948 unsigned long long result;
21949 __ASM volatile(
"dzunpkd831 %0, %1" :
"=r"(result) :
"r"(a));
21985 unsigned long long result;
21986 __ASM volatile(
"dzunpkd832 %0, %1" :
"=r"(result) :
"r"(a));
22033 __ASM volatile(
"dkmmac %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22080 __ASM volatile(
"dkmmac.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22127 __ASM volatile(
"dkmmsb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22174 __ASM volatile(
"dkmmsb.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22221 __ASM volatile(
"dkmada %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22267 __ASM volatile(
"dkmaxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22314 __ASM volatile(
"dkmads %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22361 __ASM volatile(
"dkmadrs %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22407 __ASM volatile(
"dkmaxds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22453 __ASM volatile(
"dkmsda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22499 __ASM volatile(
"dkmsxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22548 __ASM volatile(
"dsmaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22597 __ASM volatile(
"dsmaqa.su %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22646 __ASM volatile(
"dumaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22688 __ASM volatile(
"dkmda32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
22730 __ASM volatile(
"dkmxda32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
22772 __ASM volatile(
"dkmada32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22815 __ASM volatile(
"dkmaxda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22859 __ASM volatile(
"dkmads32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22903 __ASM volatile(
"dkmadrs32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22947 __ASM volatile(
"dkmaxds32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
22990 __ASM volatile(
"dkmsda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23033 __ASM volatile(
"dkmsxda32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23077 __ASM volatile(
"dsmds32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23120 __ASM volatile(
"dsmdrs32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23164 __ASM volatile(
"dsmxds32 %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23210 __ASM volatile(
"dsmalda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23256 __ASM volatile(
"dsmalxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23302 __ASM volatile(
"dsmalds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23348 __ASM volatile(
"dsmaldrs %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23394 __ASM volatile(
"dsmalxds %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23439 __ASM volatile(
"dsmslda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23484 __ASM volatile(
"dsmslxda %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23535 __ASM volatile(
"ddsmaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23586 __ASM volatile(
"ddsmaqa.su %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23637 __ASM volatile(
"ddumaqa %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23678 __ASM volatile(
"dsma32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23719 __ASM volatile(
"dsmxs32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23760 __ASM volatile(
"dsmxa32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23801 __ASM volatile(
"dsms32.u %0, %1, %2" :
"=r"(result) :
"r"(a),
"r"(b));
23845 __ASM volatile(
"dsmada16 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23889 __ASM volatile(
"dsmaxda16 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23928 __ASM volatile(
"dksms32.u %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
23967 __ASM volatile(
"dmada32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24008 __ASM volatile(
"dsmalbb %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24051 __ASM volatile(
"dsmalbt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24094 __ASM volatile(
"dsmaltt %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24142 __ASM volatile(
"dkmabb32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24190 __ASM volatile(
"dkmabt32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24238 __ASM volatile(
"dkmatt32 %0, %1, %2" :
"+r"(t) :
"r"(a),
"r"(b));
24244 #elif defined (__ICCRISCV__)
24246 #if __riscv_xlen == 32
24247 #include "iar_nds32_intrinsic.h"
24248 #elif __riscv_xlen == 64
24249 #include "iar_nds64_intrinsic.h"
24251 #error "Unexpected RISC-V XLEN size."
24254 #pragma language=save
24255 #pragma language=extended
/* IAR compiler path: map the NMSIS __RV_* DSP intrinsic names onto the
 * IAR/Andes __nds__* intrinsics provided by iar_nds32/64_intrinsic.h.
 * Overflow (OV) status flag access. */
24258 #define __RV_CLROV __nds__clrov
24259 #define __RV_RDOV __nds__rdov
/* Plain (wrap-around) SIMD add/sub: 8-bit, 16-bit and 64-bit. */
24260 #define __RV_ADD8 __nds__add8
24261 #define __RV_SUB8 __nds__sub8
24262 #define __RV_ADD16 __nds__add16
24263 #define __RV_SUB16 __nds__sub16
24264 #define __RV_ADD64 __nds__add64
24265 #define __RV_SUB64 __nds__sub64
/* Signed halving (result >> 1, no overflow) add/sub variants. */
24266 #define __RV_RADD8 __nds__radd8
24267 #define __RV_RSUB8 __nds__rsub8
24268 #define __RV_RADD16 __nds__radd16
24269 #define __RV_RSUB16 __nds__rsub16
24270 #define __RV_RADD64 __nds__radd64
24271 #define __RV_RSUB64 __nds__rsub64
24272 #define __RV_RADDW __nds__raddw
24273 #define __RV_RSUBW __nds__rsubw
/* Unsigned halving add/sub variants. */
24274 #define __RV_URADD8 __nds__uradd8
24275 #define __RV_URSUB8 __nds__ursub8
24276 #define __RV_URADD16 __nds__uradd16
24277 #define __RV_URSUB16 __nds__ursub16
24278 #define __RV_URADD64 __nds__uradd64
24279 #define __RV_URSUB64 __nds__ursub64
24280 #define __RV_URADDW __nds__uraddw
24281 #define __RV_URSUBW __nds__ursubw
/* Signed saturating (K-prefix) add/sub variants. */
24282 #define __RV_KADD8 __nds__kadd8
24283 #define __RV_KSUB8 __nds__ksub8
24284 #define __RV_KADD16 __nds__kadd16
24285 #define __RV_KSUB16 __nds__ksub16
24286 #define __RV_KADD64 __nds__kadd64
24287 #define __RV_KSUB64 __nds__ksub64
24288 #define __RV_KADDH __nds__kaddh
24289 #define __RV_KSUBH __nds__ksubh
24290 #define __RV_KADDW __nds__kaddw
24291 #define __RV_KSUBW __nds__ksubw
/* Unsigned saturating (UK-prefix) add/sub variants. */
24292 #define __RV_UKADD8 __nds__ukadd8
24293 #define __RV_UKSUB8 __nds__uksub8
24294 #define __RV_UKADD16 __nds__ukadd16
24295 #define __RV_UKSUB16 __nds__uksub16
24296 #define __RV_UKADD64 __nds__ukadd64
24297 #define __RV_UKSUB64 __nds__uksub64
24298 #define __RV_UKADDH __nds__ukaddh
24299 #define __RV_UKSUBH __nds__uksubh
24300 #define __RV_UKADDW __nds__ukaddw
24301 #define __RV_UKSUBW __nds__uksubw
/* 16-bit cross add/sub (CRAS = add-high/sub-low, CRSA = sub-high/add-low),
 * with halving (R), unsigned-halving (UR), saturating (K/UK) variants. */
24302 #define __RV_CRAS16 __nds__cras16
24303 #define __RV_CRSA16 __nds__crsa16
24304 #define __RV_RCRAS16 __nds__rcras16
24305 #define __RV_RCRSA16 __nds__rcrsa16
24306 #define __RV_URCRAS16 __nds__urcras16
24307 #define __RV_URCRSA16 __nds__urcrsa16
24308 #define __RV_KCRAS16 __nds__kcras16
24309 #define __RV_KCRSA16 __nds__kcrsa16
24310 #define __RV_UKCRAS16 __nds__ukcras16
24311 #define __RV_UKCRSA16 __nds__ukcrsa16
/* SIMD shift mappings.
 * NOTE(review): the immediate forms __RV_SRAI8/__RV_SRAI16 are mapped to the
 * same register-operand intrinsics __nds__sra8/__nds__sra16 — presumably the
 * IAR intrinsic accepts a constant shift amount and emits the immediate
 * encoding itself; confirm against the IAR/Andes intrinsic documentation. */
24312 #define __RV_SRA8 __nds__sra8
24313 #define __RV_SRAI8 __nds__sra8
24314 #define __RV_SRA16 __nds__sra16
24315 #define __RV_SRAI16 __nds__sra16
24316 #define __RV_SRL8 __nds__srl8
24317 #define __RV_SRL16 __nds__srl16
24318 #define __RV_SLL8 __nds__sll8
24319 #define __RV_SLL16 __nds__sll16
/* Rounding (_U = round up on shift-out) arithmetic/logical right shifts. */
24320 #define __RV_SRA_U __nds__sra_u
24321 #define __RV_SRA8_U __nds__sra8_u
24322 #define __RV_SRA16_U __nds__sra16_u
24323 #define __RV_SRL8_U __nds__srl8_u
24324 #define __RV_SRL16_U __nds__srl16_u
/* Saturating left shift (KSLL) and saturating left / rounding arithmetic
 * right shift selected by sign of the shift amount (KSLRA). */
24325 #define __RV_KSLL8 __nds__ksll8
24326 #define __RV_KSLL16 __nds__ksll16
24327 #define __RV_KSLLW __nds__ksllw
24328 #define __RV_KSLRA8 __nds__kslra8
24329 #define __RV_KSLRA8_U __nds__kslra8_u
24330 #define __RV_KSLRA16 __nds__kslra16
24331 #define __RV_KSLRA16_U __nds__kslra16_u
24332 #define __RV_KSLRAW __nds__kslraw
24333 #define __RV_KSLRAW_U __nds__kslraw_u
/* SIMD element-wise compares: result lanes are all-ones on true, zero on
 * false. S-prefix = signed compare, U-prefix = unsigned compare. */
24334 #define __RV_CMPEQ8 __nds__cmpeq8
24335 #define __RV_CMPEQ16 __nds__cmpeq16
24336 #define __RV_SCMPLE8 __nds__scmple8
24337 #define __RV_SCMPLE16 __nds__scmple16
24338 #define __RV_SCMPLT8 __nds__scmplt8
24339 #define __RV_SCMPLT16 __nds__scmplt16
24340 #define __RV_UCMPLE8 __nds__ucmple8
24341 #define __RV_UCMPLE16 __nds__ucmple16
24342 #define __RV_UCMPLT8 __nds__ucmplt8
24343 #define __RV_UCMPLT16 __nds__ucmplt16
/* SIMD widening multiplies; X suffix = crossed operand pairing. */
24344 #define __RV_SMUL8 __nds__smul8
24345 #define __RV_UMUL8 __nds__umul8
24346 #define __RV_SMUL16 __nds__smul16
24347 #define __RV_UMUL16 __nds__umul16
24348 #define __RV_SMULX8 __nds__smulx8
24349 #define __RV_UMULX8 __nds__umulx8
24350 #define __RV_SMULX16 __nds__smulx16
24351 #define __RV_UMULX16 __nds__umulx16
/* Saturating Q7/Q15 fractional multiplies (KHM), X = crossed pairing. */
24352 #define __RV_KHM8 __nds__khm8
24353 #define __RV_KHMX8 __nds__khmx8
24354 #define __RV_KHM16 __nds__khm16
24355 #define __RV_KHMX16 __nds__khmx16
/* 64-bit result multiplies and most-significant-word multiply. */
24356 #define __RV_MULR64 __nds__mulr64
24357 #define __RV_MULSR64 __nds__mulsr64
24358 #define __RV_SMMUL __nds__smmul
24359 #define __RV_SMMUL_U __nds__smmul_u
/* Extract word from a 64-bit pair (WEXT). */
24360 #define __RV_WEXT __nds__wext
/* Byte unpack: S = sign-extend, Z = zero-extend; the digit pair selects
 * which source bytes land in the two 16-bit result halves. */
24361 #define __RV_SUNPKD810 __nds__sunpkd810
24362 #define __RV_SUNPKD820 __nds__sunpkd820
24363 #define __RV_SUNPKD830 __nds__sunpkd830
24364 #define __RV_SUNPKD831 __nds__sunpkd831
24365 #define __RV_SUNPKD832 __nds__sunpkd832
24366 #define __RV_ZUNPKD810 __nds__zunpkd810
24367 #define __RV_ZUNPKD820 __nds__zunpkd820
24368 #define __RV_ZUNPKD830 __nds__zunpkd830
24369 #define __RV_ZUNPKD831 __nds__zunpkd831
24370 #define __RV_ZUNPKD832 __nds__zunpkd832
/* 16-bit halfword pack: B = bottom half, T = top half of each source. */
24371 #define __RV_PKBB16 __nds__pkbb16
24372 #define __RV_PKBT16 __nds__pkbt16
24373 #define __RV_PKTT16 __nds__pktt16
24374 #define __RV_PKTB16 __nds__pktb16
/* Most-significant-word (Q31xQ31 -> Q31) multiply-accumulate family.
 * _U suffix = rounding variant. */
24375 #define __RV_KMMAC __nds__kmmac
24376 #define __RV_KMMAC_U __nds__kmmac_u
24377 #define __RV_KMMSB __nds__kmmsb
24378 #define __RV_KMMSB_U __nds__kmmsb_u
24379 #define __RV_KWMMUL __nds__kwmmul
24380 #define __RV_KWMMUL_U __nds__kwmmul_u
/* Word x halfword MSW multiplies (B = bottom half, T = top half). */
24381 #define __RV_SMMWB __nds__smmwb
24382 #define __RV_SMMWB_U __nds__smmwb_u
24383 #define __RV_SMMWT __nds__smmwt
24384 #define __RV_SMMWT_U __nds__smmwt_u
24385 #define __RV_KMMAWB __nds__kmmawb
24386 #define __RV_KMMAWB_U __nds__kmmawb_u
24387 #define __RV_KMMAWT __nds__kmmawt
24388 #define __RV_KMMAWT_U __nds__kmmawt_u
/* "2" variants: result doubled (shift left 1) with saturation. */
24389 #define __RV_KMMWB2 __nds__kmmwb2
24390 #define __RV_KMMWB2_U __nds__kmmwb2_u
24391 #define __RV_KMMWT2 __nds__kmmwt2
24392 #define __RV_KMMWT2_U __nds__kmmwt2_u
24393 #define __RV_KMMAWB2 __nds__kmmawb2
24394 #define __RV_KMMAWB2_U __nds__kmmawb2_u
24395 #define __RV_KMMAWT2 __nds__kmmawt2
24396 #define __RV_KMMAWT2_U __nds__kmmawt2_u
/* Signed 16x16 half-word multiplies and dual multiply (add/sub) reductions. */
24397 #define __RV_SMBB16 __nds__smbb16
24398 #define __RV_SMBT16 __nds__smbt16
24399 #define __RV_SMTT16 __nds__smtt16
24400 #define __RV_KMDA __nds__kmda
24401 #define __RV_KMXDA __nds__kmxda
24402 #define __RV_SMDS __nds__smds
24403 #define __RV_SMDRS __nds__smdrs
24404 #define __RV_SMXDS __nds__smxds
/* Saturating 16x16 multiply-accumulate into a 32-bit accumulator. */
24405 #define __RV_KMABB __nds__kmabb
24406 #define __RV_KMABT __nds__kmabt
24407 #define __RV_KMATT __nds__kmatt
24408 #define __RV_KMADA __nds__kmada
24409 #define __RV_KMAXDA __nds__kmaxda
24410 #define __RV_KMADS __nds__kmads
24411 #define __RV_KMADRS __nds__kmadrs
24412 #define __RV_KMAXDS __nds__kmaxds
24413 #define __RV_KMSDA __nds__kmsda
24414 #define __RV_KMSXDA __nds__kmsxda
/* Signed multiply-add-long and 8-bit quad multiply-accumulate. */
24415 #define __RV_SMAL __nds__smal
24416 #define __RV_SMAQA __nds__smaqa
24417 #define __RV_UMAQA __nds__umaqa
24418 #define __RV_SMAQA_SU __nds__smaqa_su
/* Accumulate into a 64-bit accumulator; K/UK = saturating variants. */
24419 #define __RV_SMAR64 __nds__smar64
24420 #define __RV_SMSR64 __nds__smsr64
24421 #define __RV_UMAR64 __nds__umar64
24422 #define __RV_UMSR64 __nds__umsr64
24423 #define __RV_KMAR64 __nds__kmar64
24424 #define __RV_KMSR64 __nds__kmsr64
24425 #define __RV_UKMAR64 __nds__ukmar64
24426 #define __RV_UKMSR64 __nds__ukmsr64
/* 16-bit multiply, accumulate-long (64-bit) family; X = crossed pairing. */
24427 #define __RV_SMALBB __nds__smalbb
24428 #define __RV_SMALBT __nds__smalbt
24429 #define __RV_SMALTT __nds__smaltt
24430 #define __RV_SMALDA __nds__smalda
24431 #define __RV_SMALXDA __nds__smalxda
24432 #define __RV_SMALDS __nds__smalds
24433 #define __RV_SMALDRS __nds__smaldrs
24434 #define __RV_SMALXDS __nds__smalxds
24435 #define __RV_SMSLDA __nds__smslda
24436 #define __RV_SMSLXDA __nds__smslxda
/* Word and SIMD min/max. */
24437 #define __RV_MINW __nds__minw
24438 #define __RV_MAXW __nds__maxw
24439 #define __RV_SMIN8 __nds__smin8
24440 #define __RV_SMAX8 __nds__smax8
24441 #define __RV_SMIN16 __nds__smin16
24442 #define __RV_SMAX16 __nds__smax16
24443 #define __RV_UMIN8 __nds__umin8
24444 #define __RV_UMAX8 __nds__umax8
24445 #define __RV_UMIN16 __nds__umin16
24446 #define __RV_UMAX16 __nds__umax16
/* Saturating absolute value. */
24447 #define __RV_KABS8 __nds__kabs8
24448 #define __RV_KABS16 __nds__kabs16
24449 #define __RV_KABSW __nds__kabsw
/* Signed/unsigned clip (saturate to a bit-width given by the immediate). */
24450 #define __RV_SCLIP8 __nds__sclip8
24451 #define __RV_SCLIP16 __nds__sclip16
24452 #define __RV_SCLIP32 __nds__sclip32
24453 #define __RV_UCLIP8 __nds__uclip8
24454 #define __RV_UCLIP16 __nds__uclip16
24455 #define __RV_UCLIP32 __nds__uclip32
/* Per-lane bit counts: leading ones, leading zeros, redundant sign bits. */
24456 #define __RV_CLO8 __nds__clo8
24457 #define __RV_CLO16 __nds__clo16
24458 #define __RV_CLO32 __nds__clo32
24459 #define __RV_CLZ8 __nds__clz8
24460 #define __RV_CLZ16 __nds__clz16
24461 #define __RV_CLZ32 __nds__clz32
24462 #define __RV_CLRS8 __nds__clrs8
24463 #define __RV_CLRS16 __nds__clrs16
24464 #define __RV_CLRS32 __nds__clrs32
/* Byte/halfword swap. */
24465 #define __RV_SWAP8 __nds__swap8
24466 #define __RV_SWAP16 __nds__swap16
/* Saturating Q15 multiply (KHM) and doubling multiply(-accumulate) (KDM). */
24467 #define __RV_KHMBB __nds__khmbb
24468 #define __RV_KHMBT __nds__khmbt
24469 #define __RV_KHMTT __nds__khmtt
24470 #define __RV_KDMBB __nds__kdmbb
24471 #define __RV_KDMBT __nds__kdmbt
24472 #define __RV_KDMTT __nds__kdmtt
24473 #define __RV_KDMABB __nds__kdmabb
24474 #define __RV_KDMABT __nds__kdmabt
24475 #define __RV_KDMATT __nds__kdmatt
/* Multiply-add/sub returning lower 32 bits, sum-of-absolute-differences,
 * average, bit-reverse and insert-byte helpers. */
24476 #define __RV_MADDR32 __nds__maddr32
24477 #define __RV_MSUBR32 __nds__msubr32
24478 #define __RV_PBSAD __nds__pbsad
24479 #define __RV_PBSADA __nds__pbsada
24480 #define __RV_AVE __nds__ave
24481 #define __RV_BITREV __nds__bitrev
24482 #define __RV_INSB __nds__insb
24484 #if (__riscv_xlen == 64)
/* RV64-only mappings: 32-bit-lane SIMD operations that exist only when
 * XLEN is 64 (two 32-bit lanes per register). */
24485 #define __RV_ADD32 __nds__add32
24486 #define __RV_SUB32 __nds__sub32
24487 #define __RV_RADD32 __nds__radd32
24488 #define __RV_RSUB32 __nds__rsub32
24489 #define __RV_URADD32 __nds__uradd32
24490 #define __RV_URSUB32 __nds__ursub32
24491 #define __RV_KADD32 __nds__kadd32
24492 #define __RV_KSUB32 __nds__ksub32
24493 #define __RV_UKADD32 __nds__ukadd32
24494 #define __RV_UKSUB32 __nds__uksub32
/* 32-bit cross add/sub with halving/saturating variants. */
24495 #define __RV_CRAS32 __nds__cras32
24496 #define __RV_CRSA32 __nds__crsa32
24497 #define __RV_RCRAS32 __nds__rcras32
24498 #define __RV_RCRSA32 __nds__rcrsa32
24499 #define __RV_URCRAS32 __nds__urcras32
24500 #define __RV_URCRSA32 __nds__urcrsa32
24501 #define __RV_KCRAS32 __nds__kcras32
24502 #define __RV_KCRSA32 __nds__kcrsa32
24503 #define __RV_UKCRAS32 __nds__ukcras32
24504 #define __RV_UKCRSA32 __nds__ukcrsa32
/* 32-bit-lane shifts.
 * NOTE(review): immediate forms __RV_SRAI32/__RV_SLLI32 map to the
 * register-operand intrinsics (same pattern as __RV_SRAI8/16 above), and no
 * __RV_SRLI32 mapping appears here — confirm the IAR intrinsic set covers
 * the immediate encodings. */
24505 #define __RV_SRA32 __nds__sra32
24506 #define __RV_SRAI32 __nds__sra32
24507 #define __RV_SRL32 __nds__srl32
24508 #define __RV_SLL32 __nds__sll32
24509 #define __RV_SLLI32 __nds__sll32
24510 #define __RV_SRAW_U __nds__sraw_u
24511 #define __RV_SRA32_U __nds__sra32_u
24512 #define __RV_SRL32_U __nds__srl32_u
24513 #define __RV_KSLL32 __nds__ksll32
24514 #define __RV_KSLRA32 __nds__kslra32
24515 #define __RV_KSLRA32_U __nds__kslra32_u
/* 32x32 half-operand multiplies and 32-bit pack. */
24516 #define __RV_SMBB32 __nds__smbb32
24517 #define __RV_SMBT32 __nds__smbt32
24518 #define __RV_SMTT32 __nds__smtt32
24519 #define __RV_PKBB32 __nds__pkbb32
24520 #define __RV_PKBT32 __nds__pkbt32
24521 #define __RV_PKTT32 __nds__pktt32
24522 #define __RV_PKTB32 __nds__pktb32
/* 32-bit-lane min/max and saturating absolute value. */
24523 #define __RV_SMIN32 __nds__smin32
24524 #define __RV_SMAX32 __nds__smax32
24525 #define __RV_UMIN32 __nds__umin32
24526 #define __RV_UMAX32 __nds__umax32
24527 #define __RV_KABS32 __nds__kabs32
/* 16-bit saturating/doubling multiplies packed per 32-bit lane. */
24528 #define __RV_KHMBB16 __nds__khmbb16
24529 #define __RV_KHMBT16 __nds__khmbt16
24530 #define __RV_KHMTT16 __nds__khmtt16
24531 #define __RV_KDMBB16 __nds__kdmbb16
24532 #define __RV_KDMBT16 __nds__kdmbt16
24533 #define __RV_KDMTT16 __nds__kdmtt16
24534 #define __RV_KDMABB16 __nds__kdmabb16
24535 #define __RV_KDMABT16 __nds__kdmabt16
24536 #define __RV_KDMATT16 __nds__kdmatt16
/* Saturating 32-bit multiply-accumulate and dual-multiply reductions. */
24537 #define __RV_KMABB32 __nds__kmabb32
24538 #define __RV_KMABT32 __nds__kmabt32
24539 #define __RV_KMATT32 __nds__kmatt32
24540 #define __RV_KMDA32 __nds__kmda32
24541 #define __RV_KMXDA32 __nds__kmxda32
24542 #define __RV_KMADA32 __nds__kmada32
24543 #define __RV_KMAXDA32 __nds__kmaxda32
24544 #define __RV_KMADS32 __nds__kmads32
24545 #define __RV_KMADRS32 __nds__kmadrs32
24546 #define __RV_KMAXDS32 __nds__kmaxds32
24547 #define __RV_KMSDA32 __nds__kmsda32
24548 #define __RV_KMSXDA32 __nds__kmsxda32
24549 #define __RV_SMDS32 __nds__smds32
24550 #define __RV_SMDRS32 __nds__smdrs32
24551 #define __RV_SMXDS32 __nds__smxds32
24560 #pragma inline=forced_no_body
24561 unsigned long __RV_STAS16(
unsigned long a,
unsigned long b) {
24563 __asm(
".insn r 0x7F, 0x2, 0x7A, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24567 #pragma inline=forced_no_body
24568 unsigned long __RV_RSTAS16(
unsigned long a,
unsigned long b) {
24570 __asm(
".insn r 0x7F, 0x2, 0x5A, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24574 #pragma inline=forced_no_body
24575 unsigned long __RV_KSTAS16(
unsigned long a,
unsigned long b) {
24577 __asm(
".insn r 0x7F, 0x2, 0x62, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24581 #pragma inline=forced_no_body
24582 unsigned long __RV_URSTAS16(
unsigned long a,
unsigned long b) {
24584 __asm(
".insn r 0x7F, 0x2, 0x6A, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24588 #pragma inline=forced_no_body
24589 unsigned long __RV_UKSTAS16(
unsigned long a,
unsigned long b) {
24591 __asm(
".insn r 0x7F, 0x2, 0x72, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24595 #pragma inline=forced_no_body
24596 unsigned long __RV_STSA16(
unsigned long a,
unsigned long b) {
24598 __asm(
".insn r 0x7F, 0x2, 0x7B, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24602 #pragma inline=forced_no_body
24603 unsigned long __RV_RSTSA16(
unsigned long a,
unsigned long b) {
24605 __asm(
".insn r 0x7F, 0x2, 0x5B, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24609 #pragma inline=forced_no_body
24610 unsigned long __RV_KSTSA16(
unsigned long a,
unsigned long b) {
24612 __asm(
".insn r 0x7F, 0x2, 0x63, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24616 #pragma inline=forced_no_body
24617 unsigned long __RV_URSTSA16(
unsigned long a,
unsigned long b) {
24619 __asm(
".insn r 0x7F, 0x2, 0x6B, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24623 #pragma inline=forced_no_body
24624 unsigned long __RV_UKSTSA16(
unsigned long a,
unsigned long b) {
24626 __asm(
".insn r 0x7F, 0x2, 0x73, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24636 #pragma inline=forced_no_body
24637 unsigned long __RV_STAS32(
unsigned long a,
unsigned long b) {
24639 __asm(
".insn r 0x7F, 0x2, 0x78, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24643 #pragma inline=forced_no_body
24644 unsigned long __RV_RSTAS32(
unsigned long a,
unsigned long b) {
24646 __asm(
".insn r 0x7F, 0x2, 0x58, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24650 #pragma inline=forced_no_body
24651 unsigned long __RV_KSTAS32(
unsigned long a,
unsigned long b) {
24653 __asm(
".insn r 0x7F, 0x2, 0x60, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24657 #pragma inline=forced_no_body
24658 unsigned long __RV_URSTAS32(
unsigned long a,
unsigned long b) {
24660 __asm(
".insn r 0x7F, 0x2, 0x68, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24664 #pragma inline=forced_no_body
24665 unsigned long __RV_UKSTAS32(
unsigned long a,
unsigned long b) {
24667 __asm(
".insn r 0x7F, 0x2, 0x70, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24671 #pragma inline=forced_no_body
24672 unsigned long __RV_STSA32(
unsigned long a,
unsigned long b) {
24674 __asm(
".insn r 0x7F, 0x2, 0x79, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24678 #pragma inline=forced_no_body
24679 unsigned long __RV_RSTSA32(
unsigned long a,
unsigned long b) {
24681 __asm(
".insn r 0x7F, 0x2, 0x59, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24685 #pragma inline=forced_no_body
24686 unsigned long __RV_KSTSA32(
unsigned long a,
unsigned long b) {
24688 __asm(
".insn r 0x7F, 0x2, 0x61, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24692 #pragma inline=forced_no_body
24693 unsigned long __RV_URSTSA32(
unsigned long a,
unsigned long b) {
24695 __asm(
".insn r 0x7F, 0x2, 0x69, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24699 #pragma inline=forced_no_body
24700 unsigned long __RV_UKSTSA32(
unsigned long a,
unsigned long b) {
24702 __asm(
".insn r 0x7F, 0x2, 0x71, %0,%1,%2":
"=r"(r) :
"r"(a),
"r"(b) );
24706 #pragma inline=forced_no_body
24712 #pragma inline=forced_no_body
24718 #pragma inline=forced_no_body
24724 #pragma inline=forced_no_body
24730 #if __RISCV_XLEN == 64
24732 #pragma inline=forced_no_body
24733 unsigned long __RV_EXPD84(
unsigned long a)
24738 #pragma inline=forced_no_body
24739 unsigned long __RV_EXPD85(
unsigned long a)
24744 #pragma inline=forced_no_body
24745 unsigned long __RV_EXPD86(
unsigned long a)
24750 #pragma inline=forced_no_body
24751 unsigned long __RV_EXPD87(
unsigned long a)
24756 #pragma language=restore
24759 #error Unknown compiler
/* CMSIS-style SIMD intrinsic names mapped onto the Nuclei __RV_* DSP
 * intrinsics: Q* = saturating, SH* = signed halving, *ASX/*SAX = cross
 * add-sub / sub-add. All macro arguments are parenthesized to avoid
 * precedence surprises when callers pass expressions. */
#define __QADD8(x, y) __RV_KADD8((x), (y))
#define __QSUB8(x, y) __RV_KSUB8((x), (y))
#define __QADD16(x, y) __RV_KADD16((x), (y))
#define __SHADD16(x, y) __RV_RADD16((x), (y))
#define __QSUB16(x, y) __RV_KSUB16((x), (y))
#define __SHSUB16(x, y) __RV_RSUB16((x), (y))
#define __QASX(x, y) __RV_KCRAS16((x), (y))
#define __SHASX(x, y) __RV_RCRAS16((x), (y))
#define __QSAX(x, y) __RV_KCRSA16((x), (y))
#define __SHSAX(x, y) __RV_RCRSA16((x), (y))
/* NOTE: operand order is intentionally swapped for SMUSDX. */
#define __SMUSDX(x, y) __RV_SMXDS((y), (x))
#define __QADD(x, y) __RV_KADDW((x), (y))
#define __QSUB(x, y) __RV_KSUBW((x), (y))
#define __SXTB16(x) __RV_SUNPKD820((x))
#define __SXTAB16_RORn(ARG1, ARG2, ROTATE) __SXTAB16(ARG1, __ROR(ARG2, ROTATE))
24844 return (acc + mul);
/* Direct 1:1 aliases from legacy/CMSIS-style names to the __RV_* DSP
 * intrinsics defined in this file. */
#define __DKHM8 __RV_DKHM8
#define __DKHM16 __RV_DKHM16
#define __DKSUB16 __RV_DKSUB16
#define __SMAQA __RV_SMAQA
#define __MULSR64 __RV_MULSR64
#define __DQADD8 __RV_DKADD8
#define __DQSUB8 __RV_DKSUB8
#define __DKADD16 __RV_DKADD16
#define __PKBB16 __RV_PKBB16
#define __DKSLRA16 __RV_DKSLRA16
#define __DKSLRA8 __RV_DKSLRA8
#define __KABSW __RV_KABSW
#define __DKABS8 __RV_DKABS8
#define __DKABS16 __RV_DKABS16
#define __SMALDA __RV_SMALDA
#define __SMSLDA __RV_SMSLDA
#define __SMALBB __RV_SMALBB
#define __SUB64 __RV_SUB64
#define __ADD64 __RV_ADD64
#define __SMBB16 __RV_SMBB16
#define __SMBT16 __RV_SMBT16
#define __SMTT16 __RV_SMTT16
#define __EXPD80 __RV_EXPD80
#define __SMAX8 __RV_SMAX8
#define __SMAX16 __RV_SMAX16
#define __PKTT16 __RV_PKTT16
#define __KADD16 __RV_KADD16
#define __SADD16 __RV_ADD16
/* NOTE(review): __SSUB8/__SADD8 map to the saturating K-variants here,
 * not to plain SUB8/ADD8 — confirm this is intentional before relying on
 * wrap-around semantics. */
#define __SSUB8 __RV_KSUB8
#define __SADD8 __RV_KADD8
#define __USAT16 __RV_UCLIP16
#define __SMALTT __RV_SMALTT

/* Pack-halfword helpers: use the native PKxx16 instructions for the
 * shift-free cases (0 and 16), otherwise fall back to mask-and-shift. */
#define __PKHBT(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
    (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
    (((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)))

#define __PKHTB(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
    (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
    (((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)))
/* 64-bit pack-halfword helpers. On RV64 the scalar PKxx16 instructions
 * operate on the full 64-bit register; on RV32 the double-register D
 * variants (DPKxx16) are used instead.
 * NOTE(review): the #else/#endif placement is reconstructed — the two
 * identically named definitions can only coexist under this conditional;
 * verify against the upstream header. */
#if __RISCV_XLEN == 64
#define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
    (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
    ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
    ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
    ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))

#define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
    (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
    ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
    ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
    ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
#else
#define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG2, ARG1) : \
    (ARG3 == 16) ? __RV_DPKBB16(ARG2, ARG1) : \
    ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
    ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
    ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))

#define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG1, ARG2) : \
    (ARG3 == 16) ? __RV_DPKTT16(ARG1, ARG2) : \
    ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
    ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
    ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
#endif
24930 #define __SXTB16_RORn(ARG1, ARG2) __RV_SUNPKD820(__ROR(ARG1, ARG2))
#define __EXPD_BYTE(x)
Expand byte to unsigned long value.
#define __ASM
Pass information from the compiler to the assembler.
#define __STATIC_FORCEINLINE
Define a static function that should be always inlined by the compiler.
__STATIC_FORCEINLINE long __RV_RADDW(int a, int b)
RADDW (32-bit Signed Halving Addition)
__STATIC_FORCEINLINE long __RV_MINW(int a, int b)
MINW (32-bit Signed Word Minimum)
__STATIC_FORCEINLINE long __RV_MAXW(int a, int b)
MAXW (32-bit Signed Word Maximum)
__STATIC_FORCEINLINE long long __RV_MULSR64(long a, long b)
MULSR64 (Multiply Word Signed to 64-bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_MULR64(unsigned long a, unsigned long b)
MULR64 (Multiply Word Unsigned to 64-bit Data)
__STATIC_FORCEINLINE long __RV_RSUBW(int a, int b)
RSUBW (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADDW(unsigned int a, unsigned int b)
URADDW (32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSUBW(unsigned int a, unsigned int b)
URSUBW (32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_UMSR64(unsigned long long t, unsigned long a, unsigned long b)
UMSR64 (Unsigned Multiply and Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UKMSR64(unsigned long long t, unsigned long a, unsigned long b)
UKMSR64 (Unsigned Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_SMAR64(long long t, long a, long b)
SMAR64 (Signed Multiply and Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_KMAR64(long long t, long a, long b)
KMAR64 (Signed Multiply and Saturating Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_SMSR64(long long t, long a, long b)
SMSR64 (Signed Multiply and Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UKMAR64(unsigned long long t, unsigned long a, unsigned long b)
UKMAR64 (Unsigned Multiply and Saturating Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_KMSR64(long long t, long a, long b)
KMSR64 (Signed Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_UMAR64(unsigned long long t, unsigned long a, unsigned long b)
UMAR64 (Unsigned Multiply and Add to 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_SUB64(unsigned long long a, unsigned long long b)
SUB64 (64-bit Subtraction)
__STATIC_FORCEINLINE long long __RV_RADD64(long long a, long long b)
RADD64 (64-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_URADD64(unsigned long long a, unsigned long long b)
URADD64 (64-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_ADD64(unsigned long long a, unsigned long long b)
ADD64 (64-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UKSUB64(unsigned long long a, unsigned long long b)
UKSUB64 (64-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_UKADD64(unsigned long long a, unsigned long long b)
UKADD64 (64-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_URSUB64(unsigned long long a, unsigned long long b)
URSUB64 (64-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE long long __RV_KADD64(long long a, long long b)
KADD64 (64-bit Signed Saturating Addition)
__STATIC_FORCEINLINE long long __RV_KSUB64(long long a, long long b)
KSUB64 (64-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE long long __RV_RSUB64(long long a, long long b)
RSUB64 (64-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE long __RV_SMAQA(long t, unsigned long a, unsigned long b)
SMAQA (Signed Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE long __RV_SMAQA_SU(long t, unsigned long a, unsigned long b)
SMAQA.SU (Signed and Unsigned Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_UMAQA(unsigned long t, unsigned long a, unsigned long b)
UMAQA (Unsigned Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_BPICK(unsigned long a, unsigned long b, unsigned long c)
BPICK (Bit-wise Pick)
__STATIC_FORCEINLINE unsigned long __RV_MADDR32(unsigned long t, unsigned long a, unsigned long b)
MADDR32 (Multiply and Add to 32-Bit Word)
__STATIC_FORCEINLINE long __RV_AVE(long a, long b)
AVE (Average with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_BITREV(unsigned long a, unsigned long b)
BITREV (Bit Reverse)
__STATIC_FORCEINLINE unsigned long __RV_MSUBR32(unsigned long t, unsigned long a, unsigned long b)
MSUBR32 (Multiply and Subtract from 32-Bit Word)
__STATIC_FORCEINLINE unsigned long __RV_WEXT(long long a, unsigned int b)
WEXT (Extract Word from 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_SWAP8(unsigned long a)
SWAP8 (Swap Byte within Halfword)
__STATIC_FORCEINLINE long __RV_SRA_U(long a, unsigned int b)
SRA.u (Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SWAP16(unsigned long a)
SWAP16 (Swap Halfword within Word)
__STATIC_FORCEINLINE long __RV_KHMTT(unsigned int a, unsigned int b)
KHMTT (Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE long __RV_KHMBB(unsigned int a, unsigned int b)
KHMBB (Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE long __RV_KADDH(int a, int b)
KADDH (Signed Addition with Q15 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBH(unsigned int a, unsigned int b)
UKSUBH (Unsigned Subtraction with U16 Saturation)
__STATIC_FORCEINLINE long __RV_KHMBT(unsigned int a, unsigned int b)
KHMBT (Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE long __RV_KSUBH(int a, int b)
KSUBH (Signed Subtraction with Q15 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_UKADDH(unsigned int a, unsigned int b)
UKADDH (Unsigned Addition with U16 Saturation)
__STATIC_FORCEINLINE long __RV_KDMBT(unsigned int a, unsigned int b)
KDMBT (Signed Saturating Double Multiply B16 x T16)
__STATIC_FORCEINLINE long __RV_KDMABT(long t, unsigned int a, unsigned int b)
KDMABT (Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KABSW(signed long a)
KABSW (Scalar 32-bit Absolute Value with Saturation)
__STATIC_FORCEINLINE long __RV_KDMABB(long t, unsigned int a, unsigned int b)
KDMABB (Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE long __RV_KSUBW(int a, int b)
KSUBW (Signed Subtraction with Q31 Saturation)
__STATIC_FORCEINLINE long __RV_KSLRAW(int a, int b)
KSLRAW (Shift Left Logical with Q31 Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KDMATT(long t, unsigned int a, unsigned int b)
KDMATT (Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE long __RV_KDMBB(unsigned int a, unsigned int b)
KDMBB (Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_UKADDW(unsigned int a, unsigned int b)
UKADDW (Unsigned Addition with U32 Saturation)
__STATIC_FORCEINLINE long __RV_KSLRAW_U(int a, int b)
KSLRAW.u (Shift Left Logical with Q31 Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KADDW(int a, int b)
KADDW (Signed Addition with Q31 Saturation)
__STATIC_FORCEINLINE long __RV_KSLLW(long a, unsigned int b)
KSLLW (Saturating Shift Left Logical for Word)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBW(unsigned int a, unsigned int b)
UKSUBW (Unsigned Subtraction with U32 Saturation)
__STATIC_FORCEINLINE long __RV_KDMTT(unsigned int a, unsigned int b)
KDMTT (Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_EXPD80(unsigned long a)
EXPD80 (Expand and Copy Byte 0 to 32bit(when rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD83(unsigned long a)
EXPD83 (Expand and Copy Byte 3 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD81(unsigned long a)
EXPD81 (Expand and Copy Byte 1 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long __RV_EXPD82(unsigned long a)
EXPD82 (Expand and Copy Byte 2 to 32bit(rv32) or 64bit(when rv64))
__STATIC_FORCEINLINE unsigned long long __RV_DKHM8(unsigned long long a, unsigned long long b)
DKHM8 (64-bit SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA8(unsigned long long a, int b)
DKSLRA8 (64-bit SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA16(unsigned long long a, int b)
DKSLRA16 (64-bit SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB8(unsigned long long a, unsigned long long b)
DKSUB8 (64-bit SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS8(unsigned long long a)
DKABS8 (64-bit SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS16(unsigned long long a)
DKABS16 (64-bit SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD8(unsigned long long a, unsigned long long b)
DKADD8 (64-bit SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB16(unsigned long long a, unsigned long long b)
DKSUB16 (64-bit SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKHM16(unsigned long long a, unsigned long long b)
DKHM16 (64-bit SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD16(unsigned long long a, unsigned long long b)
DKADD16 (64-bit SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL(unsigned long long a, unsigned long long b)
DSMMUL (64-bit MSW 32x32 Signed Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKMDA(unsigned long long a, unsigned long long b)
DKMDA (Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTSA16(unsigned long long a, unsigned long long b)
DKSTSA16 (16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA32(unsigned long long a, unsigned long long b)
DSMTT32.sra32 (Signed Multiply Top Word & Top Word with Right Shift 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR16(unsigned long a, unsigned long b)
DMSR16 (Signed Multiply Halfs with Right Shift 16-bit and Cross Multiply Halfs with Right Shift 16-bit)
__STATIC_FORCEINLINE unsigned long __RV_DREDSA16(unsigned long long a)
DREDSA16 (Reduced Subtraction and Reduced Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX16(unsigned long long a, unsigned long long b)
DKHMX16 (64-bit SIMD Signed Crossed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS16(unsigned long long a, unsigned long long b)
DKCRAS16 (16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT32(unsigned long long a, unsigned long long b)
DPKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA32(unsigned long long a, unsigned long long b)
DSMBT32.sra32 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 32)
__STATIC_FORCEINLINE unsigned long long __RV_DSMTT16(unsigned long long a, unsigned long long b)
DSMTT16 (Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA16(unsigned long long a, unsigned long long b)
DRCRSA16 (16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_DSMBB32(unsigned long long a, unsigned long long b)
DSMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBT16(unsigned long long a, unsigned long long b)
DSMBT16 (Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD820(unsigned long long a)
DSUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DCRAS32(unsigned long long a, unsigned long long b)
DCRAS32 (32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS32(unsigned long long a, unsigned long long b)
DKCRAS32 (32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMDRS(unsigned long long a, unsigned long long b)
DSMDRS (Signed Multiply Two Halfs and Reverse Subtract)
__STATIC_FORCEINLINE long long __RV_DSMBT32(unsigned long long a, unsigned long long b)
DSMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB16(unsigned long long a, unsigned long long b)
DSUB16 (64-bit SIMD 16-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSTSA32(unsigned long long a, unsigned long long b)
DSTSA32 (32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB32(unsigned long long a, unsigned long long b)
DSUB32 (64-bit SIMD 32-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB16(unsigned long long a, unsigned long long b)
DPKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA16(unsigned long long a, unsigned long long b)
DKCRSA16 (16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS32(unsigned long long a, unsigned long long b)
DRCRAS32 (32-bit Signed Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD832(unsigned long long a)
DZUNPKD832 (Unsigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long long __RV_DADD32(unsigned long long a, unsigned long long b)
DADD32 (32-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD810(unsigned long long a)
DZUNPKD810 (Unsigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD32(unsigned long long a, unsigned long long b)
DRADD32 (64-bit SIMD 32-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB16(unsigned long long a, unsigned long long b)
DPKTB16 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX8(unsigned long long a, unsigned long long b)
DKHMX8 (64-bit SIMD Signed Crossed Saturating Q7 Multiply)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA32(unsigned long long a, unsigned long long b)
DSMBB32.sra32 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 32)
__STATIC_FORCEINLINE unsigned long long __RV_DADD16(unsigned long long a, unsigned long long b)
DADD16 (16-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD831(unsigned long long a)
DZUNPKD831 (Unsigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR33(unsigned long long a, unsigned long long b)
DMSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA14(unsigned long long a, unsigned long long b)
DSMBB32.sra14 (Signed Crossed Multiply Two Halfs and Subtract with Right Shift 14)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB16(unsigned long long a, unsigned long long b)
DRSUB16 (16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB32(unsigned long long a, unsigned long long b)
DPKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA32(unsigned long long a, unsigned long long b)
DKCRSA32 (32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD830(unsigned long long a)
DSUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD32(unsigned long long a, unsigned long long b)
DKADD32(64-bit SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE long long __RV_DSMTT32(unsigned long long a, unsigned long long b)
DSMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD830(unsigned long long a)
DZUNPKD830 (Unsigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DSMXDS(unsigned long long a, unsigned long long b)
DSMXDS (Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBB16(unsigned long long a, unsigned long long b)
DSMBB16 (Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD832(unsigned long long a)
DSUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT16(unsigned long long a, unsigned long long b)
DPKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB32(unsigned long long a, unsigned long long b)
DPKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR17(unsigned long a, unsigned long b)
DMSR17 (Signed Multiply Halfs with Right Shift 17-bit and Cross Multiply Halfs with Right Shift 17-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL(unsigned long long a, unsigned long long b)
DKWMMUL (64-bit MSW 32x32 Signed Multiply & Double)
__STATIC_FORCEINLINE unsigned long long __RV_DPACK32(signed long a, signed long b)
DPACK32 (SIMD Pack Two 32-bit Data To 64-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD820(unsigned long long a)
DZUNPKD820 (Unsigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD16(unsigned long long a, unsigned long long b)
DRADD16 (64-bit SIMD 16-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT32(unsigned long long a, unsigned long long b)
DPKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD810(unsigned long long a)
DSUNPKD810 (Signed Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS32(unsigned long long a)
DKABS32 (64-bit SIMD 32-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTAS16(unsigned long long a, unsigned long long b)
DKSTAS16 (16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DCRSA32(unsigned long long a, unsigned long long b)
DCRSA32 (32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS16(unsigned long long a, unsigned long long b)
DRCRAS16 (16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA32(unsigned long long a, unsigned long long b)
DRCRSA32 (32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE int16_t __RV_DKCLIP64(unsigned long long a)
DKCLIP64 (64-bit Clipped to 16-bit Saturation Value)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT16(unsigned long long a, unsigned long long b)
DPKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD831(unsigned long long a)
DSUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DSRA16(unsigned long long a, unsigned long b)
DSRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA32(unsigned long long a, int b)
DKSLRA32 (64-bit SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB32(unsigned long long a, unsigned long long b)
DRSUB32 (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL_U(unsigned long long a, unsigned long long b)
DSMMUL.u (64-bit MSW 32x32 Unsigned Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DSTAS32(unsigned long long a, unsigned long long b)
DSTAS32 (SIMD 32-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA14(unsigned long long a, unsigned long long b)
DSMBT32.sra14 (Signed Multiply Bottom Word & Top Word with Right Shift 14)
__STATIC_FORCEINLINE unsigned long __RV_DREDAS16(unsigned long long a)
DREDAS16 (Reduced Addition and Reduced Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKMXDA(unsigned long long a, unsigned long long b)
DKMXDA (Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB32(unsigned long long a, unsigned long long b)
DKSUB32 (64-bit SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL_U(unsigned long long a, unsigned long long b)
DKWMMUL.u (64-bit MSW 32x32 Unsigned Multiply & Double)
__STATIC_FORCEINLINE unsigned long long __RV_DMXSR33(unsigned long long a, unsigned long long b)
DMXSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA14(unsigned long long a, unsigned long long b)
DSMTT32.sra14 (Signed Multiply Top Word & Top Word with Right Shift 14-bit)
__STATIC_FORCEINLINE long long __RV_DKMXDA32(unsigned long long a, unsigned long long b)
DKMXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADS (Two 16x16 with 32-bit Signed Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DUMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DUMAQA (Four Unsigned 8x8 with 32-bit Unsigned Add)
__STATIC_FORCEINLINE long long __RV_DSMALXDA(long long t, unsigned long long a, unsigned long long b)
DSMALXDA (Four Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA (Four Signed 8x8 with 32-bit Signed Add)
__STATIC_FORCEINLINE long long __RV_DKMSXDA32(long long t, unsigned long long a, unsigned long long b)
DKMSXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE long long __RV_DSMALXDS(long long t, unsigned long long a, unsigned long long b)
DSMALXDS (Four Cross Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE long long __RV_DSMALDA(long long t, unsigned long long a, unsigned long long b)
DSMALDA (Four Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADRS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADRS (Two 16x16 with 32-bit Signed Add and Reversed Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADA (Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA_SU(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA.SU (Four Signed 8 x Unsigned 8 with 32-bit Signed Add)
__STATIC_FORCEINLINE long long __RV_DSMALDRS(long long t, unsigned long long a, unsigned long long b)
DSMALDRS (Four Signed 16x16 with 64-bit Add and Reversed Sub)
__STATIC_FORCEINLINE long __RV_DSMA32_U(unsigned long long a, unsigned long long b)
DSMA32.u (64-bit SIMD 32-bit Signed Multiply Addition With Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DSMDRS32(unsigned long long a, unsigned long long b)
DSMDRS32 (Two Signed 32x32 with 64-bit Reversed Sub)
__STATIC_FORCEINLINE long __RV_DSMXS32_U(unsigned long long a, unsigned long long b)
DSMXS32.u (64-bit SIMD 32-bit Signed Multiply Cross Subtraction With Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DKMSDA32(long long t, unsigned long long a, unsigned long long b)
DKMSDA32 (Two Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKSMS32_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKSMS32.u (Two Signed Multiply Shift-clip and Saturation with Rounding)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB (64-bit MSW 32x32 Signed Multiply and Saturating Sub)
__STATIC_FORCEINLINE long long __RV_DDSMAQA_SU(long long t, unsigned long long a, unsigned long long b)
DDSMAQA.SU (Eight Signed 8 x Unsigned 8 with 64-bit Add)
__STATIC_FORCEINLINE long long __RV_DSMALTT(long long t, unsigned long long a, unsigned long long b)
DSMALTT (Signed Multiply Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DSMDS32(unsigned long long a, unsigned long long b)
DSMDS32 (Two Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_DSMSLDA(long long t, unsigned long long a, unsigned long long b)
DSMSLDA (Four Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE long __RV_DMADA32(long long t, unsigned long long a, unsigned long long b)
DMADA32 (Two Cross Signed 32x32 with 64-bit Add and Clip to 32-bit)
__STATIC_FORCEINLINE long long __RV_DSMALDS(long long t, unsigned long long a, unsigned long long b)
DSMALDS (Four Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE long long __RV_DKMAXDS32(long long t, unsigned long long a, unsigned long long b)
DKMAXDS32 (Two Cross Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC (64-bit MSW 32x32 Signed Multiply and Saturating Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMSXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSXDA (Two Cross 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE long long __RV_DKMATT32(long long t, unsigned long long a, unsigned long long b)
DKMATT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long long __RV_DKMAXDA32(long long t, unsigned long long a, unsigned long long b)
DKMAXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE long long __RV_DDSMAQA(long long t, unsigned long long a, unsigned long long b)
DDSMAQA (Eight Signed 8x8 with 64-bit Add)
__STATIC_FORCEINLINE long __RV_DSMXA32_U(unsigned long long a, unsigned long long b)
DSMXA32.u (64-bit SIMD 32-bit Signed Cross Multiply Addition with Rounding and Clip)
__STATIC_FORCEINLINE unsigned long long __RV_DKMSDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSDA (Two 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE long long __RV_DSMXDS32(unsigned long long a, unsigned long long b)
DSMXDS32 (Two Cross Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long __RV_DSMS32_U(unsigned long long a, unsigned long long b)
DSMS32.u (64-bit SIMD 32-bit Signed Multiply Subtraction with Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DSMALBT(long long t, unsigned long long a, unsigned long long b)
DSMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DSMALBB(long long t, unsigned long long a, unsigned long long b)
DSMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DDUMAQA(long long t, unsigned long long a, unsigned long long b)
DDUMAQA (Eight Unsigned 8x8 with 64-bit Unsigned Add)
__STATIC_FORCEINLINE long long __RV_DKMADS32(long long t, unsigned long long a, unsigned long long b)
DKMADS32 (Two Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE long long __RV_DKMABB32(long long t, unsigned long long a, unsigned long long b)
DKMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long long __RV_DKMADRS32(long long t, unsigned long long a, unsigned long long b)
DKMADRS32 (Two Signed 32x32 with 64-bit Saturation Reversed Add and Sub)
__STATIC_FORCEINLINE long __RV_DSMADA16(long long t, unsigned long long a, unsigned long long b)
DSMADA16 (Signed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDA (Two Cross 16x16 with 32-bit Signed Double Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC.u (64-bit MSW 32x32 Unsigned Multiply and Saturating Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB.u (64-bit MSW 32x32 Unsigned Multiply and Saturating Sub)
__STATIC_FORCEINLINE long __RV_DSMAXDA16(long long t, unsigned long long a, unsigned long long b)
DSMAXDA16 (Signed Crossed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDS (Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long long __RV_DKMDA32(unsigned long long a, unsigned long long b)
DKMDA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE long long __RV_DKMABT32(long long t, unsigned long long a, unsigned long long b)
DKMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long long __RV_DSMSLXDA(long long t, unsigned long long a, unsigned long long b)
DSMSLXDA (Four Cross Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_DKMADA32(long long t, unsigned long long a, unsigned long long b)
DKMADA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE void __RV_CLROV(void)
CLROV (Clear OV flag)
__STATIC_FORCEINLINE unsigned long __RV_RDOV(void)
RDOV (Read OV flag)
__STATIC_FORCEINLINE unsigned long __RV_PBSAD(unsigned long a, unsigned long b)
PBSAD (Parallel Byte Sum of Absolute Difference)
__STATIC_FORCEINLINE unsigned long __RV_CLRS32(unsigned long a)
CLRS32 (SIMD 32-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_CLZ32(unsigned long a)
CLZ32 (SIMD 32-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_CLO32(unsigned long a)
CLO32 (SIMD 32-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_PBSADA(unsigned long t, unsigned long a, unsigned long b)
PBSADA (Parallel Byte Sum of Absolute Difference Accum)
__STATIC_FORCEINLINE long __RV_KMABT32(long t, unsigned long a, unsigned long b)
KMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE long __RV_KMABB32(long t, unsigned long a, unsigned long b)
KMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long __RV_KMATT32(long t, unsigned long a, unsigned long b)
KMATT32 (Saturating Signed Multiply Top Words & Add)
__STATIC_FORCEINLINE long __RV_SMTT32(unsigned long a, unsigned long b)
SMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE long __RV_SMBB32(unsigned long a, unsigned long b)
SMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE long __RV_SMBT32(unsigned long a, unsigned long b)
SMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE unsigned long __RV_PKBT32(unsigned long a, unsigned long b)
PKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBB32(unsigned long a, unsigned long b)
PKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTT32(unsigned long a, unsigned long b)
PKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTB32(unsigned long a, unsigned long b)
PKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE long __RV_KMDA32(unsigned long a, unsigned long b)
KMDA32 (Signed Multiply Two Words and Add)
__STATIC_FORCEINLINE long __RV_KMADS32(long t, unsigned long a, unsigned long b)
KMADS32 (Saturating Signed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMADRS32(long t, unsigned long a, unsigned long b)
KMADRS32 (Saturating Signed Multiply Two Words & Reverse Subtract & Add)
__STATIC_FORCEINLINE long __RV_SMDS32(unsigned long a, unsigned long b)
SMDS32 (Signed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE long __RV_KMADA32(long t, unsigned long a, unsigned long b)
KMADA32 (Saturating Signed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSDA32(long t, unsigned long a, unsigned long b)
KMSDA32 (Saturating Signed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE long __RV_SMXDS32(unsigned long a, unsigned long b)
SMXDS32 (Signed Crossed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE long __RV_SMDRS32(unsigned long a, unsigned long b)
SMDRS32 (Signed Multiply Two Words and Reverse Subtract)
__STATIC_FORCEINLINE long __RV_KMXDA32(unsigned long a, unsigned long b)
KMXDA32 (Signed Crossed Multiply Two Words and Add)
__STATIC_FORCEINLINE long __RV_KMAXDS32(long t, unsigned long a, unsigned long b)
KMAXDS32 (Saturating Signed Crossed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMAXDA32(long t, unsigned long a, unsigned long b)
KMAXDA32 (Saturating Signed Crossed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSXDA32(long t, unsigned long a, unsigned long b)
KMSXDA32 (Saturating Signed Crossed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b)
UKSTAS32 (SIMD 32-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKADD32(unsigned long a, unsigned long b)
UKADD32 (SIMD 32-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB32(unsigned long a, unsigned long b)
UKSUB32 (SIMD 32-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRSA32(unsigned long a, unsigned long b)
CRSA32 (SIMD 32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSUB32(unsigned long a, unsigned long b)
KSUB32 (SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS32(unsigned long a, unsigned long b)
RCRAS32 (SIMD 32-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA32(unsigned long a, unsigned long b)
UKCRSA32 (SIMD 32-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA32(unsigned long a, unsigned long b)
RCRSA32 (SIMD 32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_CRAS32(unsigned long a, unsigned long b)
CRAS32 (SIMD 32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA32(unsigned long a, unsigned long b)
KCRSA32 (SIMD 32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS32(unsigned long a, unsigned long b)
KCRAS32 (SIMD 32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSTAS32(unsigned long a, unsigned long b)
URSTAS32 (SIMD 32-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB32(unsigned long a, unsigned long b)
SUB32 (SIMD 32-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADD32(unsigned long a, unsigned long b)
URADD32 (SIMD 32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSUB32(unsigned long a, unsigned long b)
RSUB32 (SIMD 32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_STSA32(unsigned long a, unsigned long b)
STSA32 (SIMD 32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA32(unsigned long a, unsigned long b)
RSTSA32 (SIMD 32-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA32(unsigned long a, unsigned long b)
URSTSA32 (SIMD 32-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KADD32(unsigned long a, unsigned long b)
KADD32 (SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA32(unsigned long a, unsigned long b)
URCRSA32 (SIMD 32-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSUB32(unsigned long a, unsigned long b)
URSUB32 (SIMD 32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS32(unsigned long a, unsigned long b)
KSTAS32 (SIMD 32-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_ADD32(unsigned long a, unsigned long b)
ADD32 (SIMD 32-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS32(unsigned long a, unsigned long b)
RSTAS32 (SIMD 32-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS32(unsigned long a, unsigned long b)
URCRAS32 (SIMD 32-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS32(unsigned long a, unsigned long b)
UKCRAS32 (SIMD 32-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_STAS32(unsigned long a, unsigned long b)
STAS32 (SIMD 32-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RADD32(unsigned long a, unsigned long b)
RADD32 (SIMD 32-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b)
UKSTSA32 (SIMD 32-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA32(unsigned long a, unsigned long b)
KSTSA32 (SIMD 32-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_UMAX32(unsigned long a, unsigned long b)
UMAX32 (SIMD 32-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMIN32(unsigned long a, unsigned long b)
UMIN32 (SIMD 32-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long __RV_SMAX32(unsigned long a, unsigned long b)
SMAX32 (SIMD 32-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_KABS32(unsigned long a)
KABS32 (Scalar 32-bit Absolute Value with Saturation)
__STATIC_FORCEINLINE unsigned long __RV_SMIN32(unsigned long a, unsigned long b)
SMIN32 (SIMD 32-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_SRA32(unsigned long a, unsigned int b)
SRA32 (SIMD 32-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL32_U(unsigned long a, unsigned int b)
SRL32.u (SIMD 32-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLL32(unsigned long a, unsigned int b)
KSLL32 (SIMD 32-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRL32(unsigned long a, unsigned int b)
SRL32 (SIMD 32-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32(unsigned long a, int b)
KSLRA32 (SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL32(unsigned long a, unsigned int b)
SLL32 (SIMD 32-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA32_U(unsigned long a, unsigned int b)
SRA32.u (SIMD 32-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32_U(unsigned long a, int b)
KSLRA32.u (SIMD 32-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KDMTT16(unsigned long a, unsigned long b)
KDMTT16 (SIMD Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMABT16(unsigned long t, unsigned long a, unsigned long b)
KDMABT16 (SIMD Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMABB16(unsigned long t, unsigned long a, unsigned long b)
KDMABB16 (SIMD Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KDMBB16(unsigned long a, unsigned long b)
KDMBB16 (SIMD Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KHMBT16(unsigned long a, unsigned long b)
KHMBT16 (SIMD Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KHMBB16(unsigned long a, unsigned long b)
KHMBB16 (SIMD Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_KHMTT16(unsigned long a, unsigned long b)
KHMTT16 (SIMD Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMATT16(unsigned long t, unsigned long a, unsigned long b)
KDMATT16 (SIMD Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KDMBT16(unsigned long a, unsigned long b)
KDMBT16 (SIMD Signed Saturating Double Multiply B16 x T16)
__STATIC_FORCEINLINE long __RV_SMDS(unsigned long a, unsigned long b)
SMDS (SIMD Signed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE long __RV_KMADRS(long t, unsigned long a, unsigned long b)
KMADRS (SIMD Saturating Signed Multiply Two Halfs & Reverse Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMAXDA(long t, unsigned long a, unsigned long b)
KMAXDA (SIMD Saturating Signed Crossed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE long __RV_KMDA(unsigned long a, unsigned long b)
KMDA (SIMD Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long __RV_SMBT16(unsigned long a, unsigned long b)
SMBT16 (SIMD Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE long __RV_KMXDA(unsigned long a, unsigned long b)
KMXDA (SIMD Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long __RV_KMSDA(long t, unsigned long a, unsigned long b)
KMSDA (SIMD Saturating Signed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE long __RV_KMABB(long t, unsigned long a, unsigned long b)
KMABB (SIMD Saturating Signed Multiply Bottom Halfs & Add)
__STATIC_FORCEINLINE long __RV_KMABT(long t, unsigned long a, unsigned long b)
KMABT (SIMD Saturating Signed Multiply Bottom & Top Halfs & Add)
__STATIC_FORCEINLINE long __RV_SMXDS(unsigned long a, unsigned long b)
SMXDS (SIMD Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE long __RV_KMAXDS(long t, unsigned long a, unsigned long b)
KMAXDS (SIMD Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long __RV_SMBB16(unsigned long a, unsigned long b)
SMBB16 (SIMD Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE long __RV_KMADS(long t, unsigned long a, unsigned long b)
KMADS (SIMD Saturating Signed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMADA(long t, unsigned long a, unsigned long b)
KMADA (SIMD Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE long __RV_KMSXDA(long t, unsigned long a, unsigned long b)
KMSXDA (SIMD Saturating Signed Crossed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE long __RV_SMTT16(unsigned long a, unsigned long b)
SMTT16 (SIMD Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE long __RV_KMATT(long t, unsigned long a, unsigned long b)
KMATT (SIMD Saturating Signed Multiply Top Halfs & Add)
__STATIC_FORCEINLINE long __RV_SMDRS(unsigned long a, unsigned long b)
SMDRS (SIMD Signed Multiply Two Halfs and Reverse Subtract)
__STATIC_FORCEINLINE long long __RV_SMALXDA(long long t, unsigned long a, unsigned long b)
SMALXDA (Signed Crossed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALTT(long long t, unsigned long a, unsigned long b)
SMALTT (Signed Multiply Top Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMAL(long long a, unsigned long b)
SMAL (Signed Multiply Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDS(long long t, unsigned long a, unsigned long b)
SMALDS (Signed Multiply Two Halfs & Subtract & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALBT(long long t, unsigned long a, unsigned long b)
SMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDA(long long t, unsigned long a, unsigned long b)
SMALDA (Signed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALDRS(long long t, unsigned long a, unsigned long b)
SMALDRS (Signed Multiply Two Halfs & Reverse Subtract & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMSLDA(long long t, unsigned long a, unsigned long b)
SMSLDA (Signed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALBB(long long t, unsigned long a, unsigned long b)
SMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_SMSLXDA(long long t, unsigned long a, unsigned long b)
SMSLXDA (Signed Crossed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE long long __RV_SMALXDS(long long t, unsigned long a, unsigned long b)
SMALXDS (Signed Crossed Multiply Two Halfs & Subtract & Add 64-bit)
__STATIC_FORCEINLINE long __RV_KMMAWB(long t, unsigned long a, unsigned long b)
KMMAWB (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add)
__STATIC_FORCEINLINE long __RV_KMMAWB2(long t, unsigned long a, unsigned long b)
KMMAWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add)
__STATIC_FORCEINLINE long __RV_KMMWT2_U(long a, unsigned long b)
KMMWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT2_U(long t, unsigned long a, unsigned long b)
KMMAWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT(long t, unsigned long a, unsigned long b)
KMMAWT (SIMD Saturating MSW Signed Multiply Word and Top Half and Add)
__STATIC_FORCEINLINE long __RV_KMMAWT_U(long t, unsigned long a, unsigned long b)
KMMAWT.u (SIMD Saturating MSW Signed Multiply Word and Top Half and Add with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWB_U(long a, unsigned long b)
SMMWB.u (SIMD MSW Signed Multiply Word and Bottom Half with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAWT2(long t, unsigned long a, unsigned long b)
KMMAWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add)
__STATIC_FORCEINLINE long __RV_KMMAWB_U(long t, unsigned long a, unsigned long b)
KMMAWB.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWT_U(long a, unsigned long b)
SMMWT.u (SIMD MSW Signed Multiply Word and Top Half with Rounding)
__STATIC_FORCEINLINE long __RV_SMMWT(long a, unsigned long b)
SMMWT (SIMD MSW Signed Multiply Word and Top Half)
__STATIC_FORCEINLINE long __RV_SMMWB(long a, unsigned long b)
SMMWB (SIMD MSW Signed Multiply Word and Bottom Half)
__STATIC_FORCEINLINE long __RV_KMMWB2_U(long a, unsigned long b)
KMMWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 with Rounding)
__STATIC_FORCEINLINE long __RV_KMMWT2(long a, unsigned long b)
KMMWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2)
__STATIC_FORCEINLINE long __RV_KMMWB2(long a, unsigned long b)
KMMWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2)
__STATIC_FORCEINLINE long __RV_KMMAWB2_U(long t, unsigned long a, unsigned long b)
KMMAWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMSB(long t, long a, long b)
KMMSB (SIMD Saturating MSW Signed Multiply Word and Subtract)
__STATIC_FORCEINLINE long __RV_SMMUL(long a, long b)
SMMUL (SIMD MSW Signed Multiply Word)
__STATIC_FORCEINLINE long __RV_SMMUL_U(long a, long b)
SMMUL.u (SIMD MSW Signed Multiply Word with Rounding)
__STATIC_FORCEINLINE long __RV_KWMMUL(long a, long b)
KWMMUL (SIMD Saturating MSW Signed Multiply Word & Double)
__STATIC_FORCEINLINE long __RV_KWMMUL_U(long a, long b)
KWMMUL.u (SIMD Saturating MSW Signed Multiply Word & Double with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAC_U(long t, long a, long b)
KMMAC.u (SIMD Saturating MSW Signed Multiply Word and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAC(long t, long a, long b)
KMMAC (SIMD Saturating MSW Signed Multiply Word and Add)
__STATIC_FORCEINLINE long __RV_KMMSB_U(long t, long a, long b)
KMMSB.u (SIMD Saturating MSW Signed Multiply Word and Subtraction with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_KSUB16(unsigned long a, unsigned long b)
KSUB16 (SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB16(unsigned long a, unsigned long b)
SUB16 (SIMD 16-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSUB16(unsigned long a, unsigned long b)
URSUB16 (SIMD 16-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS16(unsigned long a, unsigned long b)
UKCRAS16 (SIMD 16-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS16(unsigned long a, unsigned long b)
RCRAS16 (SIMD 16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA16(unsigned long a, unsigned long b)
UKCRSA16 (SIMD 16-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URADD16(unsigned long a, unsigned long b)
URADD16 (SIMD 16-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_STSA16(unsigned long a, unsigned long b)
STSA16 (SIMD 16-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS16(unsigned long a, unsigned long b)
URCRAS16 (SIMD 16-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA16(unsigned long a, unsigned long b)
KCRSA16 (SIMD 16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RADD16(unsigned long a, unsigned long b)
RADD16 (SIMD 16-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS16(unsigned long a, unsigned long b)
KSTAS16 (SIMD 16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKADD16(unsigned long a, unsigned long b)
UKADD16 (SIMD 16-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA16(unsigned long a, unsigned long b)
KSTSA16 (SIMD 16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_CRAS16(unsigned long a, unsigned long b)
CRAS16 (SIMD 16-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSTAS16(unsigned long a, unsigned long b)
URSTAS16 (SIMD 16-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA16(unsigned long a, unsigned long b)
RCRSA16 (SIMD 16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSUB16(unsigned long a, unsigned long b)
RSUB16 (SIMD 16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRSA16(unsigned long a, unsigned long b)
CRSA16 (SIMD 16-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS16(unsigned long a, unsigned long b)
KCRAS16 (SIMD 16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b)
UKSTAS16 (SIMD 16-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_ADD16(unsigned long a, unsigned long b)
ADD16 (SIMD 16-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA16(unsigned long a, unsigned long b)
URCRSA16 (SIMD 16-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA16(unsigned long a, unsigned long b)
URSTSA16 (SIMD 16-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB16(unsigned long a, unsigned long b)
UKSUB16 (SIMD 16-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KADD16(unsigned long a, unsigned long b)
KADD16 (SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS16(unsigned long a, unsigned long b)
RSTAS16 (SIMD 16-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b)
UKSTSA16 (SIMD 16-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA16(unsigned long a, unsigned long b)
RSTSA16 (SIMD 16-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_STAS16(unsigned long a, unsigned long b)
STAS16 (SIMD 16-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT16(unsigned long a, unsigned long b)
SCMPLT16 (SIMD 16-bit Signed Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE16(unsigned long a, unsigned long b)
UCMPLE16 (SIMD 16-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT16(unsigned long a, unsigned long b)
UCMPLT16 (SIMD 16-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ16(unsigned long a, unsigned long b)
CMPEQ16 (SIMD 16-bit Integer Compare Equal)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE16(unsigned long a, unsigned long b)
SCMPLE16 (SIMD 16-bit Signed Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_CLO16(unsigned long a)
CLO16 (SIMD 16-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_KABS16(unsigned long a)
KABS16 (SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_CLRS16(unsigned long a)
CLRS16 (SIMD 16-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_SMIN16(unsigned long a, unsigned long b)
SMIN16 (SIMD 16-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLZ16(unsigned long a)
CLZ16 (SIMD 16-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_SMAX16(unsigned long a, unsigned long b)
SMAX16 (SIMD 16-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMAX16(unsigned long a, unsigned long b)
UMAX16 (SIMD 16-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_UMIN16(unsigned long a, unsigned long b)
UMIN16 (SIMD 16-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long long __RV_UMUL16(unsigned int a, unsigned int b)
UMUL16 (SIMD Unsigned 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX16(unsigned int a, unsigned int b)
SMULX16 (SIMD Signed Crossed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX16(unsigned int a, unsigned int b)
UMULX16 (SIMD Unsigned Crossed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHM16(unsigned long a, unsigned long b)
KHM16 (SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHMX16(unsigned long a, unsigned long b)
KHMX16 (SIMD Signed Saturating Crossed Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL16(unsigned int a, unsigned int b)
SMUL16 (SIMD Signed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_PKTT16(unsigned long a, unsigned long b)
PKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBB16(unsigned long a, unsigned long b)
PKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKTB16(unsigned long a, unsigned long b)
PKTB16 (Pack Two 16-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBT16(unsigned long a, unsigned long b)
PKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long __RV_SRA16_U(unsigned long a, unsigned long b)
SRA16.u (SIMD 16-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL16(unsigned long a, unsigned int b)
SLL16 (SIMD 16-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRL16_U(unsigned long a, unsigned int b)
SRL16.u (SIMD 16-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16(unsigned long a, int b)
KSLRA16 (SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRA16(unsigned long a, unsigned long b)
SRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL16(unsigned long a, unsigned int b)
SRL16 (SIMD 16-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLL16(unsigned long a, unsigned int b)
KSLL16 (SIMD 16-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16_U(unsigned long a, int b)
KSLRA16.u (SIMD 16-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_RADD8(unsigned long a, unsigned long b)
RADD8 (SIMD 8-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB8(unsigned long a, unsigned long b)
UKSUB8 (SIMD 8-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADD8(unsigned long a, unsigned long b)
URADD8 (SIMD 8-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKADD8(unsigned long a, unsigned long b)
UKADD8 (SIMD 8-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_ADD8(unsigned long a, unsigned long b)
ADD8 (SIMD 8-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_KADD8(unsigned long a, unsigned long b)
KADD8 (SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSUB8(unsigned long a, unsigned long b)
KSUB8 (SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SUB8(unsigned long a, unsigned long b)
SUB8 (SIMD 8-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URSUB8(unsigned long a, unsigned long b)
URSUB8 (SIMD 8-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RSUB8(unsigned long a, unsigned long b)
RSUB8 (SIMD 8-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE8(unsigned long a, unsigned long b)
SCMPLE8 (SIMD 8-bit Signed Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT8(unsigned long a, unsigned long b)
UCMPLT8 (SIMD 8-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT8(unsigned long a, unsigned long b)
SCMPLT8 (SIMD 8-bit Signed Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE8(unsigned long a, unsigned long b)
UCMPLE8 (SIMD 8-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ8(unsigned long a, unsigned long b)
CMPEQ8 (SIMD 8-bit Integer Compare Equal)
__STATIC_FORCEINLINE unsigned long __RV_KABS8(unsigned long a)
KABS8 (SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_SMIN8(unsigned long a, unsigned long b)
SMIN8 (SIMD 8-bit Signed Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLRS8(unsigned long a)
CLRS8 (SIMD 8-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_UMAX8(unsigned long a, unsigned long b)
UMAX8 (SIMD 8-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_CLZ8(unsigned long a)
CLZ8 (SIMD 8-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_UMIN8(unsigned long a, unsigned long b)
UMIN8 (SIMD 8-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long __RV_CLO8(unsigned long a)
CLO8 (SIMD 8-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_SMAX8(unsigned long a, unsigned long b)
SMAX8 (SIMD 8-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long long __RV_UMUL8(unsigned int a, unsigned int b)
UMUL8 (SIMD Unsigned 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHMX8(unsigned long a, unsigned long b)
KHMX8 (SIMD Signed Saturating Crossed Q7 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHM8(unsigned long a, unsigned long b)
KHM8 (SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX8(unsigned int a, unsigned int b)
SMULX8 (SIMD Signed Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL8(unsigned int a, unsigned int b)
SMUL8 (SIMD Signed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX8(unsigned int a, unsigned int b)
UMULX8 (SIMD Unsigned Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_SRL8(unsigned long a, unsigned int b)
SRL8 (SIMD 8-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8_U(unsigned long a, int b)
KSLRA8.u (SIMD 8-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SRL8_U(unsigned long a, unsigned int b)
SRL8.u (SIMD 8-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA8(unsigned long a, unsigned int b)
SRA8 (SIMD 8-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8(unsigned long a, int b)
KSLRA8 (SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SLL8(unsigned long a, unsigned int b)
SLL8 (SIMD 8-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_SRA8_U(unsigned long a, unsigned int b)
SRA8.u (SIMD 8-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_KSLL8(unsigned long a, unsigned int b)
KSLL8 (SIMD 8-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD820(unsigned long a)
ZUNPKD820 (Unsigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD810(unsigned long a)
ZUNPKD810 (Unsigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD830(unsigned long a)
ZUNPKD830 (Unsigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD832(unsigned long a)
ZUNPKD832 (Unsigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD831(unsigned long a)
ZUNPKD831 (Unsigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD832(unsigned long a)
SUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD830(unsigned long a)
SUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD820(unsigned long a)
SUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD831(unsigned long a)
SUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD810(unsigned long a)
SUNPKD810 (Signed Unpacking Bytes 1 & 0)