NMSIS-Core
Version 1.3.1
NMSIS-Core support for Nuclei processor-based devices
#ifndef __CORE_FEATURE_DSP__
#define __CORE_FEATURE_DSP__

#include "core_feature_base.h"

#if defined(__DSP_PRESENT) && (__DSP_PRESENT == 1)

#if defined(__INC_INTRINSIC_API) && (__INC_INTRINSIC_API == 1)
#include <rvp_intrinsic.h>
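/* Everything in this block is the GCC/Clang path: each intrinsic wraps one
   P-extension instruction in inline assembly. As a minimal sketch of the
   surrounding convention (assuming the NMSIS __STATIC_FORCEINLINE style from
   core_feature_base.h; the fragments below show only the function bodies),
   the add8 wrapper reads: */
__STATIC_FORCEINLINE unsigned long __RV_ADD8(unsigned long a, unsigned long b)
{
    unsigned long result;
    /* Operands go in through "r" (register) constraints; the destination
       comes back through "=r". */
    __ASM volatile("add8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
    return result;
}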
unsigned long result;
__ASM volatile("add8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("add16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("add64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("ave %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("bitrev %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_BITREVI(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("bitrevi %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
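/* Usage sketch (illustrative, not part of the header): bitrevi and the other
   immediate-operand instructions take their last operand through the "K"
   constraint, which requires a compile-time constant; that is why these
   wrappers are statement-expression macros rather than inline functions. */
static inline unsigned long bitrev_low_byte(unsigned long x)
{
    return __RV_BITREVI(x, 7); /* reverses the low imm+1 = 8 bits of x */
}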
unsigned long result;
__ASM volatile("bpick %0, %1, %2, %3" : "=r"(result) : "r"(a), "r"(b), "r"(c));

__ASM volatile("clrov");

unsigned long result;
__ASM volatile("clrs8 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clrs16 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clrs32 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clo8 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clo16 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clo32 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clz8 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clz16 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("clz32 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("cmpeq8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("cmpeq16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("cras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("crsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_INSB(t, a, b) \
    ({ \
        unsigned long __t = (unsigned long)(t); \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("insb %0, %1, %2" : "+r"(__t) : "r"(__a), "K"(b)); \
        __t; \
    })
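/* Usage sketch (illustrative): insb updates a single byte lane of its
   destination in place, hence the read-write "+r" constraint. */
static inline unsigned long insert_byte2(unsigned long dst, unsigned long src)
{
    return __RV_INSB(dst, src, 2); /* write low byte of src into lane 2 */
}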
unsigned long result;
__ASM volatile("kabs8 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("kabs16 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("kabsw %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("kadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kadd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kaddh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kaddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kdmbb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kdmbt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kdmtt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kdmabb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kdmabt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kdmatt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
unsigned long result;
__ASM volatile("khm8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khmx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khm16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khmx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("khmbb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("khmbt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("khmtt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmabb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmabt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmatt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmaxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmads %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmadrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmaxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmxda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
__ASM volatile("kmmac %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmac.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawb2 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawb2.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawt.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawt2 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmawt2.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmsb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmsb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmmwb2 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmmwb2.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmmwt2 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmmwt2.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmsda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmsxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("ksllw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_KSLLIW(a, b) \
    ({ \
        long result; \
        long __a = (long)(a); \
        __ASM volatile("kslliw %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("ksll8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_KSLLI8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("kslli8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("ksll16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_KSLLI16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("kslli16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
unsigned long result;
__ASM volatile("kslra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kslra8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kslra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kslra16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kslraw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kslraw.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("ksub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("ksubh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("ksubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kwmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kwmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("maddr32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("maxw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("minw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("msubr32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("mulr64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("mulsr64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
unsigned long result;
__ASM volatile("pbsad %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("pbsada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pkbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pkbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pktt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pktb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("radd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("radd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("radd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("raddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rdov %0" : "=r"(result));
unsigned long result;
__ASM volatile("rstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rsub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("rsub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("rsubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SCLIP8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("sclip8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SCLIP16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("sclip16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SCLIP32(a, b) \
    ({ \
        long result; \
        long __a = (long)(a); \
        __ASM volatile("sclip32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
unsigned long result;
__ASM volatile("scmple8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("scmple16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("scmplt8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("scmplt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sll8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SLLI8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("slli8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("sll16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SLLI16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("slli16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

__ASM volatile("smal %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smalbb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smalbt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smaltt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smalda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smalxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smalds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smaldrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smalxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
unsigned long result;
__ASM volatile("smax8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("smax16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smdrs %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smxds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("smin8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("smin16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmwb %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmwb.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmwt %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smmwt.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smslda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smslxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("smsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("smul8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("smulx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("smul16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("smulx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("sra.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRAI_U(a, b) \
    ({ \
        long result; \
        long __a = (long)(a); \
        __ASM volatile("srai.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
unsigned long result;
__ASM volatile("sra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sra8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRAI8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRAI8_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai8.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("sra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sra16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRAI16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRAI16_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai16.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("srl8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("srl8.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRLI8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRLI8_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli8.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("srl16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("srl16.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRLI16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRLI16_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli16.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("stas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("stsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("sub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sunpkd810 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("sunpkd820 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("sunpkd830 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("sunpkd831 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("sunpkd832 %0, %1" : "=r"(result) : "r"(a));
unsigned long result;
__ASM volatile("swap8 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("swap16 %0, %1" : "=r"(result) : "r"(a));

#define __RV_UCLIP8(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("uclip8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_UCLIP16(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("uclip16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_UCLIP32(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("uclip32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("ucmple8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ucmple16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ucmplt8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ucmplt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("ukadd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukaddh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukaddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("ukmar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("ukmsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("uksub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uksubh %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uksubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
__ASM volatile("umar64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("umaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umax8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umax16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umin8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umin16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("umsr64 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("umul8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("umulx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("umul16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("umulx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uradd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uradd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("uradd64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uraddw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ursub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ursub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("ursub64 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ursubw %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_WEXTI(a, b) \
    ({ \
        unsigned long result; \
        long long __a = (long long)(a); \
        __ASM volatile("wexti %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("wext %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("zunpkd810 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("zunpkd820 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("zunpkd830 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("zunpkd831 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("zunpkd832 %0, %1" : "=r"(result) : "r"(a));
#if (__RISCV_XLEN == 64) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)

unsigned long result;
__ASM volatile("add32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("cras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("crsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kabs32 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("kadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kdmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kdmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kdmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kdmabb16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kdmabt16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kdmatt16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("khmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
__ASM volatile("kmabb32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmabt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmatt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmaxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmxda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("kmads32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmadrs32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmaxds32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmsda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("kmsxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ksll32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_KSLLI32(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("kslli32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("kslra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kslra32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("kstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pkbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pkbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pktt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("pktb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("radd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
unsigned long result;
__ASM volatile("rcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("rsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sll32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SLLI32(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("slli32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("smax32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smtt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smdrs32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("smxds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("smin32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sra32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRAI32(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRAI32_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srai32.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRAIW_U(a, b) \
    ({ \
        long result; \
        int __a = (int)(a); \
        __ASM volatile("sraiw.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
unsigned long result;
__ASM volatile("srl32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("srl32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

#define __RV_SRLI32(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_SRLI32_U(a, b) \
    ({ \
        unsigned long result; \
        unsigned long __a = (unsigned long)(a); \
        __ASM volatile("srli32.u %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

unsigned long result;
__ASM volatile("stas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("stsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("sub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ukstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umax32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("umin32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("uradd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("urstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("ursub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long result;
__ASM volatile("expd80 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd81 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd82 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd83 %0, %1" : "=r"(result) : "r"(a));
#if (__RISCV_XLEN == 64)

unsigned long result;
__ASM volatile("expd84 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd85 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd86 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("expd87 %0, %1" : "=r"(result) : "r"(a));

#endif /* __RISCV_XLEN == 64 */
#endif /* (__RISCV_XLEN == 64) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__) */
#if (__RISCV_XLEN == 32) || defined(__ONLY_FOR_DOXYGEN_DOCUMENT_GENERATION__)

unsigned long long result;
__ASM volatile("dkhm8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkhm16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
unsigned long long result;
__ASM volatile("dkabs8 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dkabs16 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dkslra8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkslra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkadd8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dksub8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dksub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkhmx8 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkhmx16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkwmmul %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkwmmul.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkabs32 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dkslra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dksub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dradd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dradd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dmsr16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dmsr17 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dmsr33 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dmxsr33 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
unsigned long result;
__ASM volatile("dredas16 %0, %1" : "=r"(result) : "r"(a));

unsigned long result;
__ASM volatile("dredsa16 %0, %1" : "=r"(result) : "r"(a));

__ASM volatile("dkclip64 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dkmda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkmxda %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmdrs %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmxds %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbb32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbb32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbt32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmbt32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmtt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmtt32.sra14 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmtt32.sra32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpkbb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpkbt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpktt32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpktb32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpktb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpkbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpkbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpktt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsra16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
unsigned long long result;
__ASM volatile("dadd16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dadd32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmbb16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmbt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsmtt16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("drcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("drcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("drcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("drcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkcras16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkcrsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("drsub16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dstsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dstas32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dcrsa32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dcras32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkstsa16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dkstas16 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));
#define __RV_DSCLIP8(a, b) \
    ({ \
        unsigned long long result; \
        unsigned long long __a = (unsigned long long)(a); \
        __ASM volatile("dsclip8 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_DSCLIP16(a, b) \
    ({ \
        unsigned long long result; \
        unsigned long long __a = (unsigned long long)(a); \
        __ASM volatile("dsclip16 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })

#define __RV_DSCLIP32(a, b) \
    ({ \
        unsigned long long result; \
        unsigned long long __a = (unsigned long long)(a); \
        __ASM volatile("dsclip32 %0, %1, %2" : "=r"(result) : "r"(__a), "K"(b)); \
        result; \
    })
unsigned long long result;
__ASM volatile("drsub32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dpack32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

unsigned long long result;
__ASM volatile("dsunpkd810 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dsunpkd820 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dsunpkd830 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dsunpkd831 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dsunpkd832 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dzunpkd810 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dzunpkd820 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dzunpkd830 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dzunpkd831 %0, %1" : "=r"(result) : "r"(a));

unsigned long long result;
__ASM volatile("dzunpkd832 %0, %1" : "=r"(result) : "r"(a));

__ASM volatile("dkmmac %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmmac.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmmsb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmmsb.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmada %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmaxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmads %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmadrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmaxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmsda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmsxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dumaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dkmxda32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dkmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmaxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmads32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmadrs32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmaxds32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
__ASM volatile("dkmsda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmsxda32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmdrs32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmxds32 %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmalda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmalxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmalds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmaldrs %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmalxds %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmslda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmslxda %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("ddsmaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("ddsmaqa.su %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("ddumaqa %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsma32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmxs32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmxa32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsms32.u %0, %1, %2" : "=r"(result) : "r"(a), "r"(b));

__ASM volatile("dsmada16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmaxda16 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dksms32.u %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dmada32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmalbb %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmalbt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dsmaltt %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmabb32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmabt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));

__ASM volatile("dkmatt32 %0, %1, %2" : "+r"(t) : "r"(a), "r"(b));
#elif defined(__ICCRISCV__)

#if __riscv_xlen == 32
#include "iar_nds32_intrinsic.h"
#elif __riscv_xlen == 64
#include "iar_nds64_intrinsic.h"
#else
#error "Unexpected RISC-V XLEN size."
#endif

#pragma language=save
#pragma language=extended
#define __RV_CLROV __nds__clrov
#define __RV_RDOV __nds__rdov
#define __RV_ADD8 __nds__add8
#define __RV_SUB8 __nds__sub8
#define __RV_ADD16 __nds__add16
#define __RV_SUB16 __nds__sub16
#define __RV_ADD64 __nds__add64
#define __RV_SUB64 __nds__sub64
#define __RV_RADD8 __nds__radd8
#define __RV_RSUB8 __nds__rsub8
#define __RV_RADD16 __nds__radd16
#define __RV_RSUB16 __nds__rsub16
#define __RV_RADD64 __nds__radd64
#define __RV_RSUB64 __nds__rsub64
#define __RV_RADDW __nds__raddw
#define __RV_RSUBW __nds__rsubw
#define __RV_URADD8 __nds__uradd8
#define __RV_URSUB8 __nds__ursub8
#define __RV_URADD16 __nds__uradd16
#define __RV_URSUB16 __nds__ursub16
#define __RV_URADD64 __nds__uradd64
#define __RV_URSUB64 __nds__ursub64
#define __RV_URADDW __nds__uraddw
#define __RV_URSUBW __nds__ursubw
#define __RV_KADD8 __nds__kadd8
#define __RV_KSUB8 __nds__ksub8
#define __RV_KADD16 __nds__kadd16
#define __RV_KSUB16 __nds__ksub16
#define __RV_KADD64 __nds__kadd64
#define __RV_KSUB64 __nds__ksub64
#define __RV_KADDH __nds__kaddh
#define __RV_KSUBH __nds__ksubh
#define __RV_KADDW __nds__kaddw
#define __RV_KSUBW __nds__ksubw
#define __RV_UKADD8 __nds__ukadd8
#define __RV_UKSUB8 __nds__uksub8
#define __RV_UKADD16 __nds__ukadd16
#define __RV_UKSUB16 __nds__uksub16
#define __RV_UKADD64 __nds__ukadd64
#define __RV_UKSUB64 __nds__uksub64
#define __RV_UKADDH __nds__ukaddh
#define __RV_UKSUBH __nds__uksubh
#define __RV_UKADDW __nds__ukaddw
#define __RV_UKSUBW __nds__uksubw
#define __RV_CRAS16 __nds__cras16
#define __RV_CRSA16 __nds__crsa16
#define __RV_RCRAS16 __nds__rcras16
#define __RV_RCRSA16 __nds__rcrsa16
#define __RV_URCRAS16 __nds__urcras16
#define __RV_URCRSA16 __nds__urcrsa16
#define __RV_KCRAS16 __nds__kcras16
#define __RV_KCRSA16 __nds__kcrsa16
#define __RV_UKCRAS16 __nds__ukcras16
#define __RV_UKCRSA16 __nds__ukcrsa16
#define __RV_SRA8 __nds__sra8
#define __RV_SRAI8 __nds__sra8
#define __RV_SRA16 __nds__sra16
#define __RV_SRAI16 __nds__sra16
#define __RV_SRL8 __nds__srl8
#define __RV_SRL16 __nds__srl16
#define __RV_SLL8 __nds__sll8
#define __RV_SLL16 __nds__sll16
#define __RV_SRA_U __nds__sra_u
#define __RV_SRA8_U __nds__sra8_u
#define __RV_SRA16_U __nds__sra16_u
#define __RV_SRL8_U __nds__srl8_u
#define __RV_SRL16_U __nds__srl16_u
#define __RV_KSLL8 __nds__ksll8
#define __RV_KSLL16 __nds__ksll16
#define __RV_KSLLW __nds__ksllw
#define __RV_KSLRA8 __nds__kslra8
#define __RV_KSLRA8_U __nds__kslra8_u
#define __RV_KSLRA16 __nds__kslra16
#define __RV_KSLRA16_U __nds__kslra16_u
#define __RV_KSLRAW __nds__kslraw
#define __RV_KSLRAW_U __nds__kslraw_u
#define __RV_CMPEQ8 __nds__cmpeq8
#define __RV_CMPEQ16 __nds__cmpeq16
#define __RV_SCMPLE8 __nds__scmple8
#define __RV_SCMPLE16 __nds__scmple16
#define __RV_SCMPLT8 __nds__scmplt8
#define __RV_SCMPLT16 __nds__scmplt16
#define __RV_UCMPLE8 __nds__ucmple8
#define __RV_UCMPLE16 __nds__ucmple16
#define __RV_UCMPLT8 __nds__ucmplt8
#define __RV_UCMPLT16 __nds__ucmplt16
#define __RV_SMUL8 __nds__smul8
#define __RV_UMUL8 __nds__umul8
#define __RV_SMUL16 __nds__smul16
#define __RV_UMUL16 __nds__umul16
#define __RV_SMULX8 __nds__smulx8
#define __RV_UMULX8 __nds__umulx8
#define __RV_SMULX16 __nds__smulx16
#define __RV_UMULX16 __nds__umulx16
#define __RV_KHM8 __nds__khm8
#define __RV_KHMX8 __nds__khmx8
#define __RV_KHM16 __nds__khm16
#define __RV_KHMX16 __nds__khmx16
#define __RV_MULR64 __nds__mulr64
#define __RV_MULSR64 __nds__mulsr64
#define __RV_SMMUL __nds__smmul
#define __RV_SMMUL_U __nds__smmul_u
#define __RV_WEXT __nds__wext
#define __RV_SUNPKD810 __nds__sunpkd810
#define __RV_SUNPKD820 __nds__sunpkd820
#define __RV_SUNPKD830 __nds__sunpkd830
#define __RV_SUNPKD831 __nds__sunpkd831
#define __RV_SUNPKD832 __nds__sunpkd832
#define __RV_ZUNPKD810 __nds__zunpkd810
#define __RV_ZUNPKD820 __nds__zunpkd820
#define __RV_ZUNPKD830 __nds__zunpkd830
#define __RV_ZUNPKD831 __nds__zunpkd831
#define __RV_ZUNPKD832 __nds__zunpkd832
#define __RV_PKBB16 __nds__pkbb16
#define __RV_PKBT16 __nds__pkbt16
#define __RV_PKTT16 __nds__pktt16
#define __RV_PKTB16 __nds__pktb16
#define __RV_KMMAC __nds__kmmac
#define __RV_KMMAC_U __nds__kmmac_u
#define __RV_KMMSB __nds__kmmsb
#define __RV_KMMSB_U __nds__kmmsb_u
#define __RV_KWMMUL __nds__kwmmul
#define __RV_KWMMUL_U __nds__kwmmul_u
#define __RV_SMMWB __nds__smmwb
#define __RV_SMMWB_U __nds__smmwb_u
#define __RV_SMMWT __nds__smmwt
#define __RV_SMMWT_U __nds__smmwt_u
#define __RV_KMMAWB __nds__kmmawb
#define __RV_KMMAWB_U __nds__kmmawb_u
#define __RV_KMMAWT __nds__kmmawt
#define __RV_KMMAWT_U __nds__kmmawt_u
#define __RV_KMMWB2 __nds__kmmwb2
#define __RV_KMMWB2_U __nds__kmmwb2_u
#define __RV_KMMWT2 __nds__kmmwt2
#define __RV_KMMWT2_U __nds__kmmwt2_u
#define __RV_KMMAWB2 __nds__kmmawb2
#define __RV_KMMAWB2_U __nds__kmmawb2_u
#define __RV_KMMAWT2 __nds__kmmawt2
#define __RV_KMMAWT2_U __nds__kmmawt2_u
#define __RV_SMBB16 __nds__smbb16
#define __RV_SMBT16 __nds__smbt16
#define __RV_SMTT16 __nds__smtt16
#define __RV_KMDA __nds__kmda
#define __RV_KMXDA __nds__kmxda
#define __RV_SMDS __nds__smds
#define __RV_SMDRS __nds__smdrs
#define __RV_SMXDS __nds__smxds
#define __RV_KMABB __nds__kmabb
#define __RV_KMABT __nds__kmabt
#define __RV_KMATT __nds__kmatt
#define __RV_KMADA __nds__kmada
#define __RV_KMAXDA __nds__kmaxda
#define __RV_KMADS __nds__kmads
#define __RV_KMADRS __nds__kmadrs
#define __RV_KMAXDS __nds__kmaxds
#define __RV_KMSDA __nds__kmsda
#define __RV_KMSXDA __nds__kmsxda
#define __RV_SMAL __nds__smal
#define __RV_SMAQA __nds__smaqa
#define __RV_UMAQA __nds__umaqa
#define __RV_SMAQA_SU __nds__smaqa_su
#define __RV_SMAR64 __nds__smar64
#define __RV_SMSR64 __nds__smsr64
#define __RV_UMAR64 __nds__umar64
#define __RV_UMSR64 __nds__umsr64
#define __RV_KMAR64 __nds__kmar64
#define __RV_KMSR64 __nds__kmsr64
#define __RV_UKMAR64 __nds__ukmar64
#define __RV_UKMSR64 __nds__ukmsr64
#define __RV_SMALBB __nds__smalbb
#define __RV_SMALBT __nds__smalbt
#define __RV_SMALTT __nds__smaltt
#define __RV_SMALDA __nds__smalda
#define __RV_SMALXDA __nds__smalxda
#define __RV_SMALDS __nds__smalds
#define __RV_SMALDRS __nds__smaldrs
#define __RV_SMALXDS __nds__smalxds
#define __RV_SMSLDA __nds__smslda
#define __RV_SMSLXDA __nds__smslxda
#define __RV_MINW __nds__minw
#define __RV_MAXW __nds__maxw
#define __RV_SMIN8 __nds__smin8
#define __RV_SMAX8 __nds__smax8
#define __RV_SMIN16 __nds__smin16
#define __RV_SMAX16 __nds__smax16
#define __RV_UMIN8 __nds__umin8
#define __RV_UMAX8 __nds__umax8
#define __RV_UMIN16 __nds__umin16
#define __RV_UMAX16 __nds__umax16
#define __RV_KABS8 __nds__kabs8
#define __RV_KABS16 __nds__kabs16
#define __RV_KABSW __nds__kabsw
#define __RV_SCLIP8 __nds__sclip8
#define __RV_SCLIP16 __nds__sclip16
#define __RV_SCLIP32 __nds__sclip32
#define __RV_UCLIP8 __nds__uclip8
#define __RV_UCLIP16 __nds__uclip16
#define __RV_UCLIP32 __nds__uclip32
#define __RV_CLO8 __nds__clo8
#define __RV_CLO16 __nds__clo16
#define __RV_CLO32 __nds__clo32
#define __RV_CLZ8 __nds__clz8
#define __RV_CLZ16 __nds__clz16
#define __RV_CLZ32 __nds__clz32
#define __RV_CLRS8 __nds__clrs8
#define __RV_CLRS16 __nds__clrs16
#define __RV_CLRS32 __nds__clrs32
#define __RV_SWAP8 __nds__swap8
#define __RV_SWAP16 __nds__swap16
#define __RV_KHMBB __nds__khmbb
#define __RV_KHMBT __nds__khmbt
#define __RV_KHMTT __nds__khmtt
#define __RV_KDMBB __nds__kdmbb
#define __RV_KDMBT __nds__kdmbt
#define __RV_KDMTT __nds__kdmtt
#define __RV_KDMABB __nds__kdmabb
#define __RV_KDMABT __nds__kdmabt
#define __RV_KDMATT __nds__kdmatt
#define __RV_MADDR32 __nds__maddr32
#define __RV_MSUBR32 __nds__msubr32
#define __RV_PBSAD __nds__pbsad
#define __RV_PBSADA __nds__pbsada
#define __RV_AVE __nds__ave
#define __RV_BITREV __nds__bitrev
#define __RV_INSB __nds__insb
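/* Usage sketch (illustrative, hypothetical helper name): because the IAR path
   maps each __RV_* name onto the matching __nds__* builtin, application code
   stays source-portable between the GCC/Clang inline-asm path above and the
   IAR builtin path here. */
static inline unsigned long sat_add_q15x2(unsigned long a, unsigned long b)
{
    return __RV_KADD16(a, b); /* inline asm under GCC, __nds__kadd16 under IAR */
}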
24480 #if (__riscv_xlen == 64)
24481 #define __RV_ADD32 __nds__add32
24482 #define __RV_SUB32 __nds__sub32
24483 #define __RV_RADD32 __nds__radd32
24484 #define __RV_RSUB32 __nds__rsub32
24485 #define __RV_URADD32 __nds__uradd32
24486 #define __RV_URSUB32 __nds__ursub32
24487 #define __RV_KADD32 __nds__kadd32
24488 #define __RV_KSUB32 __nds__ksub32
24489 #define __RV_UKADD32 __nds__ukadd32
24490 #define __RV_UKSUB32 __nds__uksub32
24491 #define __RV_CRAS32 __nds__cras32
24492 #define __RV_CRSA32 __nds__crsa32
24493 #define __RV_RCRAS32 __nds__rcras32
24494 #define __RV_RCRSA32 __nds__rcrsa32
24495 #define __RV_URCRAS32 __nds__urcras32
24496 #define __RV_URCRSA32 __nds__urcrsa32
24497 #define __RV_KCRAS32 __nds__kcras32
24498 #define __RV_KCRSA32 __nds__kcrsa32
24499 #define __RV_UKCRAS32 __nds__ukcras32
24500 #define __RV_UKCRSA32 __nds__ukcrsa32
24501 #define __RV_SRA32 __nds__sra32
24502 #define __RV_SRAI32 __nds__sra32
24503 #define __RV_SRL32 __nds__srl32
24504 #define __RV_SLL32 __nds__sll32
24505 #define __RV_SLLI32 __nds__sll32
24506 #define __RV_SRAW_U __nds__sraw_u
24507 #define __RV_SRA32_U __nds__sra32_u
24508 #define __RV_SRL32_U __nds__srl32_u
24509 #define __RV_KSLL32 __nds__ksll32
24510 #define __RV_KSLRA32 __nds__kslra32
24511 #define __RV_KSLRA32_U __nds__kslra32_u
24512 #define __RV_SMBB32 __nds__smbb32
24513 #define __RV_SMBT32 __nds__smbt32
24514 #define __RV_SMTT32 __nds__smtt32
24515 #define __RV_PKBB32 __nds__pkbb32
24516 #define __RV_PKBT32 __nds__pkbt32
24517 #define __RV_PKTT32 __nds__pktt32
24518 #define __RV_PKTB32 __nds__pktb32
24519 #define __RV_SMIN32 __nds__smin32
24520 #define __RV_SMAX32 __nds__smax32
24521 #define __RV_UMIN32 __nds__umin32
24522 #define __RV_UMAX32 __nds__umax32
24523 #define __RV_KABS32 __nds__kabs32
24524 #define __RV_KHMBB16 __nds__khmbb16
24525 #define __RV_KHMBT16 __nds__khmbt16
24526 #define __RV_KHMTT16 __nds__khmtt16
24527 #define __RV_KDMBB16 __nds__kdmbb16
24528 #define __RV_KDMBT16 __nds__kdmbt16
24529 #define __RV_KDMTT16 __nds__kdmtt16
24530 #define __RV_KDMABB16 __nds__kdmabb16
24531 #define __RV_KDMABT16 __nds__kdmabt16
24532 #define __RV_KDMATT16 __nds__kdmatt16
24533 #define __RV_KMABB32 __nds__kmabb32
24534 #define __RV_KMABT32 __nds__kmabt32
24535 #define __RV_KMATT32 __nds__kmatt32
24536 #define __RV_KMDA32 __nds__kmda32
24537 #define __RV_KMXDA32 __nds__kmxda32
24538 #define __RV_KMADA32 __nds__kmada32
24539 #define __RV_KMAXDA32 __nds__kmaxda32
24540 #define __RV_KMADS32 __nds__kmads32
24541 #define __RV_KMADRS32 __nds__kmadrs32
24542 #define __RV_KMAXDS32 __nds__kmaxds32
24543 #define __RV_KMSDA32 __nds__kmsda32
24544 #define __RV_KMSXDA32 __nds__kmsxda32
24545 #define __RV_SMDS32 __nds__smds32
24546 #define __RV_SMDRS32 __nds__smdrs32
24547 #define __RV_SMXDS32 __nds__smxds32
#endif
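When the toolchain ships Andes-style builtins, the header simply remaps every __RV_ intrinsic onto the matching __nds__ builtin, so application code written against the __RV_ names compiles identically whether the back end is compiler builtins or the inline-assembly wrappers defined earlier in this header. A minimal sketch under that assumption (q15_sat_add is a hypothetical kernel, not part of the header):

/* Sketch: saturating Q15 vector add written against the portable __RV_ names.
 * On this toolchain each call expands to the corresponding __nds__ builtin. */
void q15_sat_add(unsigned long *dst, const unsigned long *x,
                 const unsigned long *y, unsigned int n_words)
{
    for (unsigned int i = 0; i < n_words; i++) {
        dst[i] = __RV_KADD16(x[i], y[i]); /* two (RV32) or four (RV64) saturating 16-bit adds */
    }
}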
/* No compiler builtins are available on this back end: each wrapper
 * hand-encodes its instruction as a raw R-type word via .insn
 * (opcode 0x7F, funct3 0x2, per-instruction funct7), forced inline
 * with no out-of-line body. */
#pragma inline=forced_no_body
unsigned long __RV_STAS16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x7A, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_RSTAS16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x5A, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_KSTAS16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x62, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_URSTAS16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x6A, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x72, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_STSA16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x7B, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_RSTSA16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x5B, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_KSTSA16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x63, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_URSTSA16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x6B, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x73, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_STAS32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x78, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_RSTAS32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x58, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_KSTAS32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x60, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_URSTAS32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x68, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x70, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_STSA32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x79, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_RSTSA32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x59, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_KSTSA32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x61, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_URSTSA32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x69, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b) {
    unsigned long r;
    __asm(".insn r 0x7F, 0x2, 0x71, %0,%1,%2" : "=r"(r) : "r"(a), "r"(b));
    return r;
}
#pragma inline=forced_no_body
unsigned long __RV_EXPD80(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD81(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD82(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD83(unsigned long a)
#if __RISCV_XLEN == 64
#pragma inline=forced_no_body
unsigned long __RV_EXPD84(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD85(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD86(unsigned long a)
#pragma inline=forced_no_body
unsigned long __RV_EXPD87(unsigned long a)
#pragma language=restore
#error Unknown compiler
24761 #define __QADD8(x, y) __RV_KADD8((x), (y))
24763 #define __QSUB8(x, y) __RV_KSUB8((x), (y))
24765 #define __QADD16(x, y) __RV_KADD16((x), (y))
24767 #define __SHADD16(x, y) __RV_RADD16((x), (y))
24769 #define __QSUB16(x, y) __RV_KSUB16((x), (y))
24771 #define __SHSUB16(x, y) __RV_RSUB16((x), (y))
24773 #define __QASX(x, y) __RV_KCRAS16((x), (y))
24775 #define __SHASX(x, y) __RV_RCRAS16((x), (y))
24777 #define __QSAX(x, y) __RV_KCRSA16((x), (y))
24779 #define __SHSAX(x, y) __RV_RCRSA16((x), (y))
24781 #define __SMUSDX(x, y) __RV_SMXDS((y), (x))
24788 #define __QADD(x, y) __RV_KADDW((x), (y))
24790 #define __QSUB(x, y) __RV_KSUBW((x), (y))
24827 #define __SXTB16(x) __RV_SUNPKD820(x)
24833 #define __SXTAB16_RORn(ARG1, ARG2, ROTATE) __SXTAB16(ARG1, __ROR(ARG2, ROTATE))
24840 return (acc + mul);
24842 #define __DKHM8 __RV_DKHM8
24843 #define __DKHM16 __RV_DKHM16
24844 #define __DKSUB16 __RV_DKSUB16
24845 #define __SMAQA __RV_SMAQA
24846 #define __MULSR64 __RV_MULSR64
24847 #define __DQADD8 __RV_DKADD8
24848 #define __DQSUB8 __RV_DKSUB8
24849 #define __DKADD16 __RV_DKADD16
24850 #define __PKBB16 __RV_PKBB16
24851 #define __DKSLRA16 __RV_DKSLRA16
24852 #define __DKSLRA8 __RV_DKSLRA8
24853 #define __KABSW __RV_KABSW
24854 #define __DKABS8 __RV_DKABS8
24855 #define __DKABS16 __RV_DKABS16
24856 #define __SMALDA __RV_SMALDA
24857 #define __SMSLDA __RV_SMSLDA
24858 #define __SMALBB __RV_SMALBB
24859 #define __SUB64 __RV_SUB64
24860 #define __ADD64 __RV_ADD64
24861 #define __SMBB16 __RV_SMBB16
24862 #define __SMBT16 __RV_SMBT16
24863 #define __SMTT16 __RV_SMTT16
24864 #define __EXPD80 __RV_EXPD80
24865 #define __SMAX8 __RV_SMAX8
24866 #define __SMAX16 __RV_SMAX16
24867 #define __PKTT16 __RV_PKTT16
24868 #define __KADD16 __RV_KADD16
24869 #define __SADD16 __RV_ADD16
24870 #define __SSUB8 __RV_KSUB8
24871 #define __SADD8 __RV_KADD8
24872 #define __USAT16 __RV_UCLIP16
24873 #define __SMALTT __RV_SMALTT
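The aliases above give ARM CMSIS-style SIMD source a direct landing on the P extension, so existing DSP kernels build unchanged. A small porting sketch (q15_butterfly is hypothetical), assuming RV32 where an unsigned long holds two Q15 lanes:

/* Sketch: CMSIS-style saturating butterfly on packed Q15 pairs; each macro
 * expands to the __RV_ intrinsic named in the table above. */
unsigned long q15_butterfly(unsigned long a, unsigned long b)
{
    unsigned long sum  = __QADD16(a, b);  /* per-lane saturating add */
    unsigned long diff = __QSUB16(a, b);  /* per-lane saturating subtract */
    return __SHADD16(sum, diff);          /* halving add keeps the result in range */
}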
24876 #define __PKHBT(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
24877 (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
24878 (((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
24879 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)))
24882 #define __PKHTB(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
24883 (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
24884 (((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
24885 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)))
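__PKHBT/__PKHTB reproduce the ARM halfword-packing intrinsics: the ARG3 == 0 and ARG3 == 16 cases collapse to single pack instructions, and any other shift falls back to shift-and-mask arithmetic. A worked example (values chosen for illustration):

/* With x = 0x11112222 and y = 0x33334444:
 *   __PKHBT(x, y, 16) -> 0x44442222  (bottom of x; bottom of y shifted to top)
 *   __PKHTB(x, y, 16) -> 0x11113333  (top of x; top of y shifted to bottom) */
unsigned long pack_pair(unsigned long x, unsigned long y)
{
    return __PKHBT(x, y, 16);
}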
24887 #if __RISCV_XLEN == 64
24890 #define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG2, ARG1) : \
24891 (ARG3 == 16) ? __RV_PKBB16(ARG2, ARG1) : \
24892 ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
24893 ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
24894 ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
24895 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))
24899 #define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_PKTB16(ARG1, ARG2) : \
24900 (ARG3 == 16) ? __RV_PKTT16(ARG1, ARG2) : \
24901 ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
24902 ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
24903 ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
24904 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
#else
24908 #define __PKHBT64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG2, ARG1) : \
24909 (ARG3 == 16) ? __RV_DPKBB16(ARG2, ARG1) : \
24910 ((int64_t)((((uint32_t)((uint64_t)ARG1 >> 32)) & 0x0000FFFFUL) | \
24911 ((((uint32_t)((uint64_t)ARG2 >> 32)) << (ARG3)) & 0xFFFF0000UL)) << 32) | \
24912 ((int64_t)(((((uint32_t)(ARG1))) & 0x0000FFFFUL) | \
24913 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)) & 0xFFFFFFFFUL))
24917 #define __PKHTB64(ARG1, ARG2, ARG3) ((ARG3 == 0) ? __RV_DPKTB16(ARG1, ARG2) : \
24918 (ARG3 == 16) ? __RV_DPKTT16(ARG1, ARG2) : \
24919 ((uint64_t)(((uint32_t)((uint64_t)ARG1 >> 32) & 0xFFFF0000UL) | \
24920 ((((uint32_t)((uint64_t)ARG2 >> 32)) >> (ARG3)) & 0x0000FFFFUL)) << 32) | \
24921 ((uint64_t)(((uint32_t)(ARG1) & 0xFFFF0000UL) | \
24922 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)) & 0xFFFFFFFFUL))
#endif
24926 #define __SXTB16_RORn(ARG1, ARG2) __RV_SUNPKD820(__ROR(ARG1, ARG2))
__STATIC_FORCEINLINE unsigned long __RV_CLZ16(unsigned long a)
CLZ16 (SIMD 16-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long __RV_CLRS8(unsigned long a)
CLRS8 (SIMD 8-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDA (Two Cross 16x16 with 32-bit Signed Double Add)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS16(unsigned long long a, unsigned long long b)
DRCRAS16 (16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE8(unsigned long a, unsigned long b)
UCMPLE8 (SIMD 8-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA16(unsigned long a, unsigned long b)
UKCRSA16 (SIMD 16-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UKMSR64(unsigned long long t, unsigned long a, unsigned long b)
UKMSR64 (Unsigned Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE void __RV_CLROV(void)
CLROV (Clear OV flag)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB32(unsigned long long a, unsigned long long b)
DPKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_PKBT16(unsigned long a, unsigned long b)
PKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE long __RV_KMADRS(long t, unsigned long a, unsigned long b)
KMADRS (SIMD Saturating Signed Multiply Two Halfs & Reverse Subtract & Add)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS32(unsigned long a, unsigned long b)
RCRAS32 (SIMD 32-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_DSMXS32_U(unsigned long long a, unsigned long long b)
DSMXS32.u (64-bit SIMD 32-bit Signed Multiply Cross Subtraction With Rounding and Clip)
__STATIC_FORCEINLINE unsigned long __RV_PBSADA(unsigned long t, unsigned long a, unsigned long b)
PBSADA (Parallel Byte Sum of Absolute Difference Accum)
__STATIC_FORCEINLINE long __RV_KMABB(long t, unsigned long a, unsigned long b)
KMABB (SIMD Saturating Signed Multiply Bottom Halfs & Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKHM8(unsigned long long a, unsigned long long b)
DKHM8 (64-bit SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_WEXT(long long a, unsigned int b)
WEXT (Extract Word from 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_PKBB16(unsigned long a, unsigned long b)
PKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD810(unsigned long long a)
DZUNPKD810 (Unsigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long __RV_SRL16(unsigned long a, unsigned int b)
SRL16 (SIMD 16-bit Shift Right Logical)
__STATIC_FORCEINLINE long long __RV_KADD64(long long a, long long b)
KADD64 (64-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS16(unsigned long a, unsigned long b)
RSTAS16 (SIMD 16-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_SMMUL_U(long a, long b)
SMMUL.u (SIMD MSW Signed Multiply Word with Rounding)
__STATIC_FORCEINLINE long __RV_KMDA(unsigned long a, unsigned long b)
KMDA (SIMD Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long __RV_SMXDS(unsigned long a, unsigned long b)
SMXDS (SIMD Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB (64-bit MSW 32x32 Signed Multiply and Saturating Sub)
__STATIC_FORCEINLINE long __RV_KMMSB(long t, long a, long b)
KMMSB (SIMD Saturating MSW Signed Multiply Word and Subtract)
__STATIC_FORCEINLINE unsigned long __RV_SMAX32(unsigned long a, unsigned long b)
SMAX32 (SIMD 32-bit Signed Maximum)
__STATIC_FORCEINLINE long __RV_KDMBT(unsigned int a, unsigned int b)
KDMBT (Signed Saturating Double Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_SRA16_U(unsigned long a, unsigned long b)
SRA16.u (SIMD 16-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA14(unsigned long long a, unsigned long long b)
DSMTT32.sra14 (Signed Multiply Top Word & Top Word with Right Shift 14-bit)
__STATIC_FORCEINLINE long long __RV_SMALBB(long long t, unsigned long a, unsigned long b)
SMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE long __RV_KMABT32(long t, unsigned long a, unsigned long b)
KMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE unsigned long __RV_SMIN32(unsigned long a, unsigned long b)
SMIN32 (SIMD 32-bit Signed Minimum)
__STATIC_FORCEINLINE long __RV_KMAXDS(long t, unsigned long a, unsigned long b)
KMAXDS (SIMD Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE unsigned long __RV_KHMTT16(unsigned long a, unsigned long b)
KHMTT16 (SIMD Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32(unsigned long a, int b)
KSLRA32 (SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_DADD16(unsigned long long a, unsigned long long b)
DADD16 (16-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_PKBT32(unsigned long a, unsigned long b)
PKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_URSUB64(unsigned long long a, unsigned long long b)
URSUB64 (64-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KSLL8(unsigned long a, unsigned int b)
KSLL8 (SIMD 8-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE long __RV_KMMAC_U(long t, long a, long b)
KMMAC.u (SIMD Saturating MSW Signed Multiply Word and Add with Rounding)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA32(unsigned long long a, unsigned long long b)
DSMBT32.sra32 (Signed Multiply Bottom Word & Top Word with Right Shift 32)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA32(unsigned long a, unsigned long b)
UKSTSA32 (SIMD 32-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB16(unsigned long a, unsigned long b)
UKSUB16 (SIMD 16-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SRL32_U(unsigned long a, unsigned int b)
SRL32.u (SIMD 32-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KHMX8(unsigned long a, unsigned long b)
KHMX8 (SIMD Signed Saturating Crossed Q7 Multiply)
__STATIC_FORCEINLINE long __RV_KMATT(long t, unsigned long a, unsigned long b)
KMATT (SIMD Saturating Signed Multiply Top Halfs & Add)
__STATIC_FORCEINLINE unsigned long __RV_URSUB32(unsigned long a, unsigned long b)
URSUB32 (SIMD 32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SRL8(unsigned long a, unsigned int b)
SRL8 (SIMD 8-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_BITREV(unsigned long a, unsigned long b)
BITREV (Bit Reverse)
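BITREV reverses the low-order bits of a (the reversed width is b + 1 bits in the P-extension draft this header tracks), which is exactly the index permutation a radix-2 FFT needs. A sketch under that assumption (bitrev_permute is hypothetical):

/* Sketch: in-place bit-reversal reorder of a 2^log2n element buffer. */
void bitrev_permute(unsigned long *buf, unsigned long log2n)
{
    unsigned long n = 1UL << log2n;
    for (unsigned long i = 0; i < n; i++) {
        unsigned long j = __RV_BITREV(i, log2n - 1); /* reverse low log2n bits of i */
        if (j > i) {
            unsigned long t = buf[i]; buf[i] = buf[j]; buf[j] = t;
        }
    }
}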
__STATIC_FORCEINLINE unsigned long long __RV_DKMADRS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADRS (Two 16x16 with 32-bit Signed Add and Reversed Sub)
__STATIC_FORCEINLINE unsigned long __RV_SRA8_U(unsigned long a, unsigned int b)
SRA8.u (SIMD 8-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_AVE(long a, long b)
AVE (Average with Rounding)
__STATIC_FORCEINLINE long long __RV_DSMTT32(unsigned long long a, unsigned long long b)
DSMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX8(unsigned long long a, unsigned long long b)
DKHMX8 (64-bit SIMD Signed Crossed Saturating Q7 Multiply)
__STATIC_FORCEINLINE unsigned long __RV_URSUBW(unsigned int a, unsigned int b)
URSUBW (32-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_MSUBR32(unsigned long t, unsigned long a, unsigned long b)
MSUBR32 (Multiply and Subtract from 32-Bit Word)
__STATIC_FORCEINLINE long __RV_SMBB16(unsigned long a, unsigned long b)
SMBB16 (SIMD Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE long __RV_SMMWB(long a, unsigned long b)
SMMWB (SIMD MSW Signed Multiply Word and Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBT16(unsigned long long a, unsigned long long b)
DSMBT16 (Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE unsigned long __RV_SLL32(unsigned long a, unsigned int b)
SLL32 (SIMD 32-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL8(unsigned int a, unsigned int b)
SMUL8 (SIMD Signed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KHMBT16(unsigned long a, unsigned long b)
KHMBT16 (SIMD Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_EXPD83(unsigned long a)
EXPD83 (Expand and Copy Byte 3 to 32-bit (RV32) or 64-bit (RV64))
__STATIC_FORCEINLINE unsigned long __RV_UKCRSA32(unsigned long a, unsigned long b)
UKCRSA32 (SIMD 32-bit Unsigned Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE long __RV_KSUBH(int a, int b)
KSUBH (Signed Subtraction with Q15 Saturation)
__STATIC_FORCEINLINE long long __RV_DSMBT32_SRA14(unsigned long long a, unsigned long long b)
DSMBT32.sra14 (Signed Multiply Bottom Word & Top Word with Right Shift 14)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ8(unsigned long a, unsigned long b)
CMPEQ8 (SIMD 8-bit Integer Compare Equal)
__STATIC_FORCEINLINE long __RV_KDMATT(long t, unsigned int a, unsigned int b)
KDMATT (Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE long __RV_KHMTT(unsigned int a, unsigned int b)
KHMTT (Signed Saturating Half Multiply T16 x T16)
__STATIC_FORCEINLINE long __RV_KMXDA32(unsigned long a, unsigned long b)
KMXDA32 (Signed Crossed Multiply Two Words and Add)
__STATIC_FORCEINLINE long __RV_KMMWB2_U(long a, unsigned long b)
KMMWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 with Rounding)
__STATIC_FORCEINLINE long __RV_KDMABB(long t, unsigned int a, unsigned int b)
KDMABB (Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_RSUB16(unsigned long a, unsigned long b)
RSUB16 (SIMD 16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE long __RV_SMTT32(unsigned long a, unsigned long b)
SMTT32 (Signed Multiply Top Word & Top Word)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS32(unsigned long long a)
DKABS32 (64-bit SIMD 32-bit Saturating Absolute)
__STATIC_FORCEINLINE long __RV_KMAXDS32(long t, unsigned long a, unsigned long b)
KMAXDS32 (Saturating Signed Crossed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMSXDA32(long t, unsigned long a, unsigned long b)
KMSXDA32 (Saturating Signed Crossed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL_U(unsigned long long a, unsigned long long b)
DSMMUL.u (64-bit MSW 32x32 Signed Multiply with Rounding)
__STATIC_FORCEINLINE long long __RV_SMALXDA(long long t, unsigned long a, unsigned long b)
SMALXDA (Signed Crossed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD820(unsigned long a)
ZUNPKD820 (Unsigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE unsigned long __RV_KHMX16(unsigned long a, unsigned long b)
KHMX16 (SIMD Signed Saturating Crossed Q15 Multiply)
__STATIC_FORCEINLINE long __RV_KMMAWB2(long t, unsigned long a, unsigned long b)
KMMAWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add)
__STATIC_FORCEINLINE unsigned long __RV_RSTAS32(unsigned long a, unsigned long b)
RSTAS32 (SIMD 32-bit Signed Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_EXPD82(unsigned long a)
EXPD82 (Expand and Copy Byte 2 to 32-bit (RV32) or 64-bit (RV64))
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS16(unsigned long a, unsigned long b)
UKSTAS16 (SIMD 16-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMDRS(unsigned long long a, unsigned long long b)
DSMDRS (Signed Multiply Two Halfs and Reverse Subtract)
__STATIC_FORCEINLINE long __RV_RADDW(int a, int b)
RADDW (32-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSUB16(unsigned long a, unsigned long b)
KSUB16 (SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKSTSA16(unsigned long a, unsigned long b)
UKSTSA16 (SIMD 16-bit Unsigned Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX8(unsigned int a, unsigned int b)
SMULX8 (SIMD Signed Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD831(unsigned long long a)
DZUNPKD831 (Unsigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD32(unsigned long long a, unsigned long long b)
DRADD32 (64-bit SIMD 32-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long __RV_PKTT16(unsigned long a, unsigned long b)
PKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_SWAP16(unsigned long a)
SWAP16 (Swap Halfword within Word)
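SWAP16 exchanges the two halfwords inside each 32-bit word, which covers 16-bit endian conversion as well as swapping a packed {re, im} pair. A trivial sketch:

/* Sketch: 0xAAAABBBB -> 0xBBBBAAAA within each 32-bit word. */
unsigned long swap_halves(unsigned long w)
{
    return __RV_SWAP16(w);
}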
__STATIC_FORCEINLINE long long __RV_DKMADA32(long long t, unsigned long long a, unsigned long long b)
DKMADA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSRA16(unsigned long long a, unsigned long b)
DSRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KMMWB2(long a, unsigned long b)
KMMWB2 (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2)
__STATIC_FORCEINLINE unsigned long __RV_RCRAS16(unsigned long a, unsigned long b)
RCRAS16 (SIMD 16-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RADD16(unsigned long a, unsigned long b)
RADD16 (SIMD 16-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX16(unsigned int a, unsigned int b)
UMULX16 (SIMD Unsigned Crossed 16-bit Multiply)
__STATIC_FORCEINLINE long __RV_KMDA32(unsigned long a, unsigned long b)
KMDA32 (Signed Multiply Two Words and Add)
__STATIC_FORCEINLINE unsigned long __RV_CLZ8(unsigned long a)
CLZ8 (SIMD 8-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long long __RV_UMULX8(unsigned int a, unsigned int b)
UMULX8 (SIMD Unsigned Crossed 8-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8_U(unsigned long a, int b)
KSLRA8.u (SIMD 8-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_KMADS32(long t, unsigned long a, unsigned long b)
KMADS32 (Saturating Signed Multiply Two Words & Subtract & Add)
__STATIC_FORCEINLINE long __RV_SMXDS32(unsigned long a, unsigned long b)
SMXDS32 (Signed Crossed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD820(unsigned long long a)
DSUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE long long __RV_DKMABB32(long long t, unsigned long long a, unsigned long long b)
DKMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long long __RV_MULSR64(long a, long b)
MULSR64 (Multiply Word Signed to 64-bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD830(unsigned long long a)
DSUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE long __RV_KMABB32(long t, unsigned long a, unsigned long b)
KMABB32 (Saturating Signed Multiply Bottom Words & Add)
__STATIC_FORCEINLINE long long __RV_SMSR64(long long t, long a, long b)
SMSR64 (Signed Multiply and Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long long __RV_DKMDA(unsigned long long a, unsigned long long b)
DKMDA (Signed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long __RV_CLO32(unsigned long a)
CLO32 (SIMD 32-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD832(unsigned long long a)
DZUNPKD832 (Unsigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_KDMBB16(unsigned long a, unsigned long b)
KDMBB16 (SIMD Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_CLRS32(unsigned long a)
CLRS32 (SIMD 32-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE long __RV_KMMAWT2_U(long t, unsigned long a, unsigned long b)
KMMAWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_UKSTAS32(unsigned long a, unsigned long b)
UKSTAS32 (SIMD 32-bit Unsigned Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR17(unsigned long a, unsigned long b)
DMSR17 (Signed Multiply Halfs with Right Shift 17-bit and Cross Multiply Halfs with Right Shift 17-bit)
__STATIC_FORCEINLINE unsigned long __RV_RSUB8(unsigned long a, unsigned long b)
RSUB8 (SIMD 8-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD32(unsigned long long a, unsigned long long b)
DKADD32 (64-bit SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE long __RV_KMAXDA32(long t, unsigned long a, unsigned long b)
KMAXDA32 (Saturating Signed Crossed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE unsigned long __RV_CRAS16(unsigned long a, unsigned long b)
CRAS16 (SIMD 16-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_KHMBT(unsigned int a, unsigned int b)
KHMBT (Signed Saturating Half Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA16(unsigned long a, unsigned long b)
KSTSA16 (SIMD 16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_RSUB32(unsigned long a, unsigned long b)
RSUB32 (SIMD 32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB32(unsigned long long a, unsigned long long b)
DPKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMSB_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMSB.u (64-bit MSW 32x32 Signed Multiply and Saturating Sub with Rounding)
__STATIC_FORCEINLINE long __RV_SMAQA_SU(long t, unsigned long a, unsigned long b)
SMAQA.SU (Signed and Unsigned Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_SLL8(unsigned long a, unsigned int b)
SLL8 (SIMD 8-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT16(unsigned long long a, unsigned long long b)
DPKBT16 (Pack Two 16-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD832(unsigned long a)
ZUNPKD832 (Unsigned Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_CLO16(unsigned long a)
CLO16 (SIMD 16-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long __RV_URSTAS16(unsigned long a, unsigned long b)
URSTAS16 (SIMD 16-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA (Four Signed 8x8 with 32-bit Signed Add)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB32(unsigned long a, unsigned long b)
UKSUB32 (SIMD 32-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE long __RV_KMABT(long t, unsigned long a, unsigned long b)
KMABT (SIMD Saturating Signed Multiply Bottom & Top Halfs & Add)
__STATIC_FORCEINLINE long long __RV_DSMBT32(unsigned long long a, unsigned long long b)
DSMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE unsigned long __RV_KSUB8(unsigned long a, unsigned long b)
KSUB8 (SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLE16(unsigned long a, unsigned long b)
UCMPLE16 (SIMD 16-bit Unsigned Compare Less Than & Equal)
__STATIC_FORCEINLINE long __RV_KDMABT(long t, unsigned int a, unsigned int b)
KDMABT (Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_CLZ32(unsigned long a)
CLZ32 (SIMD 32-bit Count Leading Zero)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD830(unsigned long long a)
DZUNPKD830 (Unsigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT32(unsigned long long a, unsigned long long b)
DPKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_SUB64(unsigned long long a, unsigned long long b)
SUB64 (64-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS16(unsigned long a, unsigned long b)
KCRAS16 (SIMD 16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_URADDW(unsigned int a, unsigned int b)
URADDW (32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC.u (64-bit MSW 32x32 Signed Multiply and Saturating Add with Rounding)
__STATIC_FORCEINLINE unsigned long long __RV_UMSR64(unsigned long long t, unsigned long a, unsigned long b)
UMSR64 (Unsigned Multiply and Subtract from 64-Bit Data)
__STATIC_FORCEINLINE long __RV_DMADA32(long long t, unsigned long long a, unsigned long long b)
DMADA32 (Two Cross Signed 32x32 with 64-bit Add and Clip to 32-bit)
__STATIC_FORCEINLINE unsigned long __RV_UMIN16(unsigned long a, unsigned long b)
UMIN16 (SIMD 16-bit Unsigned Minimum)
__STATIC_FORCEINLINE long long __RV_KSUB64(long long a, long long b)
KSUB64 (64-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SMAX16(unsigned long a, unsigned long b)
SMAX16 (SIMD 16-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD830(unsigned long a)
SUNPKD830 (Signed Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE long __RV_KMMWT2(long a, unsigned long b)
KMMWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2)
__STATIC_FORCEINLINE long long __RV_DSMALTT(long long t, unsigned long long a, unsigned long long b)
DSMALTT (Signed Multiply Top Halfs & Add 64-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRAS32(unsigned long long a, unsigned long long b)
DRCRAS32 (32-bit Signed Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT8(unsigned long a, unsigned long b)
SCMPLT8 (SIMD 8-bit Signed Compare Less Than)
__STATIC_FORCEINLINE long __RV_KMMAWT(long t, unsigned long a, unsigned long b)
KMMAWT (SIMD Saturating MSW Signed Multiply Word and Top Half and Add)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16_U(unsigned long a, int b)
KSLRA16.u (SIMD 16-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBW(unsigned int a, unsigned int b)
UKSUBW (Unsigned Subtraction with U32 Saturation)
__STATIC_FORCEINLINE long __RV_SMDS(unsigned long a, unsigned long b)
SMDS (SIMD Signed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE unsigned long __RV_URADD8(unsigned long a, unsigned long b)
URADD8 (SIMD 8-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_STAS16(unsigned long a, unsigned long b)
STAS16 (SIMD 16-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KADD8(unsigned long a, unsigned long b)
KADD8 (SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_ADD8(unsigned long a, unsigned long b)
ADD8 (SIMD 8-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_DREDSA16(unsigned long long a)
DREDSA16 (Reduced Subtraction and Reduced Addition)
__STATIC_FORCEINLINE unsigned long __RV_PKTB16(unsigned long a, unsigned long b)
PKTB16 (Pack Two 16-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_SRA32(unsigned long a, unsigned int b)
SRA32 (SIMD 32-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE long long __RV_DSMDRS32(unsigned long long a, unsigned long long b)
DSMDRS32 (Two Signed 32x32 with 64-bit Reversed Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB16(unsigned long long a, unsigned long long b)
DSUB16 (64-bit SIMD 16-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RADD32(unsigned long a, unsigned long b)
RADD32 (SIMD 32-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADS (Two 16x16 with 32-bit Signed Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA16(unsigned long long a, unsigned long long b)
DKCRSA16 (16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUB32(unsigned long long a, unsigned long long b)
DSUB32 (64-bit SIMD 32-bit Halving Signed Subtraction)
__STATIC_FORCEINLINE long __RV_MAXW(int a, int b)
MAXW (32-bit Signed Word Maximum)
__STATIC_FORCEINLINE long __RV_SRA_U(long a, unsigned int b)
SRA.u (Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long __RV_DSMADA16(long long t, unsigned long long a, unsigned long long b)
DSMADA16 (Signed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE long __RV_SMMWT(long a, unsigned long b)
SMMWT (SIMD MSW Signed Multiply Word and Top Half)
__STATIC_FORCEINLINE unsigned long __RV_SMIN16(unsigned long a, unsigned long b)
SMIN16 (SIMD 16-bit Signed Minimum)
__STATIC_FORCEINLINE long __RV_KMMAWT2(long t, unsigned long a, unsigned long b)
KMMAWT2 (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 and Add)
__STATIC_FORCEINLINE long long __RV_DKMXDA32(unsigned long long a, unsigned long long b)
DKMXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTSA16(unsigned long long a, unsigned long long b)
DKSTSA16 (16-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE long __RV_KMSDA32(long t, unsigned long a, unsigned long b)
KMSDA32 (Saturating Signed Multiply Two Words & Add & Subtract)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS16(unsigned long a, unsigned long b)
URCRAS16 (SIMD 16-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_PKBB32(unsigned long a, unsigned long b)
PKBB32 (Pack Two 32-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE long long __RV_DKMADS32(long long t, unsigned long long a, unsigned long long b)
DKMADS32 (Two Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DCRAS32(unsigned long long a, unsigned long long b)
DCRAS32 (32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE long long __RV_DDSMAQA(long long t, unsigned long long a, unsigned long long b)
DDSMAQA (Eight Signed 8x8 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD8(unsigned long long a, unsigned long long b)
DKADD8 (64-bit SIMD 8-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UKADD64(unsigned long long a, unsigned long long b)
UKADD64 (64-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_SMUL16(unsigned int a, unsigned int b)
SMUL16 (SIMD Signed 16-bit Multiply)
__STATIC_FORCEINLINE long __RV_SMTT16(unsigned long a, unsigned long b)
SMTT16 (SIMD Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE unsigned long __RV_KABS32(unsigned long a)
KABS32 (Scalar 32-bit Absolute Value with Saturation)
__STATIC_FORCEINLINE unsigned long long __RV_DSMMUL(unsigned long long a, unsigned long long b)
DSMMUL (64-bit MSW 32x32 Signed Multiply)
__STATIC_FORCEINLINE long long __RV_SMALDS(long long t, unsigned long a, unsigned long b)
SMALDS (Signed Multiply Two Halfs & Subtract & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_CRSA16(unsigned long a, unsigned long b)
CRSA16 (SIMD 16-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKMMAC(unsigned long long t, unsigned long long a, unsigned long long b)
DKMMAC (64-bit MSW 32x32 Signed Multiply and Saturating Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS16(unsigned long long a)
DKABS16 (64-bit SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long long __RV_DKADD16(unsigned long long a, unsigned long long b)
DKADD16 (64-bit SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB32(unsigned long long a, unsigned long long b)
DKSUB32 (64-bit SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DMXSR33(unsigned long long a, unsigned long long b)
DMXSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS16(unsigned long a, unsigned long b)
KSTAS16 (SIMD 16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SRL8_U(unsigned long a, unsigned int b)
SRL8.u (SIMD 8-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_UKSUB8(unsigned long a, unsigned long b)
UKSUB8 (SIMD 8-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_BPICK(unsigned long a, unsigned long b, unsigned long c)
BPICK (Bit-wise Pick)
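BPICK is a bit-granular multiplexer: assuming the P-extension definition, each result bit comes from a where the corresponding bit of c is 1 and from b otherwise, i.e. (a & c) | (b & ~c). A sketch (bit_select is hypothetical):

/* Sketch: branch-free per-bit blend of two words through a mask. */
unsigned long bit_select(unsigned long a, unsigned long b, unsigned long c)
{
    return __RV_BPICK(a, b, c); /* assumed (a & c) | (b & ~c) */
}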
__STATIC_FORCEINLINE long __RV_KWMMUL_U(long a, long b)
KWMMUL.u (SIMD Saturating MSW Signed Multiply Word & Double with Rounding)
__STATIC_FORCEINLINE long long __RV_SMALDA(long long t, unsigned long a, unsigned long b)
SMALDA (Signed Multiply Two Halfs and Two Adds 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_SRL16_U(unsigned long a, unsigned int b)
SRL16.u (SIMD 16-bit Rounding Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_SMAX8(unsigned long a, unsigned long b)
SMAX8 (SIMD 8-bit Signed Maximum)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA16(unsigned long a, unsigned long b)
KCRSA16 (SIMD 16-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE long __RV_DSMA32_U(unsigned long long a, unsigned long long b)
DSMA32.u (64-bit SIMD 32-bit Signed Multiply Addition With Rounding and Clip)
__STATIC_FORCEINLINE unsigned long long __RV_ADD64(unsigned long long a, unsigned long long b)
ADD64 (64-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKMADA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMADA (Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE unsigned long __RV_SUB32(unsigned long a, unsigned long b)
SUB32 (SIMD 32-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA32(unsigned long a, unsigned long b)
RCRSA32 (SIMD 32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DKSMS32_U(unsigned long long t, unsigned long long a, unsigned long long b)
DKSMS32.u (Two Signed Multiply Shift-clip and Saturation with Rounding)
__STATIC_FORCEINLINE long __RV_KMSDA(long t, unsigned long a, unsigned long b)
KMSDA (SIMD Saturating Signed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE long __RV_KADDW(int a, int b)
KADDW (Signed Addition with Q31 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_CLO8(unsigned long a)
CLO8 (SIMD 8-bit Count Leading One)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA16(unsigned long long a, int b)
DKSLRA16 (64-bit SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA32(unsigned long a, unsigned long b)
URSTSA32 (SIMD 32-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_PBSAD(unsigned long a, unsigned long b)
PBSAD (Parallel Byte Sum of Absolute Difference)
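PBSAD sums |a.B[i] - b.B[i]| over all byte lanes, and PBSADA (listed above) additionally accumulates into t; together they cover the inner step of SAD-based block matching. A sketch (row_sad and n_words are hypothetical; pointers assumed word-aligned):

/* Sketch: sum of absolute differences between two byte rows,
 * 4 (RV32) or 8 (RV64) byte lanes per __RV_PBSADA call. */
unsigned long row_sad(const unsigned long *p, const unsigned long *q,
                      unsigned int n_words)
{
    unsigned long acc = 0;
    for (unsigned int i = 0; i < n_words; i++) {
        acc = __RV_PBSADA(acc, p[i], q[i]); /* acc += per-byte |p - q| */
    }
    return acc;
}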
__STATIC_FORCEINLINE unsigned long __RV_SLL16(unsigned long a, unsigned int b)
SLL16 (SIMD 16-bit Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_CMPEQ16(unsigned long a, unsigned long b)
CMPEQ16 (SIMD 16-bit Integer Compare Equal)
__STATIC_FORCEINLINE unsigned long __RV_KABSW(signed long a)
KABSW (Scalar 32-bit Absolute Value with Saturation)
__STATIC_FORCEINLINE long long __RV_DSMSLXDA(long long t, unsigned long long a, unsigned long long b)
DSMSLXDA (Four Cross Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DSTAS32(unsigned long long a, unsigned long long b)
DSTAS32 (SIMD 32-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE long long __RV_SMALBT(long long t, unsigned long a, unsigned long b)
SMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD810(unsigned long a)
ZUNPKD810 (Unsigned Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long __RV_URADD32(unsigned long a, unsigned long b)
URADD32 (SIMD 32-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE long long __RV_RADD64(long long a, long long b)
RADD64 (64-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA16(unsigned long long a, unsigned long long b)
DRCRSA16 (16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_KMSR64(long long t, long a, long b)
KMSR64 (Signed Multiply and Saturating Subtract from 64-Bit Data)
__STATIC_FORCEINLINE unsigned long __RV_KDMTT16(unsigned long a, unsigned long b)
KDMTT16 (SIMD Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE long __RV_KMMAWT_U(long t, unsigned long a, unsigned long b)
KMMAWT.u (SIMD Saturating MSW Signed Multiply Word and Top Half and Add with Rounding)
__STATIC_FORCEINLINE long long __RV_DSMALBB(long long t, unsigned long long a, unsigned long long b)
DSMALBB (Signed Multiply Bottom Halfs & Add 64-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD810(unsigned long long a)
DSUNPKD810 (Signed Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE long __RV_SMMWT_U(long a, unsigned long b)
SMMWT.u (SIMD MSW Signed Multiply Word and Top Half with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_UMAX32(unsigned long a, unsigned long b)
UMAX32 (SIMD 32-bit Unsigned Maximum)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD810(unsigned long a)
SUNPKD810 (Signed Unpacking Bytes 1 & 0)
__STATIC_FORCEINLINE unsigned long __RV_STSA32(unsigned long a, unsigned long b)
STSA32 (SIMD 32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KHM8(unsigned long a, unsigned long b)
KHM8 (SIMD Signed Saturating Q7 Multiply)
__STATIC_FORCEINLINE long long __RV_DSMALXDA(long long t, unsigned long long a, unsigned long long b)
DSMALXDA (Four Cross Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB16(unsigned long long a, unsigned long long b)
DKSUB16 (64-bit SIMD 16-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS32(unsigned long long a, unsigned long long b)
DKCRAS32 (32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE long long __RV_DKMABT32(long long t, unsigned long long a, unsigned long long b)
DKMABT32 (Saturating Signed Multiply Bottom & Top Words & Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSMAQA_SU(unsigned long long t, unsigned long long a, unsigned long long b)
DSMAQA.SU (Four Signed 8 x Unsigned 8 with 32-bit Signed Add)
__STATIC_FORCEINLINE unsigned long __RV_UMIN8(unsigned long a, unsigned long b)
UMIN8 (SIMD 8-bit Unsigned Minimum)
__STATIC_FORCEINLINE long __RV_DSMS32_U(unsigned long long a, unsigned long long b)
DSMS32.u (64-bit SIMD 32-bit Signed Multiply Subtraction with Rounding and Clip)
__STATIC_FORCEINLINE unsigned long long __RV_DSMTT16(unsigned long long a, unsigned long long b)
DSMTT16 (Signed Multiply Top Half & Top Half)
__STATIC_FORCEINLINE unsigned long __RV_STSA16(unsigned long a, unsigned long b)
STSA16 (SIMD 16-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSLL16(unsigned long a, unsigned int b)
KSLL16 (SIMD 16-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_UMAQA(unsigned long t, unsigned long a, unsigned long b)
UMAQA (Unsigned Multiply Four Bytes with 32-bit Adds)
__STATIC_FORCEINLINE unsigned long __RV_KDMABT16(unsigned long t, unsigned long a, unsigned long b)
KDMABT16 (SIMD Signed Saturating Double Multiply Addition B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_SUB16(unsigned long a, unsigned long b)
SUB16 (SIMD 16-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UKADDH(unsigned int a, unsigned int b)
UKADDH (Unsigned Addition with U16 Saturation)
__STATIC_FORCEINLINE unsigned long __RV_UKSUBH(unsigned int a, unsigned int b)
UKSUBH (Unsigned Subtraction with U16 Saturation)
__STATIC_FORCEINLINE long __RV_DSMXA32_U(unsigned long long a, unsigned long long b)
DSMXA32.u (64-bit SIMD 32-bit Signed Cross Multiply Addition with Rounding and Clip)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA14(unsigned long long a, unsigned long long b)
DSMBB32.sra14 (Signed Multiply Bottom Word & Bottom Word with Right Shift 14)
#define __STATIC_FORCEINLINE
Define a static function that should always be inlined by the compiler.
__STATIC_FORCEINLINE unsigned long long __RV_DKHM16(unsigned long long a, unsigned long long b)
DKHM16 (64-bit SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBB16(unsigned long long a, unsigned long long b)
DPKBB16 (Pack Two 16-bit Data from Both Bottom Half)
__STATIC_FORCEINLINE unsigned long long __RV_DSMBB16(unsigned long long a, unsigned long long b)
DSMBB16 (Signed Multiply Bottom Half & Bottom Half)
__STATIC_FORCEINLINE long long __RV_DSMXDS32(unsigned long long a, unsigned long long b)
DSMXDS32 (Two Cross Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_DKMDA32(unsigned long long a, unsigned long long b)
DKMDA32 (Two Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE long __RV_SMBT32(unsigned long a, unsigned long b)
SMBT32 (Signed Multiply Bottom Word & Top Word)
__STATIC_FORCEINLINE long long __RV_DSMDS32(unsigned long long a, unsigned long long b)
DSMDS32 (Two Signed 32x32 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_RSUB64(long long a, long long b)
RSUB64 (64-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_RDOV(void)
RDOV (Read OV flag)
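The OV flag is sticky: once any saturating operation clips, it stays set until cleared, so a typical pattern clears it with __RV_CLROV (listed above), runs a kernel, then reads it back with __RV_RDOV. A sketch (sat_add_checked is hypothetical):

/* Sketch: detect whether any Q31 add in a kernel saturated. */
int sat_add_checked(long *dst, const int *x, const int *y, unsigned int n)
{
    __RV_CLROV();                        /* clear the sticky OV flag */
    for (unsigned int i = 0; i < n; i++) {
        dst[i] = __RV_KADDW(x[i], y[i]); /* Q31 saturating add */
    }
    return __RV_RDOV() != 0;             /* nonzero iff some add clipped */
}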
__STATIC_FORCEINLINE unsigned long long __RV_DKMSDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSDA (Two 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE long long __RV_DSMALDS(long long t, unsigned long long a, unsigned long long b)
DSMALDS (Four Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE long __RV_SMAQA(long t, unsigned long a, unsigned long b)
SMAQA (Signed Multiply Four Bytes with 32-bit Adds)
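SMAQA multiplies four signed byte pairs per 32-bit word and adds all products plus the accumulator t, i.e. a 4-way int8 dot-product step. A sketch (dot_q7 is hypothetical; RV32 assumed so each word carries four lanes):

/* Sketch: int8 dot product, four multiply-accumulates per call. */
long dot_q7(const unsigned long *x, const unsigned long *w, unsigned int n_words)
{
    long acc = 0;
    for (unsigned int i = 0; i < n_words; i++) {
        acc = __RV_SMAQA(acc, x[i], w[i]); /* acc += four signed 8x8 products */
    }
    return acc;
}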
__STATIC_FORCEINLINE long long __RV_SMSLXDA(long long t, unsigned long a, unsigned long b)
SMSLXDA (Signed Crossed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE long long __RV_DKMSDA32(long long t, unsigned long long a, unsigned long long b)
DKMSDA32 (Two Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DRADD16(unsigned long long a, unsigned long long b)
DRADD16 (64-bit SIMD 16-bit Halving Signed Addition)
__STATIC_FORCEINLINE unsigned long __RV_KHMBB16(unsigned long a, unsigned long b)
KHMBB16 (SIMD Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long long __RV_DKHMX16(unsigned long long a, unsigned long long b)
DKHMX16 (64-bit SIMD Signed Crossed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB16(unsigned long long a, unsigned long long b)
DRSUB16 (16-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE long long __RV_DKMAXDA32(long long t, unsigned long long a, unsigned long long b)
DKMAXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Add)
__STATIC_FORCEINLINE unsigned long __RV_KADD16(unsigned long a, unsigned long b)
KADD16 (SIMD 16-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_SUB8(unsigned long a, unsigned long b)
SUB8 (SIMD 8-bit Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRSA32(unsigned long a, unsigned long b)
CRSA32 (SIMD 32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UMUL16(unsigned int a, unsigned int b)
UMUL16 (SIMD Unsigned 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR33(unsigned long long a, unsigned long long b)
DMSR33 (Signed Multiply with Right Shift 33-bit and Cross Multiply with Right Shift 33-bit)
__STATIC_FORCEINLINE long long __RV_DKMAXDS32(long long t, unsigned long long a, unsigned long long b)
DKMAXDS32 (Two Cross Signed 32x32 with 64-bit Saturation Add and Sub)
__STATIC_FORCEINLINE unsigned long long __RV_DPKBT32(unsigned long long a, unsigned long long b)
DPKBT32 (Pack Two 32-bit Data from Bottom and Top Half)
__STATIC_FORCEINLINE long long __RV_DDSMAQA_SU(long long t, unsigned long long a, unsigned long long b)
DDSMAQA.SU (Eight Signed 8 x Unsigned 8 with 64-bit Add)
__STATIC_FORCEINLINE long long __RV_DSMALBT(long long t, unsigned long long a, unsigned long long b)
DSMALBT (Signed Multiply Bottom Half & Top Half & Add 64-bit)
__STATIC_FORCEINLINE long long __RV_DKMATT32(long long t, unsigned long long a, unsigned long long b)
DKMATT32 (Saturating Signed Multiply Top Words & Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD831(unsigned long long a)
DSUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE long __RV_KMATT32(long t, unsigned long a, unsigned long b)
KMATT32 (Saturating Signed Multiply Top Words & Add)
__STATIC_FORCEINLINE long long __RV_DSMTT32_SRA32(unsigned long long a, unsigned long long b)
DSMTT32.sra32 (Signed Multiply Top Word & Top Word with Right Shift 32-bit)
__STATIC_FORCEINLINE long __RV_KHMBB(unsigned int a, unsigned int b)
KHMBB (Signed Saturating Half Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long __RV_SRA32_U(unsigned long a, unsigned int b)
SRA32.u (SIMD 32-bit Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD820(unsigned long a)
SUNPKD820 (Signed Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE long __RV_KSLRAW(int a, int b)
KSLRAW (Shift Left Logical with Q31 Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_EXPD81(unsigned long a)
EXPD81 (Expand and Copy Byte 1 to 32-bit (RV32) or 64-bit (RV64))
__STATIC_FORCEINLINE long __RV_KMXDA(unsigned long a, unsigned long b)
KMXDA (SIMD Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE long long __RV_DDUMAQA(long long t, unsigned long long a, unsigned long long b)
DDUMAQA (Eight Unsigned 8x8 with 64-bit Unsigned Add)
__STATIC_FORCEINLINE long __RV_KMADA32(long t, unsigned long a, unsigned long b)
KMADA32 (Saturating Signed Multiply Two Words and Two Adds)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA8(unsigned long a, int b)
KSLRA8 (SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long long __RV_UKMAR64(unsigned long long t, unsigned long a, unsigned long b)
UKMAR64 (Unsigned Multiply and Saturating Add to 64-Bit Data)
__STATIC_FORCEINLINE long long __RV_SMALTT(long long t, unsigned long a, unsigned long b)
SMALTT (Signed Multiply Top Halfs & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_KHM16(unsigned long a, unsigned long b)
KHM16 (SIMD Signed Saturating Q15 Multiply)
__STATIC_FORCEINLINE unsigned long long __RV_DKMAXDS(unsigned long long t, unsigned long long a, unsigned long long b)
DKMAXDS (Saturating Signed Crossed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE long __RV_KMADA(long t, unsigned long a, unsigned long b)
KMADA (SIMD Saturating Signed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT16(unsigned long a, unsigned long b)
UCMPLT16 (SIMD 16-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRAS16(unsigned long long a, unsigned long long b)
DKCRAS16 (16-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_CRAS32(unsigned long a, unsigned long b)
CRAS32 (SIMD 32-bit Cross Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_KMADS(long t, unsigned long a, unsigned long b)
KMADS (SIMD Saturating Signed Multiply Two Halfs & Subtract & Add)
__STATIC_FORCEINLINE unsigned long long __RV_DKMXDA(unsigned long long a, unsigned long long b)
DKMXDA (Signed Crossed Multiply Two Halfs and Add)
__STATIC_FORCEINLINE unsigned long long __RV_DADD32(unsigned long long a, unsigned long long b)
DADD32 (32-bit Addition)
__STATIC_FORCEINLINE unsigned long __RV_CLRS16(unsigned long a)
CLRS16 (SIMD 16-bit Count Leading Redundant Sign)
__STATIC_FORCEINLINE unsigned long __RV_KABS16(unsigned long a)
KABS16 (SIMD 16-bit Saturating Absolute)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA32(unsigned long a, unsigned long b)
RSTSA32 (SIMD 32-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE8(unsigned long a, unsigned long b)
SCMPLE8 (SIMD 8-bit Signed Compare Less Than & Equal)
__STATIC_FORCEINLINE long __RV_SMDRS(unsigned long a, unsigned long b)
SMDRS (SIMD Signed Multiply Two Halfs and Reverse Subtract)
__STATIC_FORCEINLINE long __RV_KMMAWB2_U(long t, unsigned long a, unsigned long b)
KMMAWB2.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half & 2 and Add with Rounding)
__STATIC_FORCEINLINE long __RV_KMMAC(long t, long a, long b)
KMMAC (SIMD Saturating MSW Signed Multiply Word and Add)
__STATIC_FORCEINLINE unsigned long long __RV_SMULX16(unsigned int a, unsigned int b)
SMULX16 (SIMD Signed Crossed 16-bit Multiply)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD832(unsigned long a)
SUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE long __RV_KSLLW(long a, unsigned int b)
KSLLW (Saturating Shift Left Logical for Word)
__STATIC_FORCEINLINE long long __RV_DSMALDA(long long t, unsigned long long a, unsigned long long b)
DSMALDA (Four Signed 16x16 with 64-bit Add)
__STATIC_FORCEINLINE unsigned long __RV_MADDR32(unsigned long t, unsigned long a, unsigned long b)
MADDR32 (Multiply and Add to 32-Bit Word)
__STATIC_FORCEINLINE unsigned long long __RV_DKMSXDA(unsigned long long t, unsigned long long a, unsigned long long b)
DKMSXDA (Two Cross 16x16 with 32-bit Signed Double Sub)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD830(unsigned long a)
ZUNPKD830 (Unsigned Unpacking Bytes 3 & 0)
__STATIC_FORCEINLINE unsigned long __RV_KADD32(unsigned long a, unsigned long b)
KADD32 (SIMD 32-bit Signed Saturating Addition)
__STATIC_FORCEINLINE unsigned long long __RV_URADD64(unsigned long long a, unsigned long long b)
URADD64 (64-bit Unsigned Halving Addition)
__STATIC_FORCEINLINE unsigned long __RV_URSTSA16(unsigned long a, unsigned long b)
URSTSA16 (SIMD 16-bit Unsigned Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KDMBT16(unsigned long a, unsigned long b)
KDMBT16 (SIMD Signed Saturating Double Multiply B16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_URCRAS32(unsigned long a, unsigned long b)
URCRAS32 (SIMD 32-bit Unsigned Halving Cross Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_KDMTT(unsigned int a, unsigned int b)
KDMTT (Signed Saturating Double Multiply T16 x T16)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLE16(unsigned long a, unsigned long b)
SCMPLE16 (SIMD 16-bit Signed Compare Less Than & Equal)
__STATIC_FORCEINLINE long __RV_KMMAWB(long t, unsigned long a, unsigned long b)
KMMAWB (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add)
__STATIC_FORCEINLINE unsigned long __RV_UCMPLT8(unsigned long a, unsigned long b)
UCMPLT8 (SIMD 8-bit Unsigned Compare Less Than)
__STATIC_FORCEINLINE long long __RV_DSMALDRS(long long t, unsigned long long a, unsigned long long b)
DSMALDRS (Four Signed 16x16 with 64-bit Add and Reversed Sub)
__STATIC_FORCEINLINE long long __RV_DKMADRS32(long long t, unsigned long long a, unsigned long long b)
DKMADRS32 (Two Signed 32x32 with 64-bit Saturation Reversed Add and Sub)
__STATIC_FORCEINLINE long __RV_RSUBW(int a, int b)
RSUBW (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_ADD16(unsigned long a, unsigned long b)
ADD16 (SIMD 16-bit Addition)
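ADD16 is the plain wrap-around variant; reach for KADD16/UKADD16 when saturation is needed. A sketch, assuming RV32 lane layout and the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
unsigned long s = __RV_ADD16(0x7FFF0001UL, 0x00010002UL);
/* per-lane modulo add: {0x8000, 0x0003}; no saturation, signed overflow wraps */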
__STATIC_FORCEINLINE unsigned long __RV_STAS32(unsigned long a, unsigned long b)
STAS32 (SIMD 32-bit Straight Addition & Subtraction)
__STATIC_FORCEINLINE long __RV_SMMUL(long a, long b)
SMMUL (SIMD MSW Signed Multiply Word)
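SMMUL keeps only the most-significant word of the 64-bit product, i.e. ((int64_t)a * b) >> 32, which avoids an explicit widening multiply in fixed-point code. A sketch, assuming the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
long r = __RV_SMMUL(0x40000000L, 0x40000000L);
/* 0.5 * 0.5 in Q31: full product 0x1000000000000000, MSW r == 0x10000000 */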
__STATIC_FORCEINLINE long long __RV_SMALXDS(long long t, unsigned long a, unsigned long b)
SMALXDS (Signed Crossed Multiply Two Halfs & Subtract & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_SRA8(unsigned long a, unsigned int b)
SRA8 (SIMD 8-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_PKTB32(unsigned long a, unsigned long b)
PKTB32 (Pack Two 32-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE long long __RV_DSMBB32(unsigned long long a, unsigned long long b)
DSMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE unsigned long __RV_ZUNPKD831(unsigned long a)
ZUNPKD831 (Unsigned Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long __RV_URSUB16(unsigned long a, unsigned long b)
URSUB16 (SIMD 16-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE long long __RV_SMSLDA(long long t, unsigned long a, unsigned long b)
SMSLDA (Signed Multiply Two Halfs & Add & Subtract 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_DREDAS16(unsigned long long a)
DREDAS16 (Reduced Addition and Reduced Subtraction)
__STATIC_FORCEINLINE long __RV_KDMBB(unsigned int a, unsigned int b)
KDMBB (Signed Saturating Double Multiply B16 x B16)
__STATIC_FORCEINLINE unsigned long long __RV_DRSUB32(unsigned long long a, unsigned long long b)
DRSUB32 (32-bit Signed Halving Subtraction)
__STATIC_FORCEINLINE long long __RV_SMAR64(long long t, long a, long b)
SMAR64 (Signed Multiply and Add to 64-Bit Data)
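SMAR64 accumulates a signed 32x32 product into a 64-bit accumulator without intermediate overflow (RV32 behavior shown here). A sketch, assuming the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
long long acc = 0;
acc = __RV_SMAR64(acc, 100000L, 100000L);
/* acc += 10,000,000,000: the product no longer fits in 32 bits,
   but the 64-bit accumulator holds it */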
__STATIC_FORCEINLINE long __RV_SMMWB_U(long a, unsigned long b)
SMMWB.u (SIMD MSW Signed Multiply Word and Bottom Half with Rounding)
__STATIC_FORCEINLINE int16_t __RV_DKCLIP64(unsigned long long a)
DKCLIP64 (64-bit Clipped to 16-bit Saturation Value)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS32(unsigned long a, unsigned long b)
UKCRAS32 (SIMD 32-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKCRSA32(unsigned long long a, unsigned long long b)
DKCRSA32 (32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UMUL8(unsigned int a, unsigned int b)
UMUL8 (SIMD Unsigned 8-bit Multiply)
__STATIC_FORCEINLINE long __RV_SMBB32(unsigned long a, unsigned long b)
SMBB32 (Signed Multiply Bottom Word & Bottom Word)
__STATIC_FORCEINLINE unsigned long long __RV_DKSTAS16(unsigned long long a, unsigned long long b)
DKSTAS16 (16-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_UMAR64(unsigned long long t, unsigned long a, unsigned long b)
UMAR64 (Unsigned Multiply and Add to 64-Bit Data)
__STATIC_FORCEINLINE long __RV_SMDRS32(unsigned long a, unsigned long b)
SMDRS32 (Signed Multiply Two Words and Reverse Subtract)
__STATIC_FORCEINLINE unsigned long __RV_UKADD32(unsigned long a, unsigned long b)
UKADD32 (SIMD 32-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_SUNPKD831(unsigned long a)
SUNPKD831 (Signed Unpacking Bytes 3 & 1)
__STATIC_FORCEINLINE unsigned long long __RV_DKSUB8(unsigned long long a, unsigned long long b)
DKSUB8 (64-bit SIMD 8-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_MULR64(unsigned long a, unsigned long b)
MULR64 (Multiply Word Unsigned to 64-bit Data)
__STATIC_FORCEINLINE long __RV_MINW(int a, int b)
MINW (32-bit Signed Word Minimum)
__STATIC_FORCEINLINE unsigned long __RV_UKCRAS16(unsigned long a, unsigned long b)
UKCRAS16 (SIMD 16-bit Unsigned Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KDMATT16(unsigned long t, unsigned long a, unsigned long b)
KDMATT16 (SIMD Signed Saturating Double Multiply Addition T16 x T16)
__STATIC_FORCEINLINE long __RV_DSMAXDA16(long long t, unsigned long long a, unsigned long long b)
DSMAXDA16 (Signed Crossed Multiply Two Halfs and Two Adds 32-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DRCRSA32(unsigned long long a, unsigned long long b)
DRCRSA32 (32-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA16(unsigned long a, unsigned long b)
URCRSA16 (SIMD 16-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DMSR16(unsigned long a, unsigned long b)
DMSR16 (Signed Multiply Halfs with Right Shift 16-bit and Cross Multiply Halfs with Right Shift 16-bit)
__STATIC_FORCEINLINE unsigned long long __RV_DUMAQA(unsigned long long t, unsigned long long a, unsigned long long b)
DUMAQA (Four Unsigned 8x8 with 32-bit Unsigned Add)
__STATIC_FORCEINLINE unsigned long __RV_KSLL32(unsigned long a, unsigned int b)
KSLL32 (SIMD 32-bit Saturating Shift Left Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA16(unsigned long a, int b)
KSLRA16 (SIMD 16-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_URCRSA32(unsigned long a, unsigned long b)
URCRSA32 (SIMD 32-bit Unsigned Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE long __RV_KSLRAW_U(int a, int b)
KSLRAW.u (Shift Left Logical with Q31 Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long long __RV_DSMBB32_SRA32(unsigned long long a, unsigned long long b)
DSMBB32.sra32 (Signed Multiply Bottom Word & Bottom Word with Right Shift 32)
__STATIC_FORCEINLINE unsigned long __RV_SRA16(unsigned long a, unsigned long b)
SRA16 (SIMD 16-bit Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_URSUB8(unsigned long a, unsigned long b)
URSUB8 (SIMD 8-bit Unsigned Halving Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_KDMABB16(unsigned long t, unsigned long a, unsigned long b)
KDMABB16 (SIMD Signed Saturating Double Multiply Addition B16 x B16)
__STATIC_FORCEINLINE long __RV_KWMMUL(long a, long b)
KWMMUL (SIMD Saturating MSW Signed Multiply Word & Double)
__STATIC_FORCEINLINE unsigned long __RV_RADD8(unsigned long a, unsigned long b)
RADD8 (SIMD 8-bit Signed Halving Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DPKTB16(unsigned long long a, unsigned long long b)
DPKTB16 (Pack Two 16-bit Data from Top and Bottom Half)
__STATIC_FORCEINLINE unsigned long __RV_UKADD16(unsigned long a, unsigned long b)
UKADD16 (SIMD 16-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_UMAX16(unsigned long a, unsigned long b)
UMAX16 (SIMD 16-bit Unsigned Maximum)
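UMAX16 selects the larger value per halfword treating lanes as unsigned, so 0x8000 beats 0x7FFF. A sketch, assuming RV32 lane layout and the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
unsigned long r = __RV_UMAX16(0x00058000UL, 0x00067FFFUL);
/* per-lane unsigned max: {0x0006, 0x8000}; r == 0x00068000 */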
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL_U(unsigned long long a, unsigned long long b)
DKWMMUL.u (64-bit MSW 32x32 Signed Multiply & Double with Rounding)
__STATIC_FORCEINLINE long long __RV_DSMSLDA(long long t, unsigned long long a, unsigned long long b)
DSMSLDA (Four Signed 16x16 with 64-bit Sub)
__STATIC_FORCEINLINE long long __RV_SMALDRS(long long t, unsigned long a, unsigned long b)
SMALDRS (Signed Multiply Two Halfs & Reverse Subtract & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_UKADDW(unsigned int a, unsigned int b)
UKADDW (Unsigned Addition with U32 Saturation)
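UKADDW clamps the sum at UINT32_MAX rather than wrapping. A sketch, assuming the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
unsigned long r = __RV_UKADDW(0xFFFFFFF0U, 0x20U);
/* 0xFFFFFFF0 + 0x20 exceeds 32 bits, so the result saturates to 0xFFFFFFFF */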
__STATIC_FORCEINLINE unsigned long long __RV_DPACK32(signed long a, signed long b)
DPACK32 (SIMD Pack Two 32-bit Data To 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_KSLRA32_U(unsigned long a, int b)
KSLRA32.u (SIMD 32-bit Shift Left Logical with Saturation or Rounding Shift Right Arithmetic)
__STATIC_FORCEINLINE long long __RV_DKMSXDA32(long long t, unsigned long long a, unsigned long long b)
DKMSXDA32 (Two Cross Signed 32x32 with 64-bit Saturation Sub)
__STATIC_FORCEINLINE unsigned long __RV_UKADD8(unsigned long a, unsigned long b)
UKADD8 (SIMD 8-bit Unsigned Saturating Addition)
__STATIC_FORCEINLINE unsigned long __RV_KSTAS32(unsigned long a, unsigned long b)
KSTAS32 (SIMD 32-bit Signed Saturating Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKWMMUL(unsigned long long a, unsigned long long b)
DKWMMUL (64-bit MSW 32x32 Signed Multiply & Double)
#define __EXPD_BYTE(x)
Expand byte to unsigned long value.
__STATIC_FORCEINLINE unsigned long long __RV_DSMXDS(unsigned long long a, unsigned long long b)
DSMXDS (Signed Crossed Multiply Two Halfs and Subtract)
__STATIC_FORCEINLINE long __RV_KMAXDA(long t, unsigned long a, unsigned long b)
KMAXDA (SIMD Saturating Signed Crossed Multiply Two Halfs and Two Adds)
__STATIC_FORCEINLINE long __RV_SMBT16(unsigned long a, unsigned long b)
SMBT16 (SIMD Signed Multiply Bottom Half & Top Half)
__STATIC_FORCEINLINE unsigned long __RV_EXPD80(unsigned long a)
EXPD80 (Expand and Copy Byte 0 to 32-bit (when RV32) or 64-bit (when RV64))
__STATIC_FORCEINLINE unsigned long __RV_URSTAS32(unsigned long a, unsigned long b)
URSTAS32 (SIMD 32-bit Unsigned Halving Straight Addition & Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_SWAP8(unsigned long a)
SWAP8 (Swap Byte within Halfword)
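SWAP8 exchanges the two bytes inside every halfword, a one-instruction 16-bit endian swap for packed data. A sketch, assuming RV32 lane layout and the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
unsigned long r = __RV_SWAP8(0x11223344UL);
/* each halfword byte-swapped: 0x1122 -> 0x2211, 0x3344 -> 0x4433; r == 0x22114433 */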
__STATIC_FORCEINLINE long __RV_KMMAWB_U(long t, unsigned long a, unsigned long b)
KMMAWB.u (SIMD Saturating MSW Signed Multiply Word and Bottom Half and Add with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_KCRAS32(unsigned long a, unsigned long b)
KCRAS32 (SIMD 32-bit Signed Saturating Cross Addition & Subtraction)
__STATIC_FORCEINLINE long long __RV_KMAR64(long long t, long a, long b)
KMAR64 (Signed Multiply and Saturating Add to 64-Bit Data)
#define __ASM
Pass information from the compiler to the assembler.
__STATIC_FORCEINLINE unsigned long long __RV_DPKTT16(unsigned long long a, unsigned long long b)
DPKTT16 (Pack Two 16-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA8(unsigned long long a, int b)
DKSLRA8 (64-bit SIMD 8-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_PKTT32(unsigned long a, unsigned long b)
PKTT32 (Pack Two 32-bit Data from Both Top Half)
__STATIC_FORCEINLINE unsigned long __RV_UMAX8(unsigned long a, unsigned long b)
UMAX8 (SIMD 8-bit Unsigned Maximum)
__STATIC_FORCEINLINE long __RV_KMMWT2_U(long a, unsigned long b)
KMMWT2.u (SIMD Saturating MSW Signed Multiply Word and Top Half & 2 with Rounding)
__STATIC_FORCEINLINE unsigned long __RV_RCRSA16(unsigned long a, unsigned long b)
RCRSA16 (SIMD 16-bit Signed Halving Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_SRL32(unsigned long a, unsigned int b)
SRL32 (SIMD 32-bit Shift Right Logical)
__STATIC_FORCEINLINE unsigned long __RV_KSUB32(unsigned long a, unsigned long b)
KSUB32 (SIMD 32-bit Signed Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long __RV_UMIN32(unsigned long a, unsigned long b)
UMIN32 (SIMD 32-bit Unsigned Minimum)
__STATIC_FORCEINLINE unsigned long long __RV_DKABS8(unsigned long long a)
DKABS8 (64-bit SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE long __RV_KSUBW(int a, int b)
KSUBW (Signed Subtraction with Q31 Saturation)
__STATIC_FORCEINLINE unsigned long long __RV_DCRSA32(unsigned long long a, unsigned long long b)
DCRSA32 (32-bit Cross Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_KABS8(unsigned long a)
KABS8 (SIMD 8-bit Saturating Absolute)
__STATIC_FORCEINLINE long __RV_KADDH(int a, int b)
KADDH (Signed Addition with Q15 Saturation)
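KADDH adds two word operands and clamps the sum into the Q15 range [-32768, 32767]. A sketch, assuming the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
long r = __RV_KADDH(30000, 10000);
/* 40000 exceeds 32767, so r saturates to 32767 */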
__STATIC_FORCEINLINE long long __RV_DSMALXDS(long long t, unsigned long long a, unsigned long long b)
DSMALXDS (Four Cross Signed 16x16 with 64-bit Add and Sub)
__STATIC_FORCEINLINE unsigned long __RV_KSTSA32(unsigned long a, unsigned long b)
KSTSA32 (SIMD 32-bit Signed Saturating Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long __RV_SCMPLT16(unsigned long a, unsigned long b)
SCMPLT16 (SIMD 16-bit Signed Compare Less Than)
__STATIC_FORCEINLINE unsigned long __RV_KCRSA32(unsigned long a, unsigned long b)
KCRSA32 (SIMD 32-bit Signed Saturating Cross Subtraction & Addition)
__STATIC_FORCEINLINE long long __RV_SMAL(long long a, unsigned long b)
SMAL (Signed Multiply Halfs & Add 64-bit)
__STATIC_FORCEINLINE unsigned long __RV_URADD16(unsigned long a, unsigned long b)
URADD16 (SIMD 16-bit Unsigned Halving Addition)
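URADD16 computes (a + b) >> 1 per unsigned lane using the full-width sum, so averaging cannot overflow; this fits pixel or sample averaging. A sketch, assuming RV32 lane layout and the assumed SDK device header:
#include "nuclei_sdk_soc.h"   /* assumed device header */
unsigned long avg = __RV_URADD16(0x0004000AUL, 0x0002000CUL);
/* per-lane halving add: {(4+2)/2, (10+12)/2} = {0x0003, 0x000B} */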
__STATIC_FORCEINLINE long __RV_SMDS32(unsigned long a, unsigned long b)
SMDS32 (Signed Multiply Two Words and Subtract)
__STATIC_FORCEINLINE unsigned long __RV_ADD32(unsigned long a, unsigned long b)
ADD32 (SIMD 32-bit Addition)
__STATIC_FORCEINLINE unsigned long long __RV_DSUNPKD832(unsigned long long a)
DSUNPKD832 (Signed Unpacking Bytes 3 & 2)
__STATIC_FORCEINLINE unsigned long __RV_SMIN8(unsigned long a, unsigned long b)
SMIN8 (SIMD 8-bit Signed Minimum)
__STATIC_FORCEINLINE long __RV_KMADRS32(long t, unsigned long a, unsigned long b)
KMADRS32 (Saturating Signed Multiply Two Words & Reverse Subtract & Add)
__STATIC_FORCEINLINE unsigned long long __RV_DSTSA32(unsigned long long a, unsigned long long b)
DSTSA32 (32-bit Straight Subtraction & Addition)
__STATIC_FORCEINLINE unsigned long long __RV_UKSUB64(unsigned long long a, unsigned long long b)
UKSUB64 (64-bit Unsigned Saturating Subtraction)
__STATIC_FORCEINLINE unsigned long long __RV_DKSLRA32(unsigned long long a, int b)
DKSLRA32 (64-bit SIMD 32-bit Shift Left Logical with Saturation or Shift Right Arithmetic)
__STATIC_FORCEINLINE unsigned long __RV_RSTSA16(unsigned long a, unsigned long b)
RSTSA16 (SIMD 16-bit Signed Halving Straight Subtraction & Addition)
__STATIC_FORCEINLINE long __RV_KMSXDA(long t, unsigned long a, unsigned long b)
KMSXDA (SIMD Saturating Signed Crossed Multiply Two Halfs & Add & Subtract)
__STATIC_FORCEINLINE unsigned long long __RV_DZUNPKD820(unsigned long long a)
DZUNPKD820 (Unsigned Unpacking Bytes 2 & 0)
__STATIC_FORCEINLINE long __RV_KMMSB_U(long t, long a, long b)
KMMSB.u (SIMD Saturating MSW Signed Multiply Word and Subtraction with Rounding)