diff mbox series

[v2,3/8] LoongArch: Add Loongson SX vector builtin function support.

Message ID 20230718110625.88834-4-panchenghui@loongson.cn
State New
Headers show
Series Add Loongson SX/ASX instruction support to LoongArch target. | expand

Commit Message

Chenghui Pan July 18, 2023, 11:06 a.m. UTC
From: Lulu Cheng <chenglulu@loongson.cn>

gcc/ChangeLog:

	* config.gcc: Add lsxintrin.h to the installed extra headers.
	* config/loongarch/loongarch-builtins.cc (LARCH_FTYPE_NAME4): Add builtin function support.
	(enum loongarch_builtin_type): Ditto.
	(AVAIL_ALL): Ditto.
	(LARCH_BUILTIN): Ditto.
	(LSX_BUILTIN): Ditto.
	(LSX_BUILTIN_TEST_BRANCH): Ditto.
	(LSX_NO_TARGET_BUILTIN): Ditto.
	(CODE_FOR_lsx_vsadd_b): Ditto.
	(CODE_FOR_lsx_vsadd_h): Ditto.
	(CODE_FOR_lsx_vsadd_w): Ditto.
	(CODE_FOR_lsx_vsadd_d): Ditto.
	(CODE_FOR_lsx_vsadd_bu): Ditto.
	(CODE_FOR_lsx_vsadd_hu): Ditto.
	(CODE_FOR_lsx_vsadd_wu): Ditto.
	(CODE_FOR_lsx_vsadd_du): Ditto.
	(CODE_FOR_lsx_vadd_b): Ditto.
	(CODE_FOR_lsx_vadd_h): Ditto.
	(CODE_FOR_lsx_vadd_w): Ditto.
	(CODE_FOR_lsx_vadd_d): Ditto.
	(CODE_FOR_lsx_vaddi_bu): Ditto.
	(CODE_FOR_lsx_vaddi_hu): Ditto.
	(CODE_FOR_lsx_vaddi_wu): Ditto.
	(CODE_FOR_lsx_vaddi_du): Ditto.
	(CODE_FOR_lsx_vand_v): Ditto.
	(CODE_FOR_lsx_vandi_b): Ditto.
	(CODE_FOR_lsx_bnz_v): Ditto.
	(CODE_FOR_lsx_bz_v): Ditto.
	(CODE_FOR_lsx_vbitsel_v): Ditto.
	(CODE_FOR_lsx_vseqi_b): Ditto.
	(CODE_FOR_lsx_vseqi_h): Ditto.
	(CODE_FOR_lsx_vseqi_w): Ditto.
	(CODE_FOR_lsx_vseqi_d): Ditto.
	(CODE_FOR_lsx_vslti_b): Ditto.
	(CODE_FOR_lsx_vslti_h): Ditto.
	(CODE_FOR_lsx_vslti_w): Ditto.
	(CODE_FOR_lsx_vslti_d): Ditto.
	(CODE_FOR_lsx_vslti_bu): Ditto.
	(CODE_FOR_lsx_vslti_hu): Ditto.
	(CODE_FOR_lsx_vslti_wu): Ditto.
	(CODE_FOR_lsx_vslti_du): Ditto.
	(CODE_FOR_lsx_vslei_b): Ditto.
	(CODE_FOR_lsx_vslei_h): Ditto.
	(CODE_FOR_lsx_vslei_w): Ditto.
	(CODE_FOR_lsx_vslei_d): Ditto.
	(CODE_FOR_lsx_vslei_bu): Ditto.
	(CODE_FOR_lsx_vslei_hu): Ditto.
	(CODE_FOR_lsx_vslei_wu): Ditto.
	(CODE_FOR_lsx_vslei_du): Ditto.
	(CODE_FOR_lsx_vdiv_b): Ditto.
	(CODE_FOR_lsx_vdiv_h): Ditto.
	(CODE_FOR_lsx_vdiv_w): Ditto.
	(CODE_FOR_lsx_vdiv_d): Ditto.
	(CODE_FOR_lsx_vdiv_bu): Ditto.
	(CODE_FOR_lsx_vdiv_hu): Ditto.
	(CODE_FOR_lsx_vdiv_wu): Ditto.
	(CODE_FOR_lsx_vdiv_du): Ditto.
	(CODE_FOR_lsx_vfadd_s): Ditto.
	(CODE_FOR_lsx_vfadd_d): Ditto.
	(CODE_FOR_lsx_vftintrz_w_s): Ditto.
	(CODE_FOR_lsx_vftintrz_l_d): Ditto.
	(CODE_FOR_lsx_vftintrz_wu_s): Ditto.
	(CODE_FOR_lsx_vftintrz_lu_d): Ditto.
	(CODE_FOR_lsx_vffint_s_w): Ditto.
	(CODE_FOR_lsx_vffint_d_l): Ditto.
	(CODE_FOR_lsx_vffint_s_wu): Ditto.
	(CODE_FOR_lsx_vffint_d_lu): Ditto.
	(CODE_FOR_lsx_vfsub_s): Ditto.
	(CODE_FOR_lsx_vfsub_d): Ditto.
	(CODE_FOR_lsx_vfmul_s): Ditto.
	(CODE_FOR_lsx_vfmul_d): Ditto.
	(CODE_FOR_lsx_vfdiv_s): Ditto.
	(CODE_FOR_lsx_vfdiv_d): Ditto.
	(CODE_FOR_lsx_vfmax_s): Ditto.
	(CODE_FOR_lsx_vfmax_d): Ditto.
	(CODE_FOR_lsx_vfmin_s): Ditto.
	(CODE_FOR_lsx_vfmin_d): Ditto.
	(CODE_FOR_lsx_vfsqrt_s): Ditto.
	(CODE_FOR_lsx_vfsqrt_d): Ditto.
	(CODE_FOR_lsx_vflogb_s): Ditto.
	(CODE_FOR_lsx_vflogb_d): Ditto.
	(CODE_FOR_lsx_vmax_b): Ditto.
	(CODE_FOR_lsx_vmax_h): Ditto.
	(CODE_FOR_lsx_vmax_w): Ditto.
	(CODE_FOR_lsx_vmax_d): Ditto.
	(CODE_FOR_lsx_vmaxi_b): Ditto.
	(CODE_FOR_lsx_vmaxi_h): Ditto.
	(CODE_FOR_lsx_vmaxi_w): Ditto.
	(CODE_FOR_lsx_vmaxi_d): Ditto.
	(CODE_FOR_lsx_vmax_bu): Ditto.
	(CODE_FOR_lsx_vmax_hu): Ditto.
	(CODE_FOR_lsx_vmax_wu): Ditto.
	(CODE_FOR_lsx_vmax_du): Ditto.
	(CODE_FOR_lsx_vmaxi_bu): Ditto.
	(CODE_FOR_lsx_vmaxi_hu): Ditto.
	(CODE_FOR_lsx_vmaxi_wu): Ditto.
	(CODE_FOR_lsx_vmaxi_du): Ditto.
	(CODE_FOR_lsx_vmin_b): Ditto.
	(CODE_FOR_lsx_vmin_h): Ditto.
	(CODE_FOR_lsx_vmin_w): Ditto.
	(CODE_FOR_lsx_vmin_d): Ditto.
	(CODE_FOR_lsx_vmini_b): Ditto.
	(CODE_FOR_lsx_vmini_h): Ditto.
	(CODE_FOR_lsx_vmini_w): Ditto.
	(CODE_FOR_lsx_vmini_d): Ditto.
	(CODE_FOR_lsx_vmin_bu): Ditto.
	(CODE_FOR_lsx_vmin_hu): Ditto.
	(CODE_FOR_lsx_vmin_wu): Ditto.
	(CODE_FOR_lsx_vmin_du): Ditto.
	(CODE_FOR_lsx_vmini_bu): Ditto.
	(CODE_FOR_lsx_vmini_hu): Ditto.
	(CODE_FOR_lsx_vmini_wu): Ditto.
	(CODE_FOR_lsx_vmini_du): Ditto.
	(CODE_FOR_lsx_vmod_b): Ditto.
	(CODE_FOR_lsx_vmod_h): Ditto.
	(CODE_FOR_lsx_vmod_w): Ditto.
	(CODE_FOR_lsx_vmod_d): Ditto.
	(CODE_FOR_lsx_vmod_bu): Ditto.
	(CODE_FOR_lsx_vmod_hu): Ditto.
	(CODE_FOR_lsx_vmod_wu): Ditto.
	(CODE_FOR_lsx_vmod_du): Ditto.
	(CODE_FOR_lsx_vmul_b): Ditto.
	(CODE_FOR_lsx_vmul_h): Ditto.
	(CODE_FOR_lsx_vmul_w): Ditto.
	(CODE_FOR_lsx_vmul_d): Ditto.
	(CODE_FOR_lsx_vclz_b): Ditto.
	(CODE_FOR_lsx_vclz_h): Ditto.
	(CODE_FOR_lsx_vclz_w): Ditto.
	(CODE_FOR_lsx_vclz_d): Ditto.
	(CODE_FOR_lsx_vnor_v): Ditto.
	(CODE_FOR_lsx_vor_v): Ditto.
	(CODE_FOR_lsx_vori_b): Ditto.
	(CODE_FOR_lsx_vnori_b): Ditto.
	(CODE_FOR_lsx_vpcnt_b): Ditto.
	(CODE_FOR_lsx_vpcnt_h): Ditto.
	(CODE_FOR_lsx_vpcnt_w): Ditto.
	(CODE_FOR_lsx_vpcnt_d): Ditto.
	(CODE_FOR_lsx_vxor_v): Ditto.
	(CODE_FOR_lsx_vxori_b): Ditto.
	(CODE_FOR_lsx_vsll_b): Ditto.
	(CODE_FOR_lsx_vsll_h): Ditto.
	(CODE_FOR_lsx_vsll_w): Ditto.
	(CODE_FOR_lsx_vsll_d): Ditto.
	(CODE_FOR_lsx_vslli_b): Ditto.
	(CODE_FOR_lsx_vslli_h): Ditto.
	(CODE_FOR_lsx_vslli_w): Ditto.
	(CODE_FOR_lsx_vslli_d): Ditto.
	(CODE_FOR_lsx_vsra_b): Ditto.
	(CODE_FOR_lsx_vsra_h): Ditto.
	(CODE_FOR_lsx_vsra_w): Ditto.
	(CODE_FOR_lsx_vsra_d): Ditto.
	(CODE_FOR_lsx_vsrai_b): Ditto.
	(CODE_FOR_lsx_vsrai_h): Ditto.
	(CODE_FOR_lsx_vsrai_w): Ditto.
	(CODE_FOR_lsx_vsrai_d): Ditto.
	(CODE_FOR_lsx_vsrl_b): Ditto.
	(CODE_FOR_lsx_vsrl_h): Ditto.
	(CODE_FOR_lsx_vsrl_w): Ditto.
	(CODE_FOR_lsx_vsrl_d): Ditto.
	(CODE_FOR_lsx_vsrli_b): Ditto.
	(CODE_FOR_lsx_vsrli_h): Ditto.
	(CODE_FOR_lsx_vsrli_w): Ditto.
	(CODE_FOR_lsx_vsrli_d): Ditto.
	(CODE_FOR_lsx_vsub_b): Ditto.
	(CODE_FOR_lsx_vsub_h): Ditto.
	(CODE_FOR_lsx_vsub_w): Ditto.
	(CODE_FOR_lsx_vsub_d): Ditto.
	(CODE_FOR_lsx_vsubi_bu): Ditto.
	(CODE_FOR_lsx_vsubi_hu): Ditto.
	(CODE_FOR_lsx_vsubi_wu): Ditto.
	(CODE_FOR_lsx_vsubi_du): Ditto.
	(CODE_FOR_lsx_vpackod_d): Ditto.
	(CODE_FOR_lsx_vpackev_d): Ditto.
	(CODE_FOR_lsx_vpickod_d): Ditto.
	(CODE_FOR_lsx_vpickev_d): Ditto.
	(CODE_FOR_lsx_vrepli_b): Ditto.
	(CODE_FOR_lsx_vrepli_h): Ditto.
	(CODE_FOR_lsx_vrepli_w): Ditto.
	(CODE_FOR_lsx_vrepli_d): Ditto.
	(CODE_FOR_lsx_vsat_b): Ditto.
	(CODE_FOR_lsx_vsat_h): Ditto.
	(CODE_FOR_lsx_vsat_w): Ditto.
	(CODE_FOR_lsx_vsat_d): Ditto.
	(CODE_FOR_lsx_vsat_bu): Ditto.
	(CODE_FOR_lsx_vsat_hu): Ditto.
	(CODE_FOR_lsx_vsat_wu): Ditto.
	(CODE_FOR_lsx_vsat_du): Ditto.
	(CODE_FOR_lsx_vavg_b): Ditto.
	(CODE_FOR_lsx_vavg_h): Ditto.
	(CODE_FOR_lsx_vavg_w): Ditto.
	(CODE_FOR_lsx_vavg_d): Ditto.
	(CODE_FOR_lsx_vavg_bu): Ditto.
	(CODE_FOR_lsx_vavg_hu): Ditto.
	(CODE_FOR_lsx_vavg_wu): Ditto.
	(CODE_FOR_lsx_vavg_du): Ditto.
	(CODE_FOR_lsx_vavgr_b): Ditto.
	(CODE_FOR_lsx_vavgr_h): Ditto.
	(CODE_FOR_lsx_vavgr_w): Ditto.
	(CODE_FOR_lsx_vavgr_d): Ditto.
	(CODE_FOR_lsx_vavgr_bu): Ditto.
	(CODE_FOR_lsx_vavgr_hu): Ditto.
	(CODE_FOR_lsx_vavgr_wu): Ditto.
	(CODE_FOR_lsx_vavgr_du): Ditto.
	(CODE_FOR_lsx_vssub_b): Ditto.
	(CODE_FOR_lsx_vssub_h): Ditto.
	(CODE_FOR_lsx_vssub_w): Ditto.
	(CODE_FOR_lsx_vssub_d): Ditto.
	(CODE_FOR_lsx_vssub_bu): Ditto.
	(CODE_FOR_lsx_vssub_hu): Ditto.
	(CODE_FOR_lsx_vssub_wu): Ditto.
	(CODE_FOR_lsx_vssub_du): Ditto.
	(CODE_FOR_lsx_vabsd_b): Ditto.
	(CODE_FOR_lsx_vabsd_h): Ditto.
	(CODE_FOR_lsx_vabsd_w): Ditto.
	(CODE_FOR_lsx_vabsd_d): Ditto.
	(CODE_FOR_lsx_vabsd_bu): Ditto.
	(CODE_FOR_lsx_vabsd_hu): Ditto.
	(CODE_FOR_lsx_vabsd_wu): Ditto.
	(CODE_FOR_lsx_vabsd_du): Ditto.
	(CODE_FOR_lsx_vftint_w_s): Ditto.
	(CODE_FOR_lsx_vftint_l_d): Ditto.
	(CODE_FOR_lsx_vftint_wu_s): Ditto.
	(CODE_FOR_lsx_vftint_lu_d): Ditto.
	(CODE_FOR_lsx_vandn_v): Ditto.
	(CODE_FOR_lsx_vorn_v): Ditto.
	(CODE_FOR_lsx_vneg_b): Ditto.
	(CODE_FOR_lsx_vneg_h): Ditto.
	(CODE_FOR_lsx_vneg_w): Ditto.
	(CODE_FOR_lsx_vneg_d): Ditto.
	(CODE_FOR_lsx_vshuf4i_d): Ditto.
	(CODE_FOR_lsx_vbsrl_v): Ditto.
	(CODE_FOR_lsx_vbsll_v): Ditto.
	(CODE_FOR_lsx_vfmadd_s): Ditto.
	(CODE_FOR_lsx_vfmadd_d): Ditto.
	(CODE_FOR_lsx_vfmsub_s): Ditto.
	(CODE_FOR_lsx_vfmsub_d): Ditto.
	(CODE_FOR_lsx_vfnmadd_s): Ditto.
	(CODE_FOR_lsx_vfnmadd_d): Ditto.
	(CODE_FOR_lsx_vfnmsub_s): Ditto.
	(CODE_FOR_lsx_vfnmsub_d): Ditto.
	(CODE_FOR_lsx_vmuh_b): Ditto.
	(CODE_FOR_lsx_vmuh_h): Ditto.
	(CODE_FOR_lsx_vmuh_w): Ditto.
	(CODE_FOR_lsx_vmuh_d): Ditto.
	(CODE_FOR_lsx_vmuh_bu): Ditto.
	(CODE_FOR_lsx_vmuh_hu): Ditto.
	(CODE_FOR_lsx_vmuh_wu): Ditto.
	(CODE_FOR_lsx_vmuh_du): Ditto.
	(CODE_FOR_lsx_vsllwil_h_b): Ditto.
	(CODE_FOR_lsx_vsllwil_w_h): Ditto.
	(CODE_FOR_lsx_vsllwil_d_w): Ditto.
	(CODE_FOR_lsx_vsllwil_hu_bu): Ditto.
	(CODE_FOR_lsx_vsllwil_wu_hu): Ditto.
	(CODE_FOR_lsx_vsllwil_du_wu): Ditto.
	(CODE_FOR_lsx_vssran_b_h): Ditto.
	(CODE_FOR_lsx_vssran_h_w): Ditto.
	(CODE_FOR_lsx_vssran_w_d): Ditto.
	(CODE_FOR_lsx_vssran_bu_h): Ditto.
	(CODE_FOR_lsx_vssran_hu_w): Ditto.
	(CODE_FOR_lsx_vssran_wu_d): Ditto.
	(CODE_FOR_lsx_vssrarn_b_h): Ditto.
	(CODE_FOR_lsx_vssrarn_h_w): Ditto.
	(CODE_FOR_lsx_vssrarn_w_d): Ditto.
	(CODE_FOR_lsx_vssrarn_bu_h): Ditto.
	(CODE_FOR_lsx_vssrarn_hu_w): Ditto.
	(CODE_FOR_lsx_vssrarn_wu_d): Ditto.
	(CODE_FOR_lsx_vssrln_bu_h): Ditto.
	(CODE_FOR_lsx_vssrln_hu_w): Ditto.
	(CODE_FOR_lsx_vssrln_wu_d): Ditto.
	(CODE_FOR_lsx_vssrlrn_bu_h): Ditto.
	(CODE_FOR_lsx_vssrlrn_hu_w): Ditto.
	(CODE_FOR_lsx_vssrlrn_wu_d): Ditto.
	(loongarch_builtin_vector_type): Ditto.
	(loongarch_build_cvpointer_type): Ditto.
	(LARCH_ATYPE_CVPOINTER): Ditto.
	(LARCH_ATYPE_BOOLEAN): Ditto.
	(LARCH_ATYPE_V2SF): Ditto.
	(LARCH_ATYPE_V2HI): Ditto.
	(LARCH_ATYPE_V2SI): Ditto.
	(LARCH_ATYPE_V4QI): Ditto.
	(LARCH_ATYPE_V4HI): Ditto.
	(LARCH_ATYPE_V8QI): Ditto.
	(LARCH_ATYPE_V2DI): Ditto.
	(LARCH_ATYPE_V4SI): Ditto.
	(LARCH_ATYPE_V8HI): Ditto.
	(LARCH_ATYPE_V16QI): Ditto.
	(LARCH_ATYPE_V2DF): Ditto.
	(LARCH_ATYPE_V4SF): Ditto.
	(LARCH_ATYPE_V4DI): Ditto.
	(LARCH_ATYPE_V8SI): Ditto.
	(LARCH_ATYPE_V16HI): Ditto.
	(LARCH_ATYPE_V32QI): Ditto.
	(LARCH_ATYPE_V4DF): Ditto.
	(LARCH_ATYPE_V8SF): Ditto.
	(LARCH_ATYPE_UV2DI): Ditto.
	(LARCH_ATYPE_UV4SI): Ditto.
	(LARCH_ATYPE_UV8HI): Ditto.
	(LARCH_ATYPE_UV16QI): Ditto.
	(LARCH_ATYPE_UV4DI): Ditto.
	(LARCH_ATYPE_UV8SI): Ditto.
	(LARCH_ATYPE_UV16HI): Ditto.
	(LARCH_ATYPE_UV32QI): Ditto.
	(LARCH_ATYPE_UV2SI): Ditto.
	(LARCH_ATYPE_UV4HI): Ditto.
	(LARCH_ATYPE_UV8QI): Ditto.
	(loongarch_builtin_vectorized_function): Ditto.
	(LARCH_GET_BUILTIN): Ditto.
	(loongarch_expand_builtin_insn): Ditto.
	(loongarch_expand_builtin_lsx_test_branch): Ditto.
	(loongarch_expand_builtin): Ditto.
	* config/loongarch/loongarch-ftypes.def (1): Ditto.
	(2): Ditto.
	(3): Ditto.
	(4): Ditto.
	* config/loongarch/lsxintrin.h: New file.
---
 gcc/config.gcc                             |    2 +-
 gcc/config/loongarch/loongarch-builtins.cc | 1498 +++++-
 gcc/config/loongarch/loongarch-ftypes.def  |  397 +-
 gcc/config/loongarch/lsxintrin.h           | 5181 ++++++++++++++++++++
 4 files changed, 7071 insertions(+), 7 deletions(-)
 create mode 100644 gcc/config/loongarch/lsxintrin.h
diff mbox series

Patch

diff --git a/gcc/config.gcc b/gcc/config.gcc
index d88071773c9..3aa1d9dd4e6 100644
--- a/gcc/config.gcc
+++ b/gcc/config.gcc
@@ -468,7 +468,7 @@  mips*-*-*)
 	;;
 loongarch*-*-*)
 	cpu_type=loongarch
-	extra_headers="larchintrin.h"
+	extra_headers="larchintrin.h lsxintrin.h"
 	extra_objs="loongarch-c.o loongarch-builtins.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
 	extra_gcc_objs="loongarch-driver.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
 	extra_options="${extra_options} g.opt fused-madd.opt"
diff --git a/gcc/config/loongarch/loongarch-builtins.cc b/gcc/config/loongarch/loongarch-builtins.cc
index ebe70a986c3..5958f5b7fbe 100644
--- a/gcc/config/loongarch/loongarch-builtins.cc
+++ b/gcc/config/loongarch/loongarch-builtins.cc
@@ -34,14 +34,18 @@  along with GCC; see the file COPYING3.  If not see
 #include "recog.h"
 #include "diagnostic.h"
 #include "fold-const.h"
+#include "explow.h"
 #include "expr.h"
 #include "langhooks.h"
 #include "emit-rtl.h"
+#include "case-cfn-macros.h"
 
 /* Macros to create an enumeration identifier for a function prototype.  */
 #define LARCH_FTYPE_NAME1(A, B) LARCH_##A##_FTYPE_##B
 #define LARCH_FTYPE_NAME2(A, B, C) LARCH_##A##_FTYPE_##B##_##C
 #define LARCH_FTYPE_NAME3(A, B, C, D) LARCH_##A##_FTYPE_##B##_##C##_##D
+#define LARCH_FTYPE_NAME4(A, B, C, D, E) \
+  LARCH_##A##_FTYPE_##B##_##C##_##D##_##E
 
 /* Classifies the prototype of a built-in function.  */
 enum loongarch_function_type
@@ -64,6 +68,12 @@  enum loongarch_builtin_type
      value and the arguments are mapped to operands 0 and above.  */
   LARCH_BUILTIN_DIRECT_NO_TARGET,
 
+  /* For generating LoongArch LSX.  */
+  LARCH_BUILTIN_LSX,
+
+  /* The function corresponds to an LSX conditional branch instruction
+     combined with a compare instruction.  */
+  LARCH_BUILTIN_LSX_TEST_BRANCH,
 };
 
 /* Declare an availability predicate for built-in functions that require
@@ -101,6 +111,7 @@  struct loongarch_builtin_description
 };
 
 AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
+AVAIL_ALL (lsx, ISA_HAS_LSX)
 
 /* Construct a loongarch_builtin_description from the given arguments.
 
@@ -120,8 +131,8 @@  AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
 #define LARCH_BUILTIN(INSN, NAME, BUILTIN_TYPE, FUNCTION_TYPE, AVAIL) \
   { \
     CODE_FOR_loongarch_##INSN, "__builtin_loongarch_" NAME, \
-      BUILTIN_TYPE, FUNCTION_TYPE, \
-      loongarch_builtin_avail_##AVAIL \
+    BUILTIN_TYPE, FUNCTION_TYPE, \
+    loongarch_builtin_avail_##AVAIL \
   }
 
 /* Define __builtin_loongarch_<INSN>, which is a LARCH_BUILTIN_DIRECT function
@@ -137,6 +148,300 @@  AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
   LARCH_BUILTIN (INSN, #INSN, LARCH_BUILTIN_DIRECT_NO_TARGET, \
 		 FUNCTION_TYPE, AVAIL)
 
+/* Define an LSX LARCH_BUILTIN_DIRECT function __builtin_lsx_<INSN>
+   for instruction CODE_FOR_lsx_<INSN>.  FUNCTION_TYPE is a builtin_description
+   field.  */
+#define LSX_BUILTIN(INSN, FUNCTION_TYPE)				\
+  { CODE_FOR_lsx_ ## INSN,						\
+    "__builtin_lsx_" #INSN,  LARCH_BUILTIN_DIRECT,			\
+    FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+
+/* Define an LSX LARCH_BUILTIN_LSX_TEST_BRANCH function __builtin_lsx_<INSN>
+   for instruction CODE_FOR_lsx_<INSN>.  FUNCTION_TYPE is a builtin_description
+   field.  */
+#define LSX_BUILTIN_TEST_BRANCH(INSN, FUNCTION_TYPE)			\
+  { CODE_FOR_lsx_ ## INSN,						\
+    "__builtin_lsx_" #INSN, LARCH_BUILTIN_LSX_TEST_BRANCH,		\
+    FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+/* Define an LSX LARCH_BUILTIN_DIRECT_NO_TARGET function __builtin_lsx_<INSN>
+   for instruction CODE_FOR_lsx_<INSN>.  FUNCTION_TYPE is a builtin_description
+   field.  */
+#define LSX_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE)			\
+  { CODE_FOR_lsx_ ## INSN,						\
+    "__builtin_lsx_" #INSN,  LARCH_BUILTIN_DIRECT_NO_TARGET,		\
+    FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+/* LoongArch SX define CODE_FOR_lsx_xxx */
+#define CODE_FOR_lsx_vsadd_b CODE_FOR_ssaddv16qi3
+#define CODE_FOR_lsx_vsadd_h CODE_FOR_ssaddv8hi3
+#define CODE_FOR_lsx_vsadd_w CODE_FOR_ssaddv4si3
+#define CODE_FOR_lsx_vsadd_d CODE_FOR_ssaddv2di3
+#define CODE_FOR_lsx_vsadd_bu CODE_FOR_usaddv16qi3
+#define CODE_FOR_lsx_vsadd_hu CODE_FOR_usaddv8hi3
+#define CODE_FOR_lsx_vsadd_wu CODE_FOR_usaddv4si3
+#define CODE_FOR_lsx_vsadd_du CODE_FOR_usaddv2di3
+#define CODE_FOR_lsx_vadd_b CODE_FOR_addv16qi3
+#define CODE_FOR_lsx_vadd_h CODE_FOR_addv8hi3
+#define CODE_FOR_lsx_vadd_w CODE_FOR_addv4si3
+#define CODE_FOR_lsx_vadd_d CODE_FOR_addv2di3
+#define CODE_FOR_lsx_vaddi_bu CODE_FOR_addv16qi3
+#define CODE_FOR_lsx_vaddi_hu CODE_FOR_addv8hi3
+#define CODE_FOR_lsx_vaddi_wu CODE_FOR_addv4si3
+#define CODE_FOR_lsx_vaddi_du CODE_FOR_addv2di3
+#define CODE_FOR_lsx_vand_v CODE_FOR_andv16qi3
+#define CODE_FOR_lsx_vandi_b CODE_FOR_andv16qi3
+#define CODE_FOR_lsx_bnz_v CODE_FOR_lsx_bnz_v_b
+#define CODE_FOR_lsx_bz_v CODE_FOR_lsx_bz_v_b
+#define CODE_FOR_lsx_vbitsel_v CODE_FOR_lsx_vbitsel_b
+#define CODE_FOR_lsx_vseqi_b CODE_FOR_lsx_vseq_b
+#define CODE_FOR_lsx_vseqi_h CODE_FOR_lsx_vseq_h
+#define CODE_FOR_lsx_vseqi_w CODE_FOR_lsx_vseq_w
+#define CODE_FOR_lsx_vseqi_d CODE_FOR_lsx_vseq_d
+#define CODE_FOR_lsx_vslti_b CODE_FOR_lsx_vslt_b
+#define CODE_FOR_lsx_vslti_h CODE_FOR_lsx_vslt_h
+#define CODE_FOR_lsx_vslti_w CODE_FOR_lsx_vslt_w
+#define CODE_FOR_lsx_vslti_d CODE_FOR_lsx_vslt_d
+#define CODE_FOR_lsx_vslti_bu CODE_FOR_lsx_vslt_bu
+#define CODE_FOR_lsx_vslti_hu CODE_FOR_lsx_vslt_hu
+#define CODE_FOR_lsx_vslti_wu CODE_FOR_lsx_vslt_wu
+#define CODE_FOR_lsx_vslti_du CODE_FOR_lsx_vslt_du
+#define CODE_FOR_lsx_vslei_b CODE_FOR_lsx_vsle_b
+#define CODE_FOR_lsx_vslei_h CODE_FOR_lsx_vsle_h
+#define CODE_FOR_lsx_vslei_w CODE_FOR_lsx_vsle_w
+#define CODE_FOR_lsx_vslei_d CODE_FOR_lsx_vsle_d
+#define CODE_FOR_lsx_vslei_bu CODE_FOR_lsx_vsle_bu
+#define CODE_FOR_lsx_vslei_hu CODE_FOR_lsx_vsle_hu
+#define CODE_FOR_lsx_vslei_wu CODE_FOR_lsx_vsle_wu
+#define CODE_FOR_lsx_vslei_du CODE_FOR_lsx_vsle_du
+#define CODE_FOR_lsx_vdiv_b CODE_FOR_divv16qi3
+#define CODE_FOR_lsx_vdiv_h CODE_FOR_divv8hi3
+#define CODE_FOR_lsx_vdiv_w CODE_FOR_divv4si3
+#define CODE_FOR_lsx_vdiv_d CODE_FOR_divv2di3
+#define CODE_FOR_lsx_vdiv_bu CODE_FOR_udivv16qi3
+#define CODE_FOR_lsx_vdiv_hu CODE_FOR_udivv8hi3
+#define CODE_FOR_lsx_vdiv_wu CODE_FOR_udivv4si3
+#define CODE_FOR_lsx_vdiv_du CODE_FOR_udivv2di3
+#define CODE_FOR_lsx_vfadd_s CODE_FOR_addv4sf3
+#define CODE_FOR_lsx_vfadd_d CODE_FOR_addv2df3
+#define CODE_FOR_lsx_vftintrz_w_s CODE_FOR_fix_truncv4sfv4si2
+#define CODE_FOR_lsx_vftintrz_l_d CODE_FOR_fix_truncv2dfv2di2
+#define CODE_FOR_lsx_vftintrz_wu_s CODE_FOR_fixuns_truncv4sfv4si2
+#define CODE_FOR_lsx_vftintrz_lu_d CODE_FOR_fixuns_truncv2dfv2di2
+#define CODE_FOR_lsx_vffint_s_w CODE_FOR_floatv4siv4sf2
+#define CODE_FOR_lsx_vffint_d_l CODE_FOR_floatv2div2df2
+#define CODE_FOR_lsx_vffint_s_wu CODE_FOR_floatunsv4siv4sf2
+#define CODE_FOR_lsx_vffint_d_lu CODE_FOR_floatunsv2div2df2
+#define CODE_FOR_lsx_vfsub_s CODE_FOR_subv4sf3
+#define CODE_FOR_lsx_vfsub_d CODE_FOR_subv2df3
+#define CODE_FOR_lsx_vfmul_s CODE_FOR_mulv4sf3
+#define CODE_FOR_lsx_vfmul_d CODE_FOR_mulv2df3
+#define CODE_FOR_lsx_vfdiv_s CODE_FOR_divv4sf3
+#define CODE_FOR_lsx_vfdiv_d CODE_FOR_divv2df3
+#define CODE_FOR_lsx_vfmax_s CODE_FOR_smaxv4sf3
+#define CODE_FOR_lsx_vfmax_d CODE_FOR_smaxv2df3
+#define CODE_FOR_lsx_vfmin_s CODE_FOR_sminv4sf3
+#define CODE_FOR_lsx_vfmin_d CODE_FOR_sminv2df3
+#define CODE_FOR_lsx_vfsqrt_s CODE_FOR_sqrtv4sf2
+#define CODE_FOR_lsx_vfsqrt_d CODE_FOR_sqrtv2df2
+#define CODE_FOR_lsx_vflogb_s CODE_FOR_logbv4sf2
+#define CODE_FOR_lsx_vflogb_d CODE_FOR_logbv2df2
+#define CODE_FOR_lsx_vmax_b CODE_FOR_smaxv16qi3
+#define CODE_FOR_lsx_vmax_h CODE_FOR_smaxv8hi3
+#define CODE_FOR_lsx_vmax_w CODE_FOR_smaxv4si3
+#define CODE_FOR_lsx_vmax_d CODE_FOR_smaxv2di3
+#define CODE_FOR_lsx_vmaxi_b CODE_FOR_smaxv16qi3
+#define CODE_FOR_lsx_vmaxi_h CODE_FOR_smaxv8hi3
+#define CODE_FOR_lsx_vmaxi_w CODE_FOR_smaxv4si3
+#define CODE_FOR_lsx_vmaxi_d CODE_FOR_smaxv2di3
+#define CODE_FOR_lsx_vmax_bu CODE_FOR_umaxv16qi3
+#define CODE_FOR_lsx_vmax_hu CODE_FOR_umaxv8hi3
+#define CODE_FOR_lsx_vmax_wu CODE_FOR_umaxv4si3
+#define CODE_FOR_lsx_vmax_du CODE_FOR_umaxv2di3
+#define CODE_FOR_lsx_vmaxi_bu CODE_FOR_umaxv16qi3
+#define CODE_FOR_lsx_vmaxi_hu CODE_FOR_umaxv8hi3
+#define CODE_FOR_lsx_vmaxi_wu CODE_FOR_umaxv4si3
+#define CODE_FOR_lsx_vmaxi_du CODE_FOR_umaxv2di3
+#define CODE_FOR_lsx_vmin_b CODE_FOR_sminv16qi3
+#define CODE_FOR_lsx_vmin_h CODE_FOR_sminv8hi3
+#define CODE_FOR_lsx_vmin_w CODE_FOR_sminv4si3
+#define CODE_FOR_lsx_vmin_d CODE_FOR_sminv2di3
+#define CODE_FOR_lsx_vmini_b CODE_FOR_sminv16qi3
+#define CODE_FOR_lsx_vmini_h CODE_FOR_sminv8hi3
+#define CODE_FOR_lsx_vmini_w CODE_FOR_sminv4si3
+#define CODE_FOR_lsx_vmini_d CODE_FOR_sminv2di3
+#define CODE_FOR_lsx_vmin_bu CODE_FOR_uminv16qi3
+#define CODE_FOR_lsx_vmin_hu CODE_FOR_uminv8hi3
+#define CODE_FOR_lsx_vmin_wu CODE_FOR_uminv4si3
+#define CODE_FOR_lsx_vmin_du CODE_FOR_uminv2di3
+#define CODE_FOR_lsx_vmini_bu CODE_FOR_uminv16qi3
+#define CODE_FOR_lsx_vmini_hu CODE_FOR_uminv8hi3
+#define CODE_FOR_lsx_vmini_wu CODE_FOR_uminv4si3
+#define CODE_FOR_lsx_vmini_du CODE_FOR_uminv2di3
+#define CODE_FOR_lsx_vmod_b CODE_FOR_modv16qi3
+#define CODE_FOR_lsx_vmod_h CODE_FOR_modv8hi3
+#define CODE_FOR_lsx_vmod_w CODE_FOR_modv4si3
+#define CODE_FOR_lsx_vmod_d CODE_FOR_modv2di3
+#define CODE_FOR_lsx_vmod_bu CODE_FOR_umodv16qi3
+#define CODE_FOR_lsx_vmod_hu CODE_FOR_umodv8hi3
+#define CODE_FOR_lsx_vmod_wu CODE_FOR_umodv4si3
+#define CODE_FOR_lsx_vmod_du CODE_FOR_umodv2di3
+#define CODE_FOR_lsx_vmul_b CODE_FOR_mulv16qi3
+#define CODE_FOR_lsx_vmul_h CODE_FOR_mulv8hi3
+#define CODE_FOR_lsx_vmul_w CODE_FOR_mulv4si3
+#define CODE_FOR_lsx_vmul_d CODE_FOR_mulv2di3
+#define CODE_FOR_lsx_vclz_b CODE_FOR_clzv16qi2
+#define CODE_FOR_lsx_vclz_h CODE_FOR_clzv8hi2
+#define CODE_FOR_lsx_vclz_w CODE_FOR_clzv4si2
+#define CODE_FOR_lsx_vclz_d CODE_FOR_clzv2di2
+#define CODE_FOR_lsx_vnor_v CODE_FOR_lsx_nor_b
+#define CODE_FOR_lsx_vor_v CODE_FOR_iorv16qi3
+#define CODE_FOR_lsx_vori_b CODE_FOR_iorv16qi3
+#define CODE_FOR_lsx_vnori_b CODE_FOR_lsx_nor_b
+#define CODE_FOR_lsx_vpcnt_b CODE_FOR_popcountv16qi2
+#define CODE_FOR_lsx_vpcnt_h CODE_FOR_popcountv8hi2
+#define CODE_FOR_lsx_vpcnt_w CODE_FOR_popcountv4si2
+#define CODE_FOR_lsx_vpcnt_d CODE_FOR_popcountv2di2
+#define CODE_FOR_lsx_vxor_v CODE_FOR_xorv16qi3
+#define CODE_FOR_lsx_vxori_b CODE_FOR_xorv16qi3
+#define CODE_FOR_lsx_vsll_b CODE_FOR_vashlv16qi3
+#define CODE_FOR_lsx_vsll_h CODE_FOR_vashlv8hi3
+#define CODE_FOR_lsx_vsll_w CODE_FOR_vashlv4si3
+#define CODE_FOR_lsx_vsll_d CODE_FOR_vashlv2di3
+#define CODE_FOR_lsx_vslli_b CODE_FOR_vashlv16qi3
+#define CODE_FOR_lsx_vslli_h CODE_FOR_vashlv8hi3
+#define CODE_FOR_lsx_vslli_w CODE_FOR_vashlv4si3
+#define CODE_FOR_lsx_vslli_d CODE_FOR_vashlv2di3
+#define CODE_FOR_lsx_vsra_b CODE_FOR_vashrv16qi3
+#define CODE_FOR_lsx_vsra_h CODE_FOR_vashrv8hi3
+#define CODE_FOR_lsx_vsra_w CODE_FOR_vashrv4si3
+#define CODE_FOR_lsx_vsra_d CODE_FOR_vashrv2di3
+#define CODE_FOR_lsx_vsrai_b CODE_FOR_vashrv16qi3
+#define CODE_FOR_lsx_vsrai_h CODE_FOR_vashrv8hi3
+#define CODE_FOR_lsx_vsrai_w CODE_FOR_vashrv4si3
+#define CODE_FOR_lsx_vsrai_d CODE_FOR_vashrv2di3
+#define CODE_FOR_lsx_vsrl_b CODE_FOR_vlshrv16qi3
+#define CODE_FOR_lsx_vsrl_h CODE_FOR_vlshrv8hi3
+#define CODE_FOR_lsx_vsrl_w CODE_FOR_vlshrv4si3
+#define CODE_FOR_lsx_vsrl_d CODE_FOR_vlshrv2di3
+#define CODE_FOR_lsx_vsrli_b CODE_FOR_vlshrv16qi3
+#define CODE_FOR_lsx_vsrli_h CODE_FOR_vlshrv8hi3
+#define CODE_FOR_lsx_vsrli_w CODE_FOR_vlshrv4si3
+#define CODE_FOR_lsx_vsrli_d CODE_FOR_vlshrv2di3
+#define CODE_FOR_lsx_vsub_b CODE_FOR_subv16qi3
+#define CODE_FOR_lsx_vsub_h CODE_FOR_subv8hi3
+#define CODE_FOR_lsx_vsub_w CODE_FOR_subv4si3
+#define CODE_FOR_lsx_vsub_d CODE_FOR_subv2di3
+#define CODE_FOR_lsx_vsubi_bu CODE_FOR_subv16qi3
+#define CODE_FOR_lsx_vsubi_hu CODE_FOR_subv8hi3
+#define CODE_FOR_lsx_vsubi_wu CODE_FOR_subv4si3
+#define CODE_FOR_lsx_vsubi_du CODE_FOR_subv2di3
+
+#define CODE_FOR_lsx_vpackod_d CODE_FOR_lsx_vilvh_d
+#define CODE_FOR_lsx_vpackev_d CODE_FOR_lsx_vilvl_d
+#define CODE_FOR_lsx_vpickod_d CODE_FOR_lsx_vilvh_d
+#define CODE_FOR_lsx_vpickev_d CODE_FOR_lsx_vilvl_d
+
+#define CODE_FOR_lsx_vrepli_b CODE_FOR_lsx_vrepliv16qi
+#define CODE_FOR_lsx_vrepli_h CODE_FOR_lsx_vrepliv8hi
+#define CODE_FOR_lsx_vrepli_w CODE_FOR_lsx_vrepliv4si
+#define CODE_FOR_lsx_vrepli_d CODE_FOR_lsx_vrepliv2di
+#define CODE_FOR_lsx_vsat_b CODE_FOR_lsx_vsat_s_b
+#define CODE_FOR_lsx_vsat_h CODE_FOR_lsx_vsat_s_h
+#define CODE_FOR_lsx_vsat_w CODE_FOR_lsx_vsat_s_w
+#define CODE_FOR_lsx_vsat_d CODE_FOR_lsx_vsat_s_d
+#define CODE_FOR_lsx_vsat_bu CODE_FOR_lsx_vsat_u_bu
+#define CODE_FOR_lsx_vsat_hu CODE_FOR_lsx_vsat_u_hu
+#define CODE_FOR_lsx_vsat_wu CODE_FOR_lsx_vsat_u_wu
+#define CODE_FOR_lsx_vsat_du CODE_FOR_lsx_vsat_u_du
+#define CODE_FOR_lsx_vavg_b CODE_FOR_lsx_vavg_s_b
+#define CODE_FOR_lsx_vavg_h CODE_FOR_lsx_vavg_s_h
+#define CODE_FOR_lsx_vavg_w CODE_FOR_lsx_vavg_s_w
+#define CODE_FOR_lsx_vavg_d CODE_FOR_lsx_vavg_s_d
+#define CODE_FOR_lsx_vavg_bu CODE_FOR_lsx_vavg_u_bu
+#define CODE_FOR_lsx_vavg_hu CODE_FOR_lsx_vavg_u_hu
+#define CODE_FOR_lsx_vavg_wu CODE_FOR_lsx_vavg_u_wu
+#define CODE_FOR_lsx_vavg_du CODE_FOR_lsx_vavg_u_du
+#define CODE_FOR_lsx_vavgr_b CODE_FOR_lsx_vavgr_s_b
+#define CODE_FOR_lsx_vavgr_h CODE_FOR_lsx_vavgr_s_h
+#define CODE_FOR_lsx_vavgr_w CODE_FOR_lsx_vavgr_s_w
+#define CODE_FOR_lsx_vavgr_d CODE_FOR_lsx_vavgr_s_d
+#define CODE_FOR_lsx_vavgr_bu CODE_FOR_lsx_vavgr_u_bu
+#define CODE_FOR_lsx_vavgr_hu CODE_FOR_lsx_vavgr_u_hu
+#define CODE_FOR_lsx_vavgr_wu CODE_FOR_lsx_vavgr_u_wu
+#define CODE_FOR_lsx_vavgr_du CODE_FOR_lsx_vavgr_u_du
+#define CODE_FOR_lsx_vssub_b CODE_FOR_lsx_vssub_s_b
+#define CODE_FOR_lsx_vssub_h CODE_FOR_lsx_vssub_s_h
+#define CODE_FOR_lsx_vssub_w CODE_FOR_lsx_vssub_s_w
+#define CODE_FOR_lsx_vssub_d CODE_FOR_lsx_vssub_s_d
+#define CODE_FOR_lsx_vssub_bu CODE_FOR_lsx_vssub_u_bu
+#define CODE_FOR_lsx_vssub_hu CODE_FOR_lsx_vssub_u_hu
+#define CODE_FOR_lsx_vssub_wu CODE_FOR_lsx_vssub_u_wu
+#define CODE_FOR_lsx_vssub_du CODE_FOR_lsx_vssub_u_du
+#define CODE_FOR_lsx_vabsd_b CODE_FOR_lsx_vabsd_s_b
+#define CODE_FOR_lsx_vabsd_h CODE_FOR_lsx_vabsd_s_h
+#define CODE_FOR_lsx_vabsd_w CODE_FOR_lsx_vabsd_s_w
+#define CODE_FOR_lsx_vabsd_d CODE_FOR_lsx_vabsd_s_d
+#define CODE_FOR_lsx_vabsd_bu CODE_FOR_lsx_vabsd_u_bu
+#define CODE_FOR_lsx_vabsd_hu CODE_FOR_lsx_vabsd_u_hu
+#define CODE_FOR_lsx_vabsd_wu CODE_FOR_lsx_vabsd_u_wu
+#define CODE_FOR_lsx_vabsd_du CODE_FOR_lsx_vabsd_u_du
+#define CODE_FOR_lsx_vftint_w_s CODE_FOR_lsx_vftint_s_w_s
+#define CODE_FOR_lsx_vftint_l_d CODE_FOR_lsx_vftint_s_l_d
+#define CODE_FOR_lsx_vftint_wu_s CODE_FOR_lsx_vftint_u_wu_s
+#define CODE_FOR_lsx_vftint_lu_d CODE_FOR_lsx_vftint_u_lu_d
+#define CODE_FOR_lsx_vandn_v CODE_FOR_vandnv16qi3
+#define CODE_FOR_lsx_vorn_v CODE_FOR_vornv16qi3
+#define CODE_FOR_lsx_vneg_b CODE_FOR_vnegv16qi2
+#define CODE_FOR_lsx_vneg_h CODE_FOR_vnegv8hi2
+#define CODE_FOR_lsx_vneg_w CODE_FOR_vnegv4si2
+#define CODE_FOR_lsx_vneg_d CODE_FOR_vnegv2di2
+#define CODE_FOR_lsx_vshuf4i_d CODE_FOR_lsx_vshuf4i_d
+#define CODE_FOR_lsx_vbsrl_v CODE_FOR_lsx_vbsrl_b
+#define CODE_FOR_lsx_vbsll_v CODE_FOR_lsx_vbsll_b
+#define CODE_FOR_lsx_vfmadd_s CODE_FOR_fmav4sf4
+#define CODE_FOR_lsx_vfmadd_d CODE_FOR_fmav2df4
+#define CODE_FOR_lsx_vfmsub_s CODE_FOR_fmsv4sf4
+#define CODE_FOR_lsx_vfmsub_d CODE_FOR_fmsv2df4
+#define CODE_FOR_lsx_vfnmadd_s CODE_FOR_vfnmaddv4sf4_nmadd4
+#define CODE_FOR_lsx_vfnmadd_d CODE_FOR_vfnmaddv2df4_nmadd4
+#define CODE_FOR_lsx_vfnmsub_s CODE_FOR_vfnmsubv4sf4_nmsub4
+#define CODE_FOR_lsx_vfnmsub_d CODE_FOR_vfnmsubv2df4_nmsub4
+
+#define CODE_FOR_lsx_vmuh_b CODE_FOR_lsx_vmuh_s_b
+#define CODE_FOR_lsx_vmuh_h CODE_FOR_lsx_vmuh_s_h
+#define CODE_FOR_lsx_vmuh_w CODE_FOR_lsx_vmuh_s_w
+#define CODE_FOR_lsx_vmuh_d CODE_FOR_lsx_vmuh_s_d
+#define CODE_FOR_lsx_vmuh_bu CODE_FOR_lsx_vmuh_u_bu
+#define CODE_FOR_lsx_vmuh_hu CODE_FOR_lsx_vmuh_u_hu
+#define CODE_FOR_lsx_vmuh_wu CODE_FOR_lsx_vmuh_u_wu
+#define CODE_FOR_lsx_vmuh_du CODE_FOR_lsx_vmuh_u_du
+#define CODE_FOR_lsx_vsllwil_h_b CODE_FOR_lsx_vsllwil_s_h_b
+#define CODE_FOR_lsx_vsllwil_w_h CODE_FOR_lsx_vsllwil_s_w_h
+#define CODE_FOR_lsx_vsllwil_d_w CODE_FOR_lsx_vsllwil_s_d_w
+#define CODE_FOR_lsx_vsllwil_hu_bu CODE_FOR_lsx_vsllwil_u_hu_bu
+#define CODE_FOR_lsx_vsllwil_wu_hu CODE_FOR_lsx_vsllwil_u_wu_hu
+#define CODE_FOR_lsx_vsllwil_du_wu CODE_FOR_lsx_vsllwil_u_du_wu
+#define CODE_FOR_lsx_vssran_b_h CODE_FOR_lsx_vssran_s_b_h
+#define CODE_FOR_lsx_vssran_h_w CODE_FOR_lsx_vssran_s_h_w
+#define CODE_FOR_lsx_vssran_w_d CODE_FOR_lsx_vssran_s_w_d
+#define CODE_FOR_lsx_vssran_bu_h CODE_FOR_lsx_vssran_u_bu_h
+#define CODE_FOR_lsx_vssran_hu_w CODE_FOR_lsx_vssran_u_hu_w
+#define CODE_FOR_lsx_vssran_wu_d CODE_FOR_lsx_vssran_u_wu_d
+#define CODE_FOR_lsx_vssrarn_b_h CODE_FOR_lsx_vssrarn_s_b_h
+#define CODE_FOR_lsx_vssrarn_h_w CODE_FOR_lsx_vssrarn_s_h_w
+#define CODE_FOR_lsx_vssrarn_w_d CODE_FOR_lsx_vssrarn_s_w_d
+#define CODE_FOR_lsx_vssrarn_bu_h CODE_FOR_lsx_vssrarn_u_bu_h
+#define CODE_FOR_lsx_vssrarn_hu_w CODE_FOR_lsx_vssrarn_u_hu_w
+#define CODE_FOR_lsx_vssrarn_wu_d CODE_FOR_lsx_vssrarn_u_wu_d
+#define CODE_FOR_lsx_vssrln_bu_h CODE_FOR_lsx_vssrln_u_bu_h
+#define CODE_FOR_lsx_vssrln_hu_w CODE_FOR_lsx_vssrln_u_hu_w
+#define CODE_FOR_lsx_vssrln_wu_d CODE_FOR_lsx_vssrln_u_wu_d
+#define CODE_FOR_lsx_vssrlrn_bu_h CODE_FOR_lsx_vssrlrn_u_bu_h
+#define CODE_FOR_lsx_vssrlrn_hu_w CODE_FOR_lsx_vssrlrn_u_hu_w
+#define CODE_FOR_lsx_vssrlrn_wu_d CODE_FOR_lsx_vssrlrn_u_wu_d
+
 static const struct loongarch_builtin_description loongarch_builtins[] = {
 #define LARCH_MOVFCSR2GR 0
   DIRECT_BUILTIN (movfcsr2gr, LARCH_USI_FTYPE_UQI, hard_float),
@@ -184,6 +489,727 @@  static const struct loongarch_builtin_description loongarch_builtins[] = {
   DIRECT_NO_TARGET_BUILTIN (asrtgt_d, LARCH_VOID_FTYPE_DI_DI, default),
   DIRECT_NO_TARGET_BUILTIN (syscall, LARCH_VOID_FTYPE_USI, default),
   DIRECT_NO_TARGET_BUILTIN (break, LARCH_VOID_FTYPE_USI, default),
+
+  /* Built-in functions for LSX.  */
+  LSX_BUILTIN (vsll_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsll_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsll_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsll_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vslli_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vslli_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vslli_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vslli_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsra_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsra_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsra_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsra_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsrai_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsrai_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsrai_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsrai_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsrar_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsrar_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrar_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrar_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsrari_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsrari_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsrari_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsrari_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsrl_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsrl_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrl_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrl_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsrli_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsrli_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsrli_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsrli_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsrlr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsrlr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrlr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrlr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsrlri_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsrlri_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsrlri_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsrlri_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vbitclr_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vbitclr_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vbitclr_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vbitclr_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vbitclri_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vbitclri_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vbitclri_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vbitclri_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vbitset_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vbitset_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vbitset_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vbitset_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vbitseti_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vbitseti_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vbitseti_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vbitseti_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vbitrev_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vbitrev_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vbitrev_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vbitrev_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vbitrevi_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vbitrevi_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vbitrevi_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vbitrevi_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vaddi_bu, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vaddi_hu, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vaddi_wu, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vaddi_du, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsub_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsub_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsub_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsub_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsubi_bu, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsubi_hu, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsubi_wu, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsubi_du, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vmax_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmax_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmax_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmax_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmaxi_b, LARCH_V16QI_FTYPE_V16QI_QI),
+  LSX_BUILTIN (vmaxi_h, LARCH_V8HI_FTYPE_V8HI_QI),
+  LSX_BUILTIN (vmaxi_w, LARCH_V4SI_FTYPE_V4SI_QI),
+  LSX_BUILTIN (vmaxi_d, LARCH_V2DI_FTYPE_V2DI_QI),
+  LSX_BUILTIN (vmax_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmax_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmax_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmax_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmaxi_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vmaxi_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vmaxi_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vmaxi_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vmin_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmin_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmin_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmin_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmini_b, LARCH_V16QI_FTYPE_V16QI_QI),
+  LSX_BUILTIN (vmini_h, LARCH_V8HI_FTYPE_V8HI_QI),
+  LSX_BUILTIN (vmini_w, LARCH_V4SI_FTYPE_V4SI_QI),
+  LSX_BUILTIN (vmini_d, LARCH_V2DI_FTYPE_V2DI_QI),
+  LSX_BUILTIN (vmin_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmin_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmin_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmin_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmini_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vmini_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vmini_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vmini_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vseq_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vseq_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vseq_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vseq_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vseqi_b, LARCH_V16QI_FTYPE_V16QI_QI),
+  LSX_BUILTIN (vseqi_h, LARCH_V8HI_FTYPE_V8HI_QI),
+  LSX_BUILTIN (vseqi_w, LARCH_V4SI_FTYPE_V4SI_QI),
+  LSX_BUILTIN (vseqi_d, LARCH_V2DI_FTYPE_V2DI_QI),
+  LSX_BUILTIN (vslti_b, LARCH_V16QI_FTYPE_V16QI_QI),
+  LSX_BUILTIN (vslt_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vslt_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vslt_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vslt_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vslti_h, LARCH_V8HI_FTYPE_V8HI_QI),
+  LSX_BUILTIN (vslti_w, LARCH_V4SI_FTYPE_V4SI_QI),
+  LSX_BUILTIN (vslti_d, LARCH_V2DI_FTYPE_V2DI_QI),
+  LSX_BUILTIN (vslt_bu, LARCH_V16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vslt_hu, LARCH_V8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vslt_wu, LARCH_V4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vslt_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vslti_bu, LARCH_V16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vslti_hu, LARCH_V8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vslti_wu, LARCH_V4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vslti_du, LARCH_V2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vsle_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsle_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsle_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsle_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vslei_b, LARCH_V16QI_FTYPE_V16QI_QI),
+  LSX_BUILTIN (vslei_h, LARCH_V8HI_FTYPE_V8HI_QI),
+  LSX_BUILTIN (vslei_w, LARCH_V4SI_FTYPE_V4SI_QI),
+  LSX_BUILTIN (vslei_d, LARCH_V2DI_FTYPE_V2DI_QI),
+  LSX_BUILTIN (vsle_bu, LARCH_V16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vsle_hu, LARCH_V8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vsle_wu, LARCH_V4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vsle_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vslei_bu, LARCH_V16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vslei_hu, LARCH_V8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vslei_wu, LARCH_V4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vslei_du, LARCH_V2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vsat_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsat_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsat_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsat_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vsat_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vsat_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vsat_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vsat_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+  LSX_BUILTIN (vadda_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vadda_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vadda_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vadda_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsadd_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vsadd_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vsadd_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vsadd_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vavg_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vavg_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vavg_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vavg_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vavg_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vavg_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vavg_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vavg_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vavgr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vavgr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vavgr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vavgr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vavgr_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vavgr_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vavgr_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vavgr_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vssub_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vssub_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vssub_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vssub_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssub_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vssub_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vssub_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vssub_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vabsd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vabsd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vabsd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vabsd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vabsd_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vabsd_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vabsd_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vabsd_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmul_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmul_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmul_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmul_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+  LSX_BUILTIN (vmadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+  LSX_BUILTIN (vmadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+  LSX_BUILTIN (vmadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+  LSX_BUILTIN (vmsub_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+  LSX_BUILTIN (vmsub_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+  LSX_BUILTIN (vmsub_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+  LSX_BUILTIN (vmsub_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+  LSX_BUILTIN (vdiv_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vdiv_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vdiv_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vdiv_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vdiv_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vdiv_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vdiv_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vdiv_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vhaddw_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vhaddw_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vhaddw_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vhaddw_hu_bu, LARCH_UV8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vhaddw_wu_hu, LARCH_UV4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vhaddw_du_wu, LARCH_UV2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vhsubw_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vhsubw_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vhsubw_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vhsubw_hu_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vhsubw_wu_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vhsubw_du_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmod_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmod_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmod_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmod_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vreplve_b, LARCH_V16QI_FTYPE_V16QI_SI),
+  LSX_BUILTIN (vreplve_h, LARCH_V8HI_FTYPE_V8HI_SI),
+  LSX_BUILTIN (vreplve_w, LARCH_V4SI_FTYPE_V4SI_SI),
+  LSX_BUILTIN (vreplve_d, LARCH_V2DI_FTYPE_V2DI_SI),
+  LSX_BUILTIN (vreplvei_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vreplvei_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vreplvei_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vreplvei_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vpickev_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vpickev_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vpickev_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vpickev_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vpickod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vpickod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vpickod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vpickod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vilvh_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vilvh_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vilvh_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vilvh_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vilvl_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vilvl_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vilvl_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vilvl_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vpackev_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vpackev_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vpackev_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vpackev_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vpackod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vpackod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vpackod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vpackod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vshuf_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+  LSX_BUILTIN (vshuf_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+  LSX_BUILTIN (vshuf_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+  LSX_BUILTIN (vand_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vandi_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vnor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vnori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vxor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vxori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vbitsel_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI_UV16QI),
+  LSX_BUILTIN (vbitseli_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI_USI),
+  LSX_BUILTIN (vshuf4i_b, LARCH_V16QI_FTYPE_V16QI_USI),
+  LSX_BUILTIN (vshuf4i_h, LARCH_V8HI_FTYPE_V8HI_USI),
+  LSX_BUILTIN (vshuf4i_w, LARCH_V4SI_FTYPE_V4SI_USI),
+  LSX_BUILTIN (vreplgr2vr_b, LARCH_V16QI_FTYPE_SI),
+  LSX_BUILTIN (vreplgr2vr_h, LARCH_V8HI_FTYPE_SI),
+  LSX_BUILTIN (vreplgr2vr_w, LARCH_V4SI_FTYPE_SI),
+  LSX_BUILTIN (vreplgr2vr_d, LARCH_V2DI_FTYPE_DI),
+  LSX_BUILTIN (vpcnt_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vpcnt_h, LARCH_V8HI_FTYPE_V8HI),
+  LSX_BUILTIN (vpcnt_w, LARCH_V4SI_FTYPE_V4SI),
+  LSX_BUILTIN (vpcnt_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vclo_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vclo_h, LARCH_V8HI_FTYPE_V8HI),
+  LSX_BUILTIN (vclo_w, LARCH_V4SI_FTYPE_V4SI),
+  LSX_BUILTIN (vclo_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vclz_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vclz_h, LARCH_V8HI_FTYPE_V8HI),
+  LSX_BUILTIN (vclz_w, LARCH_V4SI_FTYPE_V4SI),
+  LSX_BUILTIN (vclz_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vpickve2gr_b, LARCH_SI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vpickve2gr_h, LARCH_SI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vpickve2gr_w, LARCH_SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vpickve2gr_d, LARCH_DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vpickve2gr_bu, LARCH_USI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vpickve2gr_hu, LARCH_USI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vpickve2gr_wu, LARCH_USI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vpickve2gr_du, LARCH_UDI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vinsgr2vr_b, LARCH_V16QI_FTYPE_V16QI_SI_UQI),
+  LSX_BUILTIN (vinsgr2vr_h, LARCH_V8HI_FTYPE_V8HI_SI_UQI),
+  LSX_BUILTIN (vinsgr2vr_w, LARCH_V4SI_FTYPE_V4SI_SI_UQI),
+  LSX_BUILTIN (vinsgr2vr_d, LARCH_V2DI_FTYPE_V2DI_DI_UQI),
+  LSX_BUILTIN_TEST_BRANCH (bnz_b, LARCH_SI_FTYPE_UV16QI),
+  LSX_BUILTIN_TEST_BRANCH (bnz_h, LARCH_SI_FTYPE_UV8HI),
+  LSX_BUILTIN_TEST_BRANCH (bnz_w, LARCH_SI_FTYPE_UV4SI),
+  LSX_BUILTIN_TEST_BRANCH (bnz_d, LARCH_SI_FTYPE_UV2DI),
+  LSX_BUILTIN_TEST_BRANCH (bz_b, LARCH_SI_FTYPE_UV16QI),
+  LSX_BUILTIN_TEST_BRANCH (bz_h, LARCH_SI_FTYPE_UV8HI),
+  LSX_BUILTIN_TEST_BRANCH (bz_w, LARCH_SI_FTYPE_UV4SI),
+  LSX_BUILTIN_TEST_BRANCH (bz_d, LARCH_SI_FTYPE_UV2DI),
+  LSX_BUILTIN_TEST_BRANCH (bz_v, LARCH_SI_FTYPE_UV16QI),
+  LSX_BUILTIN_TEST_BRANCH (bnz_v, LARCH_SI_FTYPE_UV16QI),
+  LSX_BUILTIN (vrepli_b, LARCH_V16QI_FTYPE_HI),
+  LSX_BUILTIN (vrepli_h, LARCH_V8HI_FTYPE_HI),
+  LSX_BUILTIN (vrepli_w, LARCH_V4SI_FTYPE_HI),
+  LSX_BUILTIN (vrepli_d, LARCH_V2DI_FTYPE_HI),
+  LSX_BUILTIN (vfcmp_caf_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_caf_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cor_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cor_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cun_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cun_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cune_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cune_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cueq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cueq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_ceq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_ceq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cne_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cne_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_clt_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_clt_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cult_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cult_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cle_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cle_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_cule_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_cule_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_saf_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_saf_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sor_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sor_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sun_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sun_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sune_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sune_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sueq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sueq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_seq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_seq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sne_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sne_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_slt_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_slt_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sult_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sult_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sle_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sle_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcmp_sule_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcmp_sule_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfmul_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfmul_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfdiv_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfdiv_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfcvt_h_s, LARCH_V8HI_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfcvt_s_d, LARCH_V4SF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfmin_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfmin_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfmina_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfmina_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfmax_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfmax_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfmaxa_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+  LSX_BUILTIN (vfmaxa_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vfclass_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vfclass_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vfsqrt_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfsqrt_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrecip_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrecip_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrint_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrint_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrsqrt_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrsqrt_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vflogb_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vflogb_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfcvth_s_h, LARCH_V4SF_FTYPE_V8HI),
+  LSX_BUILTIN (vfcvth_d_s, LARCH_V2DF_FTYPE_V4SF),
+  LSX_BUILTIN (vfcvtl_s_h, LARCH_V4SF_FTYPE_V8HI),
+  LSX_BUILTIN (vfcvtl_d_s, LARCH_V2DF_FTYPE_V4SF),
+  LSX_BUILTIN (vftint_w_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftint_l_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftint_wu_s, LARCH_UV4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftint_lu_d, LARCH_UV2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftintrz_w_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrz_l_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftintrz_wu_s, LARCH_UV4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrz_lu_d, LARCH_UV2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vffint_s_w, LARCH_V4SF_FTYPE_V4SI),
+  LSX_BUILTIN (vffint_d_l, LARCH_V2DF_FTYPE_V2DI),
+  LSX_BUILTIN (vffint_s_wu, LARCH_V4SF_FTYPE_UV4SI),
+  LSX_BUILTIN (vffint_d_lu, LARCH_V2DF_FTYPE_UV2DI),
+
+  LSX_BUILTIN (vandn_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vneg_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vneg_h, LARCH_V8HI_FTYPE_V8HI),
+  LSX_BUILTIN (vneg_w, LARCH_V4SI_FTYPE_V4SI),
+  LSX_BUILTIN (vneg_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vmuh_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmuh_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmuh_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmuh_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmuh_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmuh_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmuh_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmuh_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsllwil_h_b, LARCH_V8HI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vsllwil_w_h, LARCH_V4SI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vsllwil_d_w, LARCH_V2DI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vsllwil_hu_bu, LARCH_UV8HI_FTYPE_UV16QI_UQI),
+  LSX_BUILTIN (vsllwil_wu_hu, LARCH_UV4SI_FTYPE_UV8HI_UQI),
+  LSX_BUILTIN (vsllwil_du_wu, LARCH_UV2DI_FTYPE_UV4SI_UQI),
+  LSX_BUILTIN (vsran_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsran_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsran_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssran_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vssran_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vssran_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssran_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vssran_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vssran_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsrarn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrarn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrarn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssrarn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vssrarn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vssrarn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssrarn_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vssrarn_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vssrarn_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsrln_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrln_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrln_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssrln_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vssrln_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vssrln_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsrlrn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsrlrn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsrlrn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssrlrn_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vssrlrn_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vssrlrn_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vfrstpi_b, LARCH_V16QI_FTYPE_V16QI_V16QI_UQI),
+  LSX_BUILTIN (vfrstpi_h, LARCH_V8HI_FTYPE_V8HI_V8HI_UQI),
+  LSX_BUILTIN (vfrstp_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+  LSX_BUILTIN (vfrstp_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+  LSX_BUILTIN (vshuf4i_d, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vbsrl_v, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vbsll_v, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vextrins_b, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vextrins_h, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vextrins_w, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vextrins_d, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vmskltz_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vmskltz_h, LARCH_V8HI_FTYPE_V8HI),
+  LSX_BUILTIN (vmskltz_w, LARCH_V4SI_FTYPE_V4SI),
+  LSX_BUILTIN (vmskltz_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vsigncov_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsigncov_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsigncov_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsigncov_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vfmadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+  LSX_BUILTIN (vfmadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+  LSX_BUILTIN (vfmsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+  LSX_BUILTIN (vfmsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+  LSX_BUILTIN (vfnmadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+  LSX_BUILTIN (vfnmadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+  LSX_BUILTIN (vfnmsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+  LSX_BUILTIN (vfnmsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+  LSX_BUILTIN (vftintrne_w_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrne_l_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftintrp_w_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrp_l_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftintrm_w_s, LARCH_V4SI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrm_l_d, LARCH_V2DI_FTYPE_V2DF),
+  LSX_BUILTIN (vftint_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vffint_s_l, LARCH_V4SF_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vftintrz_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vftintrp_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vftintrm_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vftintrne_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+  LSX_BUILTIN (vftintl_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftinth_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vffinth_d_w, LARCH_V2DF_FTYPE_V4SI),
+  LSX_BUILTIN (vffintl_d_w, LARCH_V2DF_FTYPE_V4SI),
+  LSX_BUILTIN (vftintrzl_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrzh_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrpl_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrph_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrml_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrmh_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrnel_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vftintrneh_l_s, LARCH_V2DI_FTYPE_V4SF),
+  LSX_BUILTIN (vfrintrne_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrintrne_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrintrz_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrintrz_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrintrp_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrintrp_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_BUILTIN (vfrintrm_s, LARCH_V4SF_FTYPE_V4SF),
+  LSX_BUILTIN (vfrintrm_d, LARCH_V2DF_FTYPE_V2DF),
+  LSX_NO_TARGET_BUILTIN (vstelm_b, LARCH_VOID_FTYPE_V16QI_CVPOINTER_SI_UQI),
+  LSX_NO_TARGET_BUILTIN (vstelm_h, LARCH_VOID_FTYPE_V8HI_CVPOINTER_SI_UQI),
+  LSX_NO_TARGET_BUILTIN (vstelm_w, LARCH_VOID_FTYPE_V4SI_CVPOINTER_SI_UQI),
+  LSX_NO_TARGET_BUILTIN (vstelm_d, LARCH_VOID_FTYPE_V2DI_CVPOINTER_SI_UQI),
+  LSX_BUILTIN (vaddwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vaddwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vaddwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vaddwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vaddwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vaddwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vaddwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vaddwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vaddwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vaddwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vaddwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vaddwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vaddwev_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+  LSX_BUILTIN (vaddwev_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+  LSX_BUILTIN (vaddwev_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+  LSX_BUILTIN (vaddwod_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+  LSX_BUILTIN (vaddwod_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+  LSX_BUILTIN (vaddwod_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+  LSX_BUILTIN (vsubwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsubwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsubwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsubwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vsubwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vsubwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vsubwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vsubwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vsubwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vsubwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vsubwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vsubwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vaddwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vaddwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vaddwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vaddwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsubwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsubwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsubwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vsubwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vaddwev_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+  LSX_BUILTIN (vaddwod_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+
+  LSX_BUILTIN (vmulwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmulwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmulwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmulwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vmulwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vmulwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vmulwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmulwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmulwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmulwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+  LSX_BUILTIN (vmulwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+  LSX_BUILTIN (vmulwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+  LSX_BUILTIN (vmulwev_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+  LSX_BUILTIN (vmulwev_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+  LSX_BUILTIN (vmulwev_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+  LSX_BUILTIN (vmulwod_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+  LSX_BUILTIN (vmulwod_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+  LSX_BUILTIN (vmulwod_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+  LSX_BUILTIN (vmulwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmulwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vmulwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmulwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmulwev_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+  LSX_BUILTIN (vmulwod_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+  LSX_BUILTIN (vhaddw_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vhaddw_qu_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vhsubw_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vhsubw_qu_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+  LSX_BUILTIN (vmaddwev_d_w, LARCH_V2DI_FTYPE_V2DI_V4SI_V4SI),
+  LSX_BUILTIN (vmaddwev_w_h, LARCH_V4SI_FTYPE_V4SI_V8HI_V8HI),
+  LSX_BUILTIN (vmaddwev_h_b, LARCH_V8HI_FTYPE_V8HI_V16QI_V16QI),
+  LSX_BUILTIN (vmaddwev_d_wu, LARCH_UV2DI_FTYPE_UV2DI_UV4SI_UV4SI),
+  LSX_BUILTIN (vmaddwev_w_hu, LARCH_UV4SI_FTYPE_UV4SI_UV8HI_UV8HI),
+  LSX_BUILTIN (vmaddwev_h_bu, LARCH_UV8HI_FTYPE_UV8HI_UV16QI_UV16QI),
+  LSX_BUILTIN (vmaddwod_d_w, LARCH_V2DI_FTYPE_V2DI_V4SI_V4SI),
+  LSX_BUILTIN (vmaddwod_w_h, LARCH_V4SI_FTYPE_V4SI_V8HI_V8HI),
+  LSX_BUILTIN (vmaddwod_h_b, LARCH_V8HI_FTYPE_V8HI_V16QI_V16QI),
+  LSX_BUILTIN (vmaddwod_d_wu, LARCH_UV2DI_FTYPE_UV2DI_UV4SI_UV4SI),
+  LSX_BUILTIN (vmaddwod_w_hu, LARCH_UV4SI_FTYPE_UV4SI_UV8HI_UV8HI),
+  LSX_BUILTIN (vmaddwod_h_bu, LARCH_UV8HI_FTYPE_UV8HI_UV16QI_UV16QI),
+  LSX_BUILTIN (vmaddwev_d_wu_w, LARCH_V2DI_FTYPE_V2DI_UV4SI_V4SI),
+  LSX_BUILTIN (vmaddwev_w_hu_h, LARCH_V4SI_FTYPE_V4SI_UV8HI_V8HI),
+  LSX_BUILTIN (vmaddwev_h_bu_b, LARCH_V8HI_FTYPE_V8HI_UV16QI_V16QI),
+  LSX_BUILTIN (vmaddwod_d_wu_w, LARCH_V2DI_FTYPE_V2DI_UV4SI_V4SI),
+  LSX_BUILTIN (vmaddwod_w_hu_h, LARCH_V4SI_FTYPE_V4SI_UV8HI_V8HI),
+  LSX_BUILTIN (vmaddwod_h_bu_b, LARCH_V8HI_FTYPE_V8HI_UV16QI_V16QI),
+  LSX_BUILTIN (vmaddwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+  LSX_BUILTIN (vmaddwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+  LSX_BUILTIN (vmaddwev_q_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI_UV2DI),
+  LSX_BUILTIN (vmaddwod_q_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI_UV2DI),
+  LSX_BUILTIN (vmaddwev_q_du_d, LARCH_V2DI_FTYPE_V2DI_UV2DI_V2DI),
+  LSX_BUILTIN (vmaddwod_q_du_d, LARCH_V2DI_FTYPE_V2DI_UV2DI_V2DI),
+  LSX_BUILTIN (vrotr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vrotr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vrotr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vrotr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vadd_q, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vsub_q, LARCH_V2DI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vldrepl_b, LARCH_V16QI_FTYPE_CVPOINTER_SI),
+  LSX_BUILTIN (vldrepl_h, LARCH_V8HI_FTYPE_CVPOINTER_SI),
+  LSX_BUILTIN (vldrepl_w, LARCH_V4SI_FTYPE_CVPOINTER_SI),
+  LSX_BUILTIN (vldrepl_d, LARCH_V2DI_FTYPE_CVPOINTER_SI),
+
+  LSX_BUILTIN (vmskgez_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vmsknz_b, LARCH_V16QI_FTYPE_V16QI),
+  LSX_BUILTIN (vexth_h_b, LARCH_V8HI_FTYPE_V16QI),
+  LSX_BUILTIN (vexth_w_h, LARCH_V4SI_FTYPE_V8HI),
+  LSX_BUILTIN (vexth_d_w, LARCH_V2DI_FTYPE_V4SI),
+  LSX_BUILTIN (vexth_q_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vexth_hu_bu, LARCH_UV8HI_FTYPE_UV16QI),
+  LSX_BUILTIN (vexth_wu_hu, LARCH_UV4SI_FTYPE_UV8HI),
+  LSX_BUILTIN (vexth_du_wu, LARCH_UV2DI_FTYPE_UV4SI),
+  LSX_BUILTIN (vexth_qu_du, LARCH_UV2DI_FTYPE_UV2DI),
+  LSX_BUILTIN (vrotri_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+  LSX_BUILTIN (vrotri_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+  LSX_BUILTIN (vrotri_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+  LSX_BUILTIN (vrotri_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+  LSX_BUILTIN (vextl_q_d, LARCH_V2DI_FTYPE_V2DI),
+  LSX_BUILTIN (vsrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vsrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vsrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vsrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vsrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vsrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vsrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vsrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vssrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vssrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vssrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrlni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+  LSX_BUILTIN (vssrlni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+  LSX_BUILTIN (vssrlni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+  LSX_BUILTIN (vssrlni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+  LSX_BUILTIN (vssrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vssrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vssrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vssrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrlrni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+  LSX_BUILTIN (vssrlrni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+  LSX_BUILTIN (vssrlrni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+  LSX_BUILTIN (vssrlrni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+  LSX_BUILTIN (vsrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vsrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vsrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vsrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vsrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vsrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vsrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vsrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vssrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vssrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vssrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrani_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+  LSX_BUILTIN (vssrani_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+  LSX_BUILTIN (vssrani_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+  LSX_BUILTIN (vssrani_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+  LSX_BUILTIN (vssrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+  LSX_BUILTIN (vssrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+  LSX_BUILTIN (vssrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vssrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+  LSX_BUILTIN (vssrarni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+  LSX_BUILTIN (vssrarni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+  LSX_BUILTIN (vssrarni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+  LSX_BUILTIN (vssrarni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+  LSX_BUILTIN (vpermi_w, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+  LSX_BUILTIN (vld, LARCH_V16QI_FTYPE_CVPOINTER_SI),
+  LSX_NO_TARGET_BUILTIN (vst, LARCH_VOID_FTYPE_V16QI_CVPOINTER_SI),
+  LSX_BUILTIN (vssrlrn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vssrlrn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vssrlrn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vssrln_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+  LSX_BUILTIN (vssrln_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+  LSX_BUILTIN (vssrln_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+  LSX_BUILTIN (vorn_v, LARCH_V16QI_FTYPE_V16QI_V16QI),
+  LSX_BUILTIN (vldi, LARCH_V2DI_FTYPE_HI),
+  LSX_BUILTIN (vshuf_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+  LSX_BUILTIN (vldx, LARCH_V16QI_FTYPE_CVPOINTER_DI),
+  LSX_NO_TARGET_BUILTIN (vstx, LARCH_VOID_FTYPE_V16QI_CVPOINTER_DI),
+  LSX_BUILTIN (vextl_qu_du, LARCH_UV2DI_FTYPE_UV2DI)
 };
 
 /* Index I is the function declaration for loongarch_builtins[I], or null if
@@ -193,11 +1219,46 @@  static GTY (()) tree loongarch_builtin_decls[ARRAY_SIZE (loongarch_builtins)];
    using the instruction code or return null if not defined for the target.  */
 static GTY (()) int loongarch_get_builtin_decl_index[NUM_INSN_CODES];
 
+
+/* MODE is a vector mode whose elements have type TYPE.  Return the type
+   of the vector itself.  */
+
+static tree
+loongarch_builtin_vector_type (tree type, machine_mode mode)
+{
+  /* One memoization slot per machine mode, twice over: slot MODE for
+     signed-integer and floating-point element types, and slot
+     MODE + MAX_MACHINE_MODE for unsigned-integer element types, so that
+     e.g. V16QI and UV16QI get distinct vector tree nodes.  */
+  static tree types[2 * (int) MAX_MACHINE_MODE];
+  int mode_index;
+
+  mode_index = (int) mode;
+
+  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type))
+    mode_index += MAX_MACHINE_MODE;
+
+  /* Build the vector type lazily on first use, then reuse it.  */
+  if (types[mode_index] == NULL_TREE)
+    types[mode_index] = build_vector_type_for_mode (type, mode);
+  return types[mode_index];
+}
+
+/* Return a type for 'const volatile void *'.  */
+
+static tree
+loongarch_build_cvpointer_type (void)
+{
+  /* Built once on first call and cached; the node is shared by every
+     CVPOINTER argument type thereafter.  */
+  static tree cache;
+
+  if (cache == NULL_TREE)
+    cache = build_pointer_type (build_qualified_type (void_type_node,
+						      TYPE_QUAL_CONST
+						      | TYPE_QUAL_VOLATILE));
+  return cache;
+}
+
 /* Source-level argument types.  */
 #define LARCH_ATYPE_VOID void_type_node
 #define LARCH_ATYPE_INT integer_type_node
 #define LARCH_ATYPE_POINTER ptr_type_node
-
+#define LARCH_ATYPE_CVPOINTER loongarch_build_cvpointer_type ()
+#define LARCH_ATYPE_BOOLEAN boolean_type_node
 /* Standard mode-based argument types.  */
 #define LARCH_ATYPE_QI intQI_type_node
 #define LARCH_ATYPE_UQI unsigned_intQI_type_node
@@ -210,6 +1271,72 @@  static GTY (()) int loongarch_get_builtin_decl_index[NUM_INSN_CODES];
 #define LARCH_ATYPE_SF float_type_node
 #define LARCH_ATYPE_DF double_type_node
 
+/* Vector argument types.  */
+#define LARCH_ATYPE_V2SF						\
+  loongarch_builtin_vector_type (float_type_node, V2SFmode)
+#define LARCH_ATYPE_V2HI						\
+  loongarch_builtin_vector_type (intHI_type_node, V2HImode)
+#define LARCH_ATYPE_V2SI						\
+  loongarch_builtin_vector_type (intSI_type_node, V2SImode)
+#define LARCH_ATYPE_V4QI						\
+  loongarch_builtin_vector_type (intQI_type_node, V4QImode)
+#define LARCH_ATYPE_V4HI						\
+  loongarch_builtin_vector_type (intHI_type_node, V4HImode)
+#define LARCH_ATYPE_V8QI						\
+  loongarch_builtin_vector_type (intQI_type_node, V8QImode)
+
+#define LARCH_ATYPE_V2DI						\
+  loongarch_builtin_vector_type (long_long_integer_type_node, V2DImode)
+#define LARCH_ATYPE_V4SI						\
+  loongarch_builtin_vector_type (intSI_type_node, V4SImode)
+#define LARCH_ATYPE_V8HI						\
+  loongarch_builtin_vector_type (intHI_type_node, V8HImode)
+#define LARCH_ATYPE_V16QI						\
+  loongarch_builtin_vector_type (intQI_type_node, V16QImode)
+#define LARCH_ATYPE_V2DF						\
+  loongarch_builtin_vector_type (double_type_node, V2DFmode)
+#define LARCH_ATYPE_V4SF						\
+  loongarch_builtin_vector_type (float_type_node, V4SFmode)
+
+/* LoongArch ASX.  */
+#define LARCH_ATYPE_V4DI						\
+  loongarch_builtin_vector_type (long_long_integer_type_node, V4DImode)
+#define LARCH_ATYPE_V8SI						\
+  loongarch_builtin_vector_type (intSI_type_node, V8SImode)
+#define LARCH_ATYPE_V16HI						\
+  loongarch_builtin_vector_type (intHI_type_node, V16HImode)
+#define LARCH_ATYPE_V32QI						\
+  loongarch_builtin_vector_type (intQI_type_node, V32QImode)
+#define LARCH_ATYPE_V4DF						\
+  loongarch_builtin_vector_type (double_type_node, V4DFmode)
+#define LARCH_ATYPE_V8SF						\
+  loongarch_builtin_vector_type (float_type_node, V8SFmode)
+
+#define LARCH_ATYPE_UV2DI					\
+  loongarch_builtin_vector_type (long_long_unsigned_type_node, V2DImode)
+#define LARCH_ATYPE_UV4SI					\
+  loongarch_builtin_vector_type (unsigned_intSI_type_node, V4SImode)
+#define LARCH_ATYPE_UV8HI					\
+  loongarch_builtin_vector_type (unsigned_intHI_type_node, V8HImode)
+#define LARCH_ATYPE_UV16QI					\
+  loongarch_builtin_vector_type (unsigned_intQI_type_node, V16QImode)
+
+#define LARCH_ATYPE_UV4DI					\
+  loongarch_builtin_vector_type (long_long_unsigned_type_node, V4DImode)
+#define LARCH_ATYPE_UV8SI					\
+  loongarch_builtin_vector_type (unsigned_intSI_type_node, V8SImode)
+#define LARCH_ATYPE_UV16HI					\
+  loongarch_builtin_vector_type (unsigned_intHI_type_node, V16HImode)
+#define LARCH_ATYPE_UV32QI					\
+  loongarch_builtin_vector_type (unsigned_intQI_type_node, V32QImode)
+
+#define LARCH_ATYPE_UV2SI					\
+  loongarch_builtin_vector_type (unsigned_intSI_type_node, V2SImode)
+#define LARCH_ATYPE_UV4HI					\
+  loongarch_builtin_vector_type (unsigned_intHI_type_node, V4HImode)
+#define LARCH_ATYPE_UV8QI					\
+  loongarch_builtin_vector_type (unsigned_intQI_type_node, V8QImode)
+
 /* LARCH_FTYPE_ATYPESN takes N LARCH_FTYPES-like type codes and lists
    their associated LARCH_ATYPEs.  */
 #define LARCH_FTYPE_ATYPES1(A, B) LARCH_ATYPE_##A, LARCH_ATYPE_##B
@@ -283,6 +1410,92 @@  loongarch_builtin_decl (unsigned int code, bool initialize_p ATTRIBUTE_UNUSED)
   return loongarch_builtin_decls[code];
 }
 
+/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION.  */
+
+tree
+loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
+				       tree type_in)
+{
+  machine_mode in_mode, out_mode;
+  int in_n, out_n;
+
+  /* Only vector-to-vector mappings are provided, and only when LSX is
+     enabled; otherwise let the vectorizer fall back to scalar calls.  */
+  if (TREE_CODE (type_out) != VECTOR_TYPE
+      || TREE_CODE (type_in) != VECTOR_TYPE
+      || !ISA_HAS_LSX)
+    return NULL_TREE;
+
+  out_mode = TYPE_MODE (TREE_TYPE (type_out));
+  out_n = TYPE_VECTOR_SUBPARTS (type_out);
+  in_mode = TYPE_MODE (TREE_TYPE (type_in));
+  in_n = TYPE_VECTOR_SUBPARTS (type_in);
+
+  /* INSN is the name of the associated instruction pattern, without
+     the leading CODE_FOR_.  */
+#define LARCH_GET_BUILTIN(INSN) \
+  loongarch_builtin_decls[loongarch_get_builtin_decl_index[CODE_FOR_##INSN]]
+
+  /* NOTE(review): LARCH_GET_BUILTIN is never #undef'd after this
+     function -- harmless today, but it leaks into the rest of the
+     translation unit.  */
+
+  switch (fn)
+    {
+    /* ceil -> vfrintrp.{s,d} (round toward +inf).  */
+    CASE_CFN_CEIL:
+      if (out_mode == DFmode && in_mode == DFmode)
+    {
+      if (out_n == 2 && in_n == 2)
+	return LARCH_GET_BUILTIN (lsx_vfrintrp_d);
+    }
+      if (out_mode == SFmode && in_mode == SFmode)
+    {
+      if (out_n == 4 && in_n == 4)
+	return LARCH_GET_BUILTIN (lsx_vfrintrp_s);
+    }
+      break;
+
+    /* trunc -> vfrintrz.{s,d} (round toward zero).  */
+    CASE_CFN_TRUNC:
+      if (out_mode == DFmode && in_mode == DFmode)
+    {
+      if (out_n == 2 && in_n == 2)
+	return LARCH_GET_BUILTIN (lsx_vfrintrz_d);
+    }
+      if (out_mode == SFmode && in_mode == SFmode)
+    {
+      if (out_n == 4 && in_n == 4)
+	return LARCH_GET_BUILTIN (lsx_vfrintrz_s);
+    }
+      break;
+
+    /* NOTE(review): vfrint rounds according to the current FP rounding
+       mode, which matches rint but not round (round takes halfway cases
+       away from zero) -- confirm the CFN_ROUND mapping is only reached
+       under flags that permit this, e.g. -funsafe-math-optimizations.  */
+    CASE_CFN_RINT:
+    CASE_CFN_ROUND:
+      if (out_mode == DFmode && in_mode == DFmode)
+    {
+      if (out_n == 2 && in_n == 2)
+	return LARCH_GET_BUILTIN (lsx_vfrint_d);
+    }
+      if (out_mode == SFmode && in_mode == SFmode)
+    {
+      if (out_n == 4 && in_n == 4)
+	return LARCH_GET_BUILTIN (lsx_vfrint_s);
+    }
+      break;
+
+    /* floor -> vfrintrm.{s,d} (round toward -inf).  */
+    CASE_CFN_FLOOR:
+      if (out_mode == DFmode && in_mode == DFmode)
+    {
+      if (out_n == 2 && in_n == 2)
+	return LARCH_GET_BUILTIN (lsx_vfrintrm_d);
+    }
+      if (out_mode == SFmode && in_mode == SFmode)
+    {
+      if (out_n == 4 && in_n == 4)
+	return LARCH_GET_BUILTIN (lsx_vfrintrm_s);
+    }
+      break;
+
+    default:
+      break;
+    }
+
+  return NULL_TREE;
+}
+
 /* Take argument ARGNO from EXP's argument list and convert it into
    an expand operand.  Store the operand in *OP.  */
 
@@ -318,7 +1531,236 @@  static rtx
 loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
 			       struct expand_operand *ops, bool has_target_p)
 {
-  if (!maybe_expand_insn (icode, nops, ops))
+  machine_mode imode;
+  int rangelo = 0, rangehi = 0, error_opno = 0;
+
+  switch (icode)
+    {
+    case CODE_FOR_lsx_vaddi_bu:
+    case CODE_FOR_lsx_vaddi_hu:
+    case CODE_FOR_lsx_vaddi_wu:
+    case CODE_FOR_lsx_vaddi_du:
+    case CODE_FOR_lsx_vslti_bu:
+    case CODE_FOR_lsx_vslti_hu:
+    case CODE_FOR_lsx_vslti_wu:
+    case CODE_FOR_lsx_vslti_du:
+    case CODE_FOR_lsx_vslei_bu:
+    case CODE_FOR_lsx_vslei_hu:
+    case CODE_FOR_lsx_vslei_wu:
+    case CODE_FOR_lsx_vslei_du:
+    case CODE_FOR_lsx_vmaxi_bu:
+    case CODE_FOR_lsx_vmaxi_hu:
+    case CODE_FOR_lsx_vmaxi_wu:
+    case CODE_FOR_lsx_vmaxi_du:
+    case CODE_FOR_lsx_vmini_bu:
+    case CODE_FOR_lsx_vmini_hu:
+    case CODE_FOR_lsx_vmini_wu:
+    case CODE_FOR_lsx_vmini_du:
+    case CODE_FOR_lsx_vsubi_bu:
+    case CODE_FOR_lsx_vsubi_hu:
+    case CODE_FOR_lsx_vsubi_wu:
+    case CODE_FOR_lsx_vsubi_du:
+      gcc_assert (has_target_p && nops == 3);
+      /* We only generate a vector of constants iff the second argument
+	 is an immediate.  We also validate the range of the immediate.  */
+      if (CONST_INT_P (ops[2].value))
+	{
+	  rangelo = 0;
+	  rangehi = 31;
+	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+	    {
+	      ops[2].mode = ops[0].mode;
+	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+							     INTVAL (ops[2].value));
+	    }
+	  else
+	    error_opno = 2;
+	}
+      break;
+
+    case CODE_FOR_lsx_vseqi_b:
+    case CODE_FOR_lsx_vseqi_h:
+    case CODE_FOR_lsx_vseqi_w:
+    case CODE_FOR_lsx_vseqi_d:
+    case CODE_FOR_lsx_vslti_b:
+    case CODE_FOR_lsx_vslti_h:
+    case CODE_FOR_lsx_vslti_w:
+    case CODE_FOR_lsx_vslti_d:
+    case CODE_FOR_lsx_vslei_b:
+    case CODE_FOR_lsx_vslei_h:
+    case CODE_FOR_lsx_vslei_w:
+    case CODE_FOR_lsx_vslei_d:
+    case CODE_FOR_lsx_vmaxi_b:
+    case CODE_FOR_lsx_vmaxi_h:
+    case CODE_FOR_lsx_vmaxi_w:
+    case CODE_FOR_lsx_vmaxi_d:
+    case CODE_FOR_lsx_vmini_b:
+    case CODE_FOR_lsx_vmini_h:
+    case CODE_FOR_lsx_vmini_w:
+    case CODE_FOR_lsx_vmini_d:
+      gcc_assert (has_target_p && nops == 3);
+      /* We only generate a vector of constants iff the second argument
+	 is an immediate.  We also validate the range of the immediate.  */
+      if (CONST_INT_P (ops[2].value))
+	{
+	  rangelo = -16;
+	  rangehi = 15;
+	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+	    {
+	      ops[2].mode = ops[0].mode;
+	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+							     INTVAL (ops[2].value));
+	    }
+	  else
+	    error_opno = 2;
+	}
+      break;
+
+    case CODE_FOR_lsx_vandi_b:
+    case CODE_FOR_lsx_vori_b:
+    case CODE_FOR_lsx_vnori_b:
+    case CODE_FOR_lsx_vxori_b:
+      gcc_assert (has_target_p && nops == 3);
+      if (!CONST_INT_P (ops[2].value))
+	break;
+      ops[2].mode = ops[0].mode;
+      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+						     INTVAL (ops[2].value));
+      break;
+
+    case CODE_FOR_lsx_vbitseli_b:
+      gcc_assert (has_target_p && nops == 4);
+      if (!CONST_INT_P (ops[3].value))
+	break;
+      ops[3].mode = ops[0].mode;
+      ops[3].value = loongarch_gen_const_int_vector (ops[3].mode,
+						     INTVAL (ops[3].value));
+      break;
+
+    case CODE_FOR_lsx_vreplgr2vr_b:
+    case CODE_FOR_lsx_vreplgr2vr_h:
+    case CODE_FOR_lsx_vreplgr2vr_w:
+    case CODE_FOR_lsx_vreplgr2vr_d:
+      /* Map the built-ins to vector fill operations.  We need fix up the mode
+	 for the element being inserted.  */
+      gcc_assert (has_target_p && nops == 2);
+      imode = GET_MODE_INNER (ops[0].mode);
+      ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
+      ops[1].mode = imode;
+      break;
+
+    case CODE_FOR_lsx_vilvh_b:
+    case CODE_FOR_lsx_vilvh_h:
+    case CODE_FOR_lsx_vilvh_w:
+    case CODE_FOR_lsx_vilvh_d:
+    case CODE_FOR_lsx_vilvl_b:
+    case CODE_FOR_lsx_vilvl_h:
+    case CODE_FOR_lsx_vilvl_w:
+    case CODE_FOR_lsx_vilvl_d:
+    case CODE_FOR_lsx_vpackev_b:
+    case CODE_FOR_lsx_vpackev_h:
+    case CODE_FOR_lsx_vpackev_w:
+    case CODE_FOR_lsx_vpackod_b:
+    case CODE_FOR_lsx_vpackod_h:
+    case CODE_FOR_lsx_vpackod_w:
+    case CODE_FOR_lsx_vpickev_b:
+    case CODE_FOR_lsx_vpickev_h:
+    case CODE_FOR_lsx_vpickev_w:
+    case CODE_FOR_lsx_vpickod_b:
+    case CODE_FOR_lsx_vpickod_h:
+    case CODE_FOR_lsx_vpickod_w:
+      /* Swap the operands 1 and 2 for interleave operations.  Built-ins follow
+	 convention of ISA, which have op1 as higher component and op2 as lower
+	 component.  However, the VEC_PERM op in tree and vec_concat in RTL
+	 expects first operand to be lower component, because of which this
+	 swap is needed for builtins.  */
+      gcc_assert (has_target_p && nops == 3);
+      std::swap (ops[1], ops[2]);
+      break;
+
+    case CODE_FOR_lsx_vslli_b:
+    case CODE_FOR_lsx_vslli_h:
+    case CODE_FOR_lsx_vslli_w:
+    case CODE_FOR_lsx_vslli_d:
+    case CODE_FOR_lsx_vsrai_b:
+    case CODE_FOR_lsx_vsrai_h:
+    case CODE_FOR_lsx_vsrai_w:
+    case CODE_FOR_lsx_vsrai_d:
+    case CODE_FOR_lsx_vsrli_b:
+    case CODE_FOR_lsx_vsrli_h:
+    case CODE_FOR_lsx_vsrli_w:
+    case CODE_FOR_lsx_vsrli_d:
+      gcc_assert (has_target_p && nops == 3);
+      if (CONST_INT_P (ops[2].value))
+	{
+	  rangelo = 0;
+	  rangehi = GET_MODE_UNIT_BITSIZE (ops[0].mode) - 1;
+	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+	    {
+	      ops[2].mode = ops[0].mode;
+	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+							     INTVAL (ops[2].value));
+	    }
+	  else
+	    error_opno = 2;
+	}
+      break;
+
+    case CODE_FOR_lsx_vinsgr2vr_b:
+    case CODE_FOR_lsx_vinsgr2vr_h:
+    case CODE_FOR_lsx_vinsgr2vr_w:
+    case CODE_FOR_lsx_vinsgr2vr_d:
+      /* Map the built-ins to insert operations.  We need to swap operands,
+	 fix up the mode for the element being inserted, and generate
+	 a bit mask for vec_merge.  */
+      gcc_assert (has_target_p && nops == 4);
+      std::swap (ops[1], ops[2]);
+      imode = GET_MODE_INNER (ops[0].mode);
+      ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
+      ops[1].mode = imode;
+      rangelo = 0;
+      rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
+      if (CONST_INT_P (ops[3].value)
+	  && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
+	ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
+      else
+	/* The out-of-range operand is the element index (ops[3]), i.e.
+	   the built-in's third argument, so report argument 3.  */
+	error_opno = 3;
+      break;
+
+    case CODE_FOR_lsx_vshuf4i_b:
+    case CODE_FOR_lsx_vshuf4i_h:
+    case CODE_FOR_lsx_vshuf4i_w:
+    case CODE_FOR_lsx_vshuf4i_w_f:
+      gcc_assert (has_target_p && nops == 3);
+      ops[2].value = loongarch_gen_const_int_vector_shuffle (ops[0].mode,
+							     INTVAL (ops[2].value));
+      break;
+
+    default:
+      break;
+  }
+
+  if (error_opno != 0)
+    {
+      error ("argument %d to the built-in must be a constant"
+	     " in range %d to %d", error_opno, rangelo, rangehi);
+      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
+    }
+  else if (!maybe_expand_insn (icode, nops, ops))
     {
       error ("invalid argument to built-in function");
       return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
@@ -352,6 +1794,50 @@  loongarch_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
   return loongarch_expand_builtin_insn (icode, opno, ops, has_target_p);
 }
 
+/* Expand an LSX built-in for a compare and branch instruction specified by
+   ICODE, set a general-purpose register to 1 if the branch was taken,
+   0 otherwise.  */
+
+static rtx
+loongarch_expand_builtin_lsx_test_branch (enum insn_code icode, tree exp)
+{
+  struct expand_operand ops[3];
+  rtx_insn *cbranch;
+  rtx_code_label *true_label, *done_label;
+  rtx cmp_result;
+
+  true_label = gen_label_rtx ();
+  done_label = gen_label_rtx ();
+
+  /* Operand 0 is the branch target, operand 1 the tested vector,
+     operand 2 a fixed zero for the comparison.  */
+  create_input_operand (&ops[0], true_label, TYPE_MODE (TREE_TYPE (exp)));
+  loongarch_prepare_builtin_arg (&ops[1], exp, 0);
+  create_fixed_operand (&ops[2], const0_rtx);
+
+  /* Make sure that the operand 1 is a REG.  */
+  if (GET_CODE (ops[1].value) != REG)
+    ops[1].value = force_reg (ops[1].mode, ops[1].value);
+
+  /* NOTE(review): error () returns control here; if maybe_gen_insn
+     really can yield NULL_RTX, the emit_jump_insn below would then be
+     handed a null insn -- confirm failure is impossible for these
+     patterns, or return early after diagnosing.  */
+  if ((cbranch = maybe_gen_insn (icode, 3, ops)) == NULL_RTX)
+    error ("failed to expand built-in function");
+
+  cmp_result = gen_reg_rtx (SImode);
+
+  /* First assume that CMP_RESULT is false.  */
+  loongarch_emit_move (cmp_result, const0_rtx);
+
+  /* Branch to TRUE_LABEL if CBRANCH is taken and DONE_LABEL otherwise.  */
+  emit_jump_insn (cbranch);
+  emit_jump_insn (gen_jump (done_label));
+  emit_barrier ();
+
+  /* Set CMP_RESULT to true if the branch was taken.  */
+  emit_label (true_label);
+  loongarch_emit_move (cmp_result, const1_rtx);
+
+  emit_label (done_label);
+  return cmp_result;
+}
+
 /* Implement TARGET_EXPAND_BUILTIN.  */
 
 rtx
@@ -372,10 +1858,14 @@  loongarch_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
   switch (d->builtin_type)
     {
     case LARCH_BUILTIN_DIRECT:
+    case LARCH_BUILTIN_LSX:
       return loongarch_expand_builtin_direct (d->icode, target, exp, true);
 
     case LARCH_BUILTIN_DIRECT_NO_TARGET:
       return loongarch_expand_builtin_direct (d->icode, target, exp, false);
+
+    case LARCH_BUILTIN_LSX_TEST_BRANCH:
+      return loongarch_expand_builtin_lsx_test_branch (d->icode, exp);
     }
   gcc_unreachable ();
 }
diff --git a/gcc/config/loongarch/loongarch-ftypes.def b/gcc/config/loongarch/loongarch-ftypes.def
index 06d2e0519f7..1ce9d83ccab 100644
--- a/gcc/config/loongarch/loongarch-ftypes.def
+++ b/gcc/config/loongarch/loongarch-ftypes.def
@@ -1,7 +1,7 @@ 
 /* Definitions of prototypes for LoongArch built-in functions.
    Copyright (C) 2021-2023 Free Software Foundation, Inc.
    Contributed by Loongson Ltd.
-   Based on MIPS target for GNU ckompiler.
+   Based on MIPS target for GNU compiler.
 
 This file is part of GCC.
 
@@ -32,7 +32,7 @@  along with GCC; see the file COPYING3.  If not see
       INT for integer_type_node
       POINTER for ptr_type_node
 
-   (we don't use PTR because that's a ANSI-compatibillity macro).
+   (we don't use PTR because that's an ANSI-compatibility macro).
 
    Please keep this list lexicographically sorted by the LIST argument.  */
 
@@ -63,3 +63,396 @@  DEF_LARCH_FTYPE (3, (VOID, USI, USI, SI))
 DEF_LARCH_FTYPE (3, (VOID, USI, UDI, SI))
 DEF_LARCH_FTYPE (3, (USI, USI, USI, USI))
 DEF_LARCH_FTYPE (3, (UDI, UDI, UDI, USI))
+
+DEF_LARCH_FTYPE (1, (DF, DF))
+DEF_LARCH_FTYPE (2, (DF, DF, DF))
+DEF_LARCH_FTYPE (1, (DF, V2DF))
+
+DEF_LARCH_FTYPE (1, (DI, DI))
+DEF_LARCH_FTYPE (1, (DI, SI))
+DEF_LARCH_FTYPE (1, (DI, UQI))
+DEF_LARCH_FTYPE (2, (DI, DI, DI))
+DEF_LARCH_FTYPE (2, (DI, DI, SI))
+DEF_LARCH_FTYPE (3, (DI, DI, SI, SI))
+DEF_LARCH_FTYPE (3, (DI, DI, USI, USI))
+DEF_LARCH_FTYPE (3, (DI, DI, DI, QI))
+DEF_LARCH_FTYPE (3, (DI, DI, V2HI, V2HI))
+DEF_LARCH_FTYPE (3, (DI, DI, V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (DI, POINTER, SI))
+DEF_LARCH_FTYPE (2, (DI, SI, SI))
+DEF_LARCH_FTYPE (2, (DI, USI, USI))
+
+DEF_LARCH_FTYPE (2, (DI, V2DI, UQI))
+
+DEF_LARCH_FTYPE (2, (INT, DF, DF))
+DEF_LARCH_FTYPE (2, (INT, SF, SF))
+
+DEF_LARCH_FTYPE (2, (INT, V2SF, V2SF))
+DEF_LARCH_FTYPE (4, (INT, V2SF, V2SF, V2SF, V2SF))
+
+DEF_LARCH_FTYPE (1, (SF, SF))
+DEF_LARCH_FTYPE (2, (SF, SF, SF))
+DEF_LARCH_FTYPE (1, (SF, V2SF))
+DEF_LARCH_FTYPE (1, (SF, V4SF))
+
+DEF_LARCH_FTYPE (2, (SI, POINTER, SI))
+DEF_LARCH_FTYPE (1, (SI, SI))
+DEF_LARCH_FTYPE (1, (SI, UDI))
+DEF_LARCH_FTYPE (2, (QI, QI, QI))
+DEF_LARCH_FTYPE (2, (HI, HI, HI))
+DEF_LARCH_FTYPE (3, (SI, SI, SI, SI))
+DEF_LARCH_FTYPE (3, (SI, SI, SI, QI))
+DEF_LARCH_FTYPE (1, (SI, UQI))
+DEF_LARCH_FTYPE (1, (SI, UV16QI))
+DEF_LARCH_FTYPE (1, (SI, UV2DI))
+DEF_LARCH_FTYPE (1, (SI, UV4SI))
+DEF_LARCH_FTYPE (1, (SI, UV8HI))
+DEF_LARCH_FTYPE (2, (SI, V16QI, UQI))
+DEF_LARCH_FTYPE (1, (SI, V2HI))
+DEF_LARCH_FTYPE (2, (SI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (SI, V4QI))
+DEF_LARCH_FTYPE (2, (SI, V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (SI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (SI, V8HI, UQI))
+DEF_LARCH_FTYPE (1, (SI, VOID))
+
+DEF_LARCH_FTYPE (2, (UDI, UDI, UDI))
+DEF_LARCH_FTYPE (2, (UDI, UV2SI, UV2SI))
+DEF_LARCH_FTYPE (2, (UDI, V2DI, UQI))
+
+DEF_LARCH_FTYPE (2, (USI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (USI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (USI, V8HI, UQI))
+DEF_LARCH_FTYPE (1, (USI, VOID))
+
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, USI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, USI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, V16QI))
+
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (1, (UV2DI, V2DF))
+
+DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UQI))
+DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UV2SI))
+
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, UQI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, USI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, UV4HI))
+DEF_LARCH_FTYPE (3, (UV4HI, UV4HI, UV4HI, UQI))
+DEF_LARCH_FTYPE (3, (UV4HI, UV4HI, UV4HI, USI))
+DEF_LARCH_FTYPE (1, (UV4HI, UV8QI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV8QI, UV8QI))
+
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV4SI, V4SF))
+
+DEF_LARCH_FTYPE (2, (UV8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, V8HI))
+
+
+
+DEF_LARCH_FTYPE (2, (UV8QI, UV4HI, UV4HI))
+DEF_LARCH_FTYPE (1, (UV8QI, UV8QI))
+DEF_LARCH_FTYPE (2, (UV8QI, UV8QI, UV8QI))
+
+DEF_LARCH_FTYPE (2, (V16QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (2, (V16QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (1, (V16QI, HI))
+DEF_LARCH_FTYPE (1, (V16QI, SI))
+DEF_LARCH_FTYPE (2, (V16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (1, (V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, SI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, USI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, SI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, UQI))
+DEF_LARCH_FTYPE (4, (V16QI, V16QI, V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, USI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, V16QI))
+
+
+DEF_LARCH_FTYPE (1, (V2DF, DF))
+DEF_LARCH_FTYPE (1, (V2DF, UV2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V2DF))
+DEF_LARCH_FTYPE (2, (V2DF, V2DF, V2DF))
+DEF_LARCH_FTYPE (3, (V2DF, V2DF, V2DF, V2DF))
+DEF_LARCH_FTYPE (2, (V2DF, V2DF, V2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V4SF))
+DEF_LARCH_FTYPE (1, (V2DF, V4SI))
+
+DEF_LARCH_FTYPE (2, (V2DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V2DI, DI))
+DEF_LARCH_FTYPE (1, (V2DI, HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (1, (V2DI, V2DF))
+DEF_LARCH_FTYPE (2, (V2DI, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V2DI, V2DI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, QI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, SI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, USI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, SI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, USI))
+DEF_LARCH_FTYPE (4, (V2DI, V2DI, V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V2DI, V4SI, V4SI))
+
+DEF_LARCH_FTYPE (1, (V2HI, SI))
+DEF_LARCH_FTYPE (2, (V2HI, SI, SI))
+DEF_LARCH_FTYPE (3, (V2HI, SI, SI, SI))
+DEF_LARCH_FTYPE (1, (V2HI, V2HI))
+DEF_LARCH_FTYPE (2, (V2HI, V2HI, SI))
+DEF_LARCH_FTYPE (2, (V2HI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (V2HI, V4QI))
+DEF_LARCH_FTYPE (2, (V2HI, V4QI, V2HI))
+
+DEF_LARCH_FTYPE (2, (V2SF, SF, SF))
+DEF_LARCH_FTYPE (1, (V2SF, V2SF))
+DEF_LARCH_FTYPE (2, (V2SF, V2SF, V2SF))
+DEF_LARCH_FTYPE (3, (V2SF, V2SF, V2SF, INT))
+DEF_LARCH_FTYPE (4, (V2SF, V2SF, V2SF, V2SF, V2SF))
+
+DEF_LARCH_FTYPE (2, (V2SI, V2SI, UQI))
+DEF_LARCH_FTYPE (2, (V2SI, V2SI, V2SI))
+DEF_LARCH_FTYPE (2, (V2SI, V4HI, V4HI))
+
+DEF_LARCH_FTYPE (2, (V4HI, V2SI, V2SI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, UQI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, USI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, V4HI))
+DEF_LARCH_FTYPE (3, (V4HI, V4HI, V4HI, UQI))
+DEF_LARCH_FTYPE (3, (V4HI, V4HI, V4HI, USI))
+
+DEF_LARCH_FTYPE (1, (V4QI, SI))
+DEF_LARCH_FTYPE (2, (V4QI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (V4QI, V4QI, SI))
+DEF_LARCH_FTYPE (2, (V4QI, V4QI, V4QI))
+
+DEF_LARCH_FTYPE (1, (V4SF, SF))
+DEF_LARCH_FTYPE (1, (V4SF, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SF, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V4SF, V4SF))
+DEF_LARCH_FTYPE (2, (V4SF, V4SF, V4SF))
+DEF_LARCH_FTYPE (3, (V4SF, V4SF, V4SF, V4SF))
+DEF_LARCH_FTYPE (2, (V4SF, V4SF, V4SI))
+DEF_LARCH_FTYPE (1, (V4SF, V4SI))
+DEF_LARCH_FTYPE (1, (V4SF, V8HI))
+
+DEF_LARCH_FTYPE (2, (V4SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V4SI, HI))
+DEF_LARCH_FTYPE (1, (V4SI, SI))
+DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V4SI, V4SF))
+DEF_LARCH_FTYPE (2, (V4SI, V4SF, V4SF))
+DEF_LARCH_FTYPE (1, (V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, QI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, SI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, USI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, USI))
+DEF_LARCH_FTYPE (4, (V4SI, V4SI, V4SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V8HI, V8HI))
+
+DEF_LARCH_FTYPE (2, (V8HI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V8HI, HI))
+DEF_LARCH_FTYPE (1, (V8HI, SI))
+DEF_LARCH_FTYPE (2, (V8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SF, V4SF))
+DEF_LARCH_FTYPE (1, (V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, SI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, V8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, UQI))
+DEF_LARCH_FTYPE (4, (V8HI, V8HI, V8HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, V8HI))
+
+DEF_LARCH_FTYPE (2, (V8QI, V4HI, V4HI))
+DEF_LARCH_FTYPE (1, (V8QI, V8QI))
+DEF_LARCH_FTYPE (2, (V8QI, V8QI, V8QI))
+
+DEF_LARCH_FTYPE (2, (VOID, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (VOID, SI, SI))
+DEF_LARCH_FTYPE (2, (VOID, UQI, SI))
+DEF_LARCH_FTYPE (2, (VOID, USI, UQI))
+DEF_LARCH_FTYPE (1, (VOID, UHI))
+DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (3, (VOID, V2DF, POINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V2DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (2, (VOID, V2HI, V2HI))
+DEF_LARCH_FTYPE (2, (VOID, V4QI, V4QI))
+DEF_LARCH_FTYPE (3, (VOID, V4SF, POINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V4SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V8HI, CVPOINTER, SI))
+
+DEF_LARCH_FTYPE (1, (V8HI, V16QI))
+DEF_LARCH_FTYPE (1, (V4SI, V16QI))
+DEF_LARCH_FTYPE (1, (V2DI, V16QI))
+DEF_LARCH_FTYPE (1, (V4SI, V8HI))
+DEF_LARCH_FTYPE (1, (V2DI, V8HI))
+DEF_LARCH_FTYPE (1, (V2DI, V4SI))
+DEF_LARCH_FTYPE (1, (UV8HI, V16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, V16QI))
+DEF_LARCH_FTYPE (1, (UV2DI, V16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, V8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, V8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, V4SI))
+DEF_LARCH_FTYPE (1, (UV8HI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (UV4SI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (UV2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V8HI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DI, V2DI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V16QI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, DI))
+DEF_LARCH_FTYPE (2, (V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (2, (V4SF, V2DI, V2DI))
+DEF_LARCH_FTYPE (1, (V2DI, V4SF))
+DEF_LARCH_FTYPE (2, (V2DI, UQI, USI))
+DEF_LARCH_FTYPE (2, (V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V16QI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V8HI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V4SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V2DI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V16QI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V8HI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V4SI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V2DI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V8HI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV16QI, V16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, V8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, V4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V4SI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (2, (V2DI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (2, (UV2DI, V2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV16QI, UV16QI))
+
+DEF_LARCH_FTYPE(4,(VOID,V16QI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V8HI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V4SI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V2DI,CVPOINTER,SI,UQI))
+
+DEF_LARCH_FTYPE (2, (DI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (DI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (DI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V4SI, UQI))
+
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, V16QI, USI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, V4SI, USI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, V2DI, USI))
+
+DEF_LARCH_FTYPE (1, (BOOLEAN,V16QI))
+DEF_LARCH_FTYPE(2,(V16QI,CVPOINTER,CVPOINTER))
+DEF_LARCH_FTYPE(3,(VOID,V16QI,CVPOINTER,CVPOINTER))
+
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, DI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, SI, UQI))
diff --git a/gcc/config/loongarch/lsxintrin.h b/gcc/config/loongarch/lsxintrin.h
new file mode 100644
index 00000000000..ec42069904d
--- /dev/null
+++ b/gcc/config/loongarch/lsxintrin.h
@@ -0,0 +1,5181 @@ 
+/* LoongArch Loongson SX intrinsics include file.
+
+   Copyright (C) 2018-2023 Free Software Foundation, Inc.
+
+   This file is part of GCC.
+
+   GCC is free software; you can redistribute it and/or modify it
+   under the terms of the GNU General Public License as published
+   by the Free Software Foundation; either version 3, or (at your
+   option) any later version.
+
+   GCC is distributed in the hope that it will be useful, but WITHOUT
+   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
+   License for more details.
+
+   Under Section 7 of GPL version 3, you are granted additional
+   permissions described in the GCC Runtime Library Exception, version
+   3.1, as published by the Free Software Foundation.
+
+   You should have received a copy of the GNU General Public License and
+   a copy of the GCC Runtime Library Exception along with this program;
+   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
+   <http://www.gnu.org/licenses/>.  */
+
+#ifndef _GCC_LOONGSON_SXINTRIN_H
+#define _GCC_LOONGSON_SXINTRIN_H 1
+
+#if defined(__loongarch_sx)
+typedef signed char v16i8 __attribute__ ((vector_size(16), aligned(16)));
+typedef signed char v16i8_b __attribute__ ((vector_size(16), aligned(1)));
+typedef unsigned char v16u8 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned char v16u8_b __attribute__ ((vector_size(16), aligned(1)));
+typedef short v8i16 __attribute__ ((vector_size(16), aligned(16)));
+typedef short v8i16_h __attribute__ ((vector_size(16), aligned(2)));
+typedef unsigned short v8u16 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned short v8u16_h __attribute__ ((vector_size(16), aligned(2)));
+typedef int v4i32 __attribute__ ((vector_size(16), aligned(16)));
+typedef int v4i32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef unsigned int v4u32 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned int v4u32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef long long v2i64 __attribute__ ((vector_size(16), aligned(16)));
+typedef long long v2i64_d __attribute__ ((vector_size(16), aligned(8)));
+typedef unsigned long long v2u64 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned long long v2u64_d __attribute__ ((vector_size(16), aligned(8)));
+typedef float v4f32 __attribute__ ((vector_size(16), aligned(16)));
+typedef float v4f32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef double v2f64 __attribute__ ((vector_size(16), aligned(16)));
+typedef double v2f64_d __attribute__ ((vector_size(16), aligned(8)));
+
+typedef long long __m128i __attribute__ ((__vector_size__ (16), __may_alias__));
+typedef float __m128 __attribute__ ((__vector_size__ (16), __may_alias__));
+typedef double __m128d __attribute__ ((__vector_size__ (16), __may_alias__));
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsll_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsll_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsll_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsll_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vslli_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vslli_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vslli_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vslli_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vslli_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslli_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vslli_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vslli_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsra_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsra_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsra_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsra_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsrai_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsrai_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsrai_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsrai_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsrai_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsrai_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsrai_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsrai_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrar_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrar_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrar_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrar_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsrari_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsrari_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsrari_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsrari_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsrari_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsrari_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsrari_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsrari_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrl_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrl_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrl_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrl_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsrli_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsrli_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsrli_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsrli_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsrli_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsrli_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsrli_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsrli_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsrlri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsrlri_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsrlri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsrlri_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsrlri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsrlri_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsrlri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsrlri_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitclr_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitclr_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitclr_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Per-element bit-manipulation intrinsics (vbitclr/vbitset/vbitrev).
+   Register-operand variants are always-inline functions; immediate-operand
+   variants (*i_b/h/w/d, bit index ui3/ui4/ui5/ui6) are macros so that the
+   immediate reaches the builtin as a literal constant.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitclr_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vbitclri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vbitclri_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vbitclri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vbitclri_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vbitclri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vbitclri_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vbitclri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vbitclri_d ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitset_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitset_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitset_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitset_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vbitseti_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vbitseti_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vbitseti_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vbitseti_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vbitseti_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vbitseti_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vbitseti_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vbitseti_d ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitrev_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitrev_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitrev_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vbitrev_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vbitrevi_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vbitrevi_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vbitrevi_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vbitrevi_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vbitrevi_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vbitrevi_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vbitrevi_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vbitrevi_d ((v2u64)(_1), (_2)))
+
+/* Element-wise add/subtract (vadd/vsub), plus immediate variants
+   (vaddi_*u/vsubi_*u, unsigned 5-bit immediate) provided as macros.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vaddi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vaddi_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vaddi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vaddi_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vaddi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vaddi_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vaddi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vaddi_du ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsub_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsub_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsub_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsub_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsubi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsubi_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsubi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsubi_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsubi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsubi_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsubi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsubi_du ((v2i64)(_1), (_2)))
+
+/* Element-wise max/min: signed register forms (vmax/vmin), signed
+   immediate forms (vmaxi/vmini, si5), unsigned register forms
+   (vmax_*u/vmin_*u) and unsigned immediate forms (vmaxi_*u/vmini_*u, ui5).
+   Immediate forms are macros so the builtin receives a literal.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V16QI, V16QI, QI.  */
+#define __lsx_vmaxi_b(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V8HI, V8HI, QI.  */
+#define __lsx_vmaxi_h(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V4SI, V4SI, QI.  */
+#define __lsx_vmaxi_w(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V2DI, V2DI, QI.  */
+#define __lsx_vmaxi_d(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmax_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vmaxi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vmaxi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vmaxi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vmaxi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmaxi_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V16QI, V16QI, QI.  */
+#define __lsx_vmini_b(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V8HI, V8HI, QI.  */
+#define __lsx_vmini_h(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V4SI, V4SI, QI.  */
+#define __lsx_vmini_w(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V2DI, V2DI, QI.  */
+#define __lsx_vmini_d(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmin_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vmini_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vmini_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vmini_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vmini_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vmini_du ((v2u64)(_1), (_2)))
+
+/* Element-wise compare-equal (vseq): register forms first, then si5
+   immediate forms (vseqi) as macros.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vseq_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vseq_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vseq_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vseq_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V16QI, V16QI, QI.  */
+#define __lsx_vseqi_b(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vseqi_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V8HI, V8HI, QI.  */
+#define __lsx_vseqi_h(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vseqi_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V4SI, V4SI, QI.  */
+#define __lsx_vseqi_w(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vseqi_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V2DI, V2DI, QI.  */
+#define __lsx_vseqi_d(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vseqi_d ((v2i64)(_1), (_2)))
+
+/* Element-wise signed compare-less-than (vslt): register forms first,
+   then all si5 immediate forms (vslti) as macros — matching the ordering
+   used by every other compare family in this header (vseq, vsle, ...).
+   NOTE(review): __lsx_vslti_b was previously listed before the register
+   forms; moved here for consistency.  Declaration order has no effect on
+   behavior.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V16QI, V16QI, QI.  */
+#define __lsx_vslti_b(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V8HI, V8HI, QI.  */
+#define __lsx_vslti_h(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V4SI, V4SI, QI.  */
+#define __lsx_vslti_w(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V2DI, V2DI, QI.  */
+#define __lsx_vslti_d(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_d ((v2i64)(_1), (_2)))
+
+/* Unsigned compare-less-than (vslt_*u / vslti_*u) and compare
+   less-than-or-equal, signed and unsigned (vsle / vslei / vsle_*u /
+   vslei_*u).  Register forms are inline functions; immediate forms
+   are macros.  Result element type is the signed variant even for
+   unsigned comparisons (mask results), per the data-type comments.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vslt_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, UV16QI, UQI.  */
+#define __lsx_vslti_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, UV8HI, UQI.  */
+#define __lsx_vslti_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, UV4SI, UQI.  */
+#define __lsx_vslti_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UQI.  */
+#define __lsx_vslti_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslti_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V16QI, V16QI, QI.  */
+#define __lsx_vslei_b(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V8HI, V8HI, QI.  */
+#define __lsx_vslei_h(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V4SI, V4SI, QI.  */
+#define __lsx_vslei_w(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, si5.  */
+/* Data types in instruction templates:  V2DI, V2DI, QI.  */
+#define __lsx_vslei_d(/*__m128i*/ _1, /*si5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsle_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, UV16QI, UQI.  */
+#define __lsx_vslei_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, UV8HI, UQI.  */
+#define __lsx_vslei_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, UV4SI, UQI.  */
+#define __lsx_vslei_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UQI.  */
+#define __lsx_vslei_du(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vslei_du ((v2u64)(_1), (_2)))
+
+/* Saturate-to-immediate-width (vsat), signed and unsigned.  All forms
+   take an immediate bit-width operand and are therefore macros.  */
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vsat_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vsat_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vsat_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vsat_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vsat_bu(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UQI.  */
+#define __lsx_vsat_hu(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UQI.  */
+#define __lsx_vsat_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UQI.  */
+#define __lsx_vsat_du(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vsat_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadda_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadda_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadda_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadda_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vsadd_{b,h,w,d}[u]: element-wise saturating add.  The sum of each
+   lane pair is clamped to the representable range of the element type
+   (signed for b/h/w/d, unsigned for the *_u forms) — see the LoongArch
+   LSX ISA manual (vsadd.*).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsadd_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vavg_{b,h,w,d}[u]: element-wise average, truncating — per the
+   LoongArch LSX ISA manual vavg computes (src1 + src2) >> 1 per lane
+   (signed or unsigned arithmetic according to the suffix).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavg_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vavgr_{b,h,w,d}[u]: element-wise average with rounding — per the
+   LoongArch LSX ISA manual vavgr computes (src1 + src2 + 1) >> 1 per lane,
+   unlike the truncating vavg family.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vavgr_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vssub_{b,h,w,d}[u]: element-wise saturating subtract (_1 - _2);
+   the difference is clamped to the element type's range (unsigned forms
+   clamp at zero) — see the LoongArch LSX ISA manual (vssub.*).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssub_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vabsd_{b,h,w,d}[u]: element-wise absolute difference |_1 - _2|
+   (signed or unsigned comparison per suffix) — see the LoongArch LSX ISA
+   manual (vabsd.*).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vabsd_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vmul_{b,h,w,d}: element-wise multiply, keeping the low half of each
+   product (modular, signedness-independent result) — see the LoongArch LSX
+   ISA manual (vmul.*).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmul_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmul_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmul_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmul_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vmadd_{b,h,w,d}: element-wise multiply-accumulate.  Per the LSX ISA
+   (vmadd vd, vj, vk), the first operand is the accumulator:
+   result = _1 + _2 * _3 per lane (low half of the product).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmadd_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmadd_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmadd_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmadd_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* __lsx_vmsub_{b,h,w,d}: element-wise multiply-subtract.  Per the LSX ISA
+   (vmsub vd, vj, vk), the first operand is the accumulator:
+   result = _1 - _2 * _3 per lane (low half of the product).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmsub_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmsub_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmsub_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmsub_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* __lsx_vdiv_{b,h,w,d}[u]: element-wise division _1 / _2 (signed or
+   unsigned per suffix) — see the LoongArch LSX ISA manual (vdiv.*);
+   NOTE(review): behavior for a zero divisor lane is ISA-defined, not
+   visible here — consult the manual.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vdiv_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vhaddw_*: widening horizontal add.  Adjacent narrow elements from
+   the two sources are summed into the next-wider element type (template
+   types show the widening, e.g. V8HI result from V16QI inputs); see the
+   LoongArch LSX ISA manual (vhaddw.*) for the exact odd/even lane pairing.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_hu_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_hu_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_wu_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_wu_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_du_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_du_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* __lsx_vhsubw_*: widening horizontal subtract.  Adjacent narrow elements
+   from the two sources are differenced into the next-wider element type
+   (note the *u_*u forms still produce a SIGNED wider result per the
+   template types, e.g. V8HI from UV16QI); see the LoongArch LSX ISA
+   manual (vhsubw.*) for the exact odd/even lane pairing.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_hu_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_hu_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_wu_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_wu_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_du_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_du_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* __lsx_vmod_{b,h,w,d}[u]: element-wise remainder _1 % _2 (signed or
+   unsigned per suffix) — see the LoongArch LSX ISA manual (vmod.*);
+   NOTE(review): zero-divisor lane behavior is ISA-defined — consult
+   the manual.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmod_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vreplve_{b,h,w,d}: broadcast one element of _1, selected by the
+   general-purpose-register index _2 (taken modulo the lane count per the
+   LSX ISA), to every lane of the result.  The "rk" operand is a GPR, hence
+   the int parameter.  */
+/* Assembly instruction format:	vd, vj, rk.  */
+/* Data types in instruction templates:  V16QI, V16QI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_b (__m128i _1, int _2)
+{
+  return (__m128i)__builtin_lsx_vreplve_b ((v16i8)_1, (int)_2);
+}
+
+/* Assembly instruction format:	vd, vj, rk.  */
+/* Data types in instruction templates:  V8HI, V8HI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_h (__m128i _1, int _2)
+{
+  return (__m128i)__builtin_lsx_vreplve_h ((v8i16)_1, (int)_2);
+}
+
+/* Assembly instruction format:	vd, vj, rk.  */
+/* Data types in instruction templates:  V4SI, V4SI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_w (__m128i _1, int _2)
+{
+  return (__m128i)__builtin_lsx_vreplve_w ((v4i32)_1, (int)_2);
+}
+
+/* Assembly instruction format:	vd, vj, rk.  */
+/* Data types in instruction templates:  V2DI, V2DI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_d (__m128i _1, int _2)
+{
+  return (__m128i)__builtin_lsx_vreplve_d ((v2i64)_1, (int)_2);
+}
+
+/* __lsx_vreplvei_{b,h,w,d}: broadcast the element of _1 selected by the
+   immediate index _2 to every lane.  Defined as macros (not inline
+   functions) because the builtin requires a compile-time constant
+   immediate; the /*ui4*/../*ui1*/ annotations give the valid index width
+   for each element size.  */
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vreplvei_b(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vreplvei_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vreplvei_h(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vreplvei_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui2.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vreplvei_w(/*__m128i*/ _1, /*ui2*/ _2) \
+  ((__m128i)__builtin_lsx_vreplvei_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui1.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vreplvei_d(/*__m128i*/ _1, /*ui1*/ _2) \
+  ((__m128i)__builtin_lsx_vreplvei_d ((v2i64)(_1), (_2)))
+
+/* __lsx_vpickev_* / __lsx_vpickod_*: gather the even-indexed (vpickev) or
+   odd-indexed (vpickod) elements from the two source vectors into one
+   result vector — see the LoongArch LSX ISA manual for which source fills
+   which half of the destination.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickev_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickev_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickev_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickev_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpickod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vilvh_* / __lsx_vilvl_*: interleave elements taken from the high
+   (vilvh) or low (vilvl) halves of the two source vectors, alternating
+   lanes from each source — see the LoongArch LSX ISA manual for the
+   source ordering within the result.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvh_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvh_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvh_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvh_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvl_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvl_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvl_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vilvl_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vpackev_{b,h,w,d}: interleave the even-indexed elements of the two
+   source vectors into the result, alternating lanes from each source —
+   see the LoongArch LSX ISA manual (vpackev.*) for the exact ordering.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackev_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackev_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackev_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackev_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vpackod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Three-operand vector shuffles: forward _1/_2/_3 unchanged (apart from the
+   element-type casts) to __builtin_lsx_vshuf_{h,w,d}.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vshuf_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vshuf_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vshuf_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Whole-register bitwise logic.  Register-register forms (_v suffix) are
+   inline functions; the immediate (ui8) forms are macros, presumably so the
+   immediate reaches the builtin as a compile-time constant -- confirm.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vand_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vand_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vandi_b(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vandi_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vor_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vori_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vnor_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vnor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vnori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vnori_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vxor_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vxor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UQI.  */
+#define __lsx_vxori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vxori_b ((v16u8)(_1), (_2)))
+
+/* Bit-select and immediate 4-way shuffles.  vbitsel_v takes three vector
+   operands; the *i forms are macros because their last operand is an
+   immediate (ui8) per the format comments below.  */
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitsel_v (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vbitsel_v ((v16u8)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI, USI.  */
+#define __lsx_vbitseli_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vbitseli_b ((v16u8)(_1), (v16u8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V16QI, V16QI, USI.  */
+#define __lsx_vshuf4i_b(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vshuf4i_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V8HI, V8HI, USI.  */
+#define __lsx_vshuf4i_h(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vshuf4i_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V4SI, V4SI, USI.  */
+#define __lsx_vshuf4i_w(/*__m128i*/ _1, /*ui8*/ _2) \
+  ((__m128i)__builtin_lsx_vshuf4i_w ((v4i32)(_1), (_2)))
+
+/* General-register to vector broadcasts (format "vd, rj"): the scalar
+   argument is an int for the b/h/w variants and a long int for the d
+   variant, matching the SI/DI template types.  */
+
+/* Assembly instruction format:	vd, rj.  */
+/* Data types in instruction templates:  V16QI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_b (int _1)
+{
+  return (__m128i)__builtin_lsx_vreplgr2vr_b ((int)_1);
+}
+
+/* Assembly instruction format:	vd, rj.  */
+/* Data types in instruction templates:  V8HI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_h (int _1)
+{
+  return (__m128i)__builtin_lsx_vreplgr2vr_h ((int)_1);
+}
+
+/* Assembly instruction format:	vd, rj.  */
+/* Data types in instruction templates:  V4SI, SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_w (int _1)
+{
+  return (__m128i)__builtin_lsx_vreplgr2vr_w ((int)_1);
+}
+
+/* Assembly instruction format:	vd, rj.  */
+/* Data types in instruction templates:  V2DI, DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_d (long int _1)
+{
+  return (__m128i)__builtin_lsx_vreplgr2vr_d ((long int)_1);
+}
+
+/* Unary per-element bit counts (format "vd, vj").  NOTE(review): per the
+   mnemonics, vpcnt = population count, vclo = count leading ones,
+   vclz = count leading zeros -- confirm against the LSX ISA manual.  */
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vpcnt_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vpcnt_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vpcnt_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vpcnt_d ((v2i64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclo_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclo_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclo_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclo_d ((v2i64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclz_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclz_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vclz_d ((v2i64)_1);
+}
+
+/* Element extract (vpickve2gr*, vector -> GPR) and insert (vinsgr2vr*,
+   GPR -> vector lane) forms.  All are macros because the lane index is an
+   immediate; the _bu/_hu/_wu/_du extracts cast the result to the unsigned
+   scalar type named in their template comments.  */
+
+/* Assembly instruction format:	rd, vj, ui4.  */
+/* Data types in instruction templates:  SI, V16QI, UQI.  */
+#define __lsx_vpickve2gr_b(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((int)__builtin_lsx_vpickve2gr_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui3.  */
+/* Data types in instruction templates:  SI, V8HI, UQI.  */
+#define __lsx_vpickve2gr_h(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((int)__builtin_lsx_vpickve2gr_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui2.  */
+/* Data types in instruction templates:  SI, V4SI, UQI.  */
+#define __lsx_vpickve2gr_w(/*__m128i*/ _1, /*ui2*/ _2) \
+  ((int)__builtin_lsx_vpickve2gr_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui1.  */
+/* Data types in instruction templates:  DI, V2DI, UQI.  */
+#define __lsx_vpickve2gr_d(/*__m128i*/ _1, /*ui1*/ _2) \
+  ((long int)__builtin_lsx_vpickve2gr_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui4.  */
+/* Data types in instruction templates:  USI, V16QI, UQI.  */
+#define __lsx_vpickve2gr_bu(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((unsigned int)__builtin_lsx_vpickve2gr_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui3.  */
+/* Data types in instruction templates:  USI, V8HI, UQI.  */
+#define __lsx_vpickve2gr_hu(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((unsigned int)__builtin_lsx_vpickve2gr_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui2.  */
+/* Data types in instruction templates:  USI, V4SI, UQI.  */
+#define __lsx_vpickve2gr_wu(/*__m128i*/ _1, /*ui2*/ _2) \
+  ((unsigned int)__builtin_lsx_vpickve2gr_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	rd, vj, ui1.  */
+/* Data types in instruction templates:  UDI, V2DI, UQI.  */
+#define __lsx_vpickve2gr_du(/*__m128i*/ _1, /*ui1*/ _2) \
+  ((unsigned long int)__builtin_lsx_vpickve2gr_du ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, rj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, SI, UQI.  */
+#define __lsx_vinsgr2vr_b(/*__m128i*/ _1, /*int*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vinsgr2vr_b ((v16i8)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format:	vd, rj, ui3.  */
+/* Data types in instruction templates:  V8HI, V8HI, SI, UQI.  */
+#define __lsx_vinsgr2vr_h(/*__m128i*/ _1, /*int*/ _2, /*ui3*/ _3) \
+  ((__m128i)__builtin_lsx_vinsgr2vr_h ((v8i16)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format:	vd, rj, ui2.  */
+/* Data types in instruction templates:  V4SI, V4SI, SI, UQI.  */
+#define __lsx_vinsgr2vr_w(/*__m128i*/ _1, /*int*/ _2, /*ui2*/ _3) \
+  ((__m128i)__builtin_lsx_vinsgr2vr_w ((v4i32)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format:	vd, rj, ui1.  */
+/* Data types in instruction templates:  V2DI, V2DI, DI, UQI.  */
+#define __lsx_vinsgr2vr_d(/*__m128i*/ _1, /*long int*/ _2, /*ui1*/ _3) \
+  ((__m128i)__builtin_lsx_vinsgr2vr_d ((v2i64)(_1), (long int)(_2), (_3)))
+
+/* Floating-point arithmetic: _s variants operate on __m128 (V4SF),
+   _d variants on __m128d (V2DF), forwarding to the matching builtin.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfadd_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfadd_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfadd_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfadd_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfsub_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfsub_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfsub_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfsub_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmul_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfmul_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmul_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfmul_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfdiv_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfdiv_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfdiv_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfdiv_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Two-source FP narrowing conversions: vfcvt_h_s maps two V4SF sources to a
+   V8HI result and vfcvt_s_d maps two V2DF sources to V4SF, per the template
+   comments below.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcvt_h_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcvt_h_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvt_s_d (__m128d _1, __m128d _2)
+{
+  return (__m128)__builtin_lsx_vfcvt_s_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Floating-point min/max family.  NOTE(review): the 'a' variants
+   (vfmina/vfmaxa) presumably compare by absolute value -- confirm against
+   the LSX ISA manual.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmin_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfmin_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmin_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfmin_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmina_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfmina_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmina_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfmina_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmax_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfmax_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmax_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfmax_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmaxa_s (__m128 _1, __m128 _2)
+{
+  return (__m128)__builtin_lsx_vfmaxa_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmaxa_d (__m128d _1, __m128d _2)
+{
+  return (__m128d)__builtin_lsx_vfmaxa_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* FP classification and square root.  Note vfclass_* returns an integer
+   vector (__m128i) even though its input is floating point, matching the
+   V4SI/V2DI result types in the template comments.  */
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfclass_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vfclass_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfclass_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vfclass_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfsqrt_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfsqrt_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfsqrt_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfsqrt_d ((v2f64)_1);
+}
+
+/* Unary FP operations: vfrecip (reciprocal estimate), vfrint (round to
+   integral value), vfrsqrt (reciprocal square-root estimate), vflogb
+   (extract exponent).  NOTE(review): semantics inferred from mnemonics --
+   confirm against the LSX ISA manual.  */
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrecip_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrecip_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrecip_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrecip_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrint_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrint_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrint_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrint_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrsqrt_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrsqrt_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrsqrt_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrsqrt_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vflogb_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vflogb_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vflogb_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vflogb_d ((v2f64)_1);
+}
+
+/* Widening FP conversions: half->single (s_h) and single->double (d_s).
+   NOTE(review): the h/l in vfcvth/vfcvtl presumably select the high/low
+   half of the source vector -- confirm against the LSX ISA manual.  */
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvth_s_h (__m128i _1)
+{
+  return (__m128)__builtin_lsx_vfcvth_s_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfcvth_d_s (__m128 _1)
+{
+  return (__m128d)__builtin_lsx_vfcvth_d_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvtl_s_h (__m128i _1)
+{
+  return (__m128)__builtin_lsx_vfcvtl_s_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfcvtl_d_s (__m128 _1)
+{
+  return (__m128d)__builtin_lsx_vfcvtl_d_s ((v4f32)_1);
+}
+
+/* Float <-> integer conversions: vftint* convert FP to (un)signed integers
+   (the rz forms presumably round toward zero per the mnemonic -- confirm),
+   vffint* convert (un)signed integers to FP.  */
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_w_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftint_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_l_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftint_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_wu_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftint_wu_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_lu_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftint_lu_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_w_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrz_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_l_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftintrz_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_wu_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrz_wu_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_lu_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftintrz_lu_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_w (__m128i _1)
+{
+  return (__m128)__builtin_lsx_vffint_s_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffint_d_l (__m128i _1)
+{
+  return (__m128d)__builtin_lsx_vffint_d_l ((v2i64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SF, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_wu (__m128i _1)
+{
+  return (__m128)__builtin_lsx_vffint_s_wu ((v4u32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffint_d_lu (__m128i _1)
+{
+  return (__m128d)__builtin_lsx_vffint_d_lu ((v2u64)_1);
+}
+
+/* vandn_v (and-not, unsigned byte view) and per-element integer negation
+   wrappers.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vandn_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vandn_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vneg_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vneg_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vneg_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vneg_d ((v2i64)_1);
+}
+
+/* vmuh family: signed (b/h/w/d) and unsigned (bu/hu/wu/du) element views.
+   NOTE(review): 'muh' presumably yields the high half of the widened
+   product -- confirm against the LSX ISA manual.  */
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmuh_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* NOTE(review): vsllwil takes an immediate shift amount (ui3/ui4/ui5), so
+   these are macros rather than inline functions -- the builtin requires a
+   compile-time constant for _2.  Each widens one half-width element type to
+   the next size while shifting (per the template types, e.g. V16QI -> V8HI);
+   exact lane selection should be confirmed against the ISA manual.  */
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V8HI, V16QI, UQI.  */
+#define __lsx_vsllwil_h_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_h_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V4SI, V8HI, UQI.  */
+#define __lsx_vsllwil_w_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_w_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V2DI, V4SI, UQI.  */
+#define __lsx_vsllwil_d_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_d_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  UV8HI, UV16QI, UQI.  */
+#define __lsx_vsllwil_hu_bu(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_hu_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV4SI, UV8HI, UQI.  */
+#define __lsx_vsllwil_wu_hu(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_wu_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV2DI, UV4SI, UQI.  */
+#define __lsx_vsllwil_du_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vsllwil_du_wu ((v4u32)(_1), (_2)))
+
+/* NOTE(review): arithmetic-shift narrowing family.  Per the template
+   comments each wrapper takes two vectors of a wide element type and
+   produces the next narrower type (e.g. V8HI,V8HI -> V16QI); the "ss"
+   prefix presumably adds saturation and "r" rounding, mirroring the LSX
+   vsran/vssran/vsrarn/vssrarn mnemonics -- confirm against the manual.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsran_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsran_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsran_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_bu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_hu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_wu_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssran_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrarn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrarn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrarn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_bu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_hu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_wu_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrarn_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* NOTE(review): logical-shift narrowing family (vsrln/vssrln/vsrlrn/
+   vssrlrn).  Same shape as the arithmetic group: two wide vectors in, one
+   narrower vector out.  Only the unsigned saturating variants (bu/hu/wu)
+   appear for vssrln/vssrlrn here -- presumably the signed forms are defined
+   elsewhere in the header; verify the full set against the builtin list.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrln_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrln_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrln_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_bu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_hu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_wu_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlrn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlrn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsrlrn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV16QI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_bu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_hu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_wu_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* NOTE(review): vfrstpi takes an immediate (ui5), hence the macro form;
+   vfrstp takes a third vector and can be a real inline.  Both are
+   three-operand ops where _1 is also the accumulated destination per the
+   template types (V16QI,V16QI,V16QI[,UQI]) -- exact "find first" semantics
+   should be confirmed against the LSX ISA manual.  */
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, UQI.  */
+#define __lsx_vfrstpi_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vfrstpi_b ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, UQI.  */
+#define __lsx_vfrstpi_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vfrstpi_h ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfrstp_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vfrstp_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfrstp_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vfrstp_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* NOTE(review): immediate-operand permute/byte-shift/insert group.  All are
+   macros because the last operand (ui5/ui8) must be a compile-time constant
+   for the underlying builtin.  vbsrl/vbsll operate on the whole register as
+   16 bytes (hence the v16i8 cast) regardless of logical element type.  */
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vshuf4i_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vshuf4i_d ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vbsrl_v(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vbsrl_v ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+#define __lsx_vbsll_v(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vbsll_v ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vextrins_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vextrins_b ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vextrins_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vextrins_h ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vextrins_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vextrins_w ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vextrins_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vextrins_d ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* NOTE(review): one-operand vmskltz family -- result and operand share the
+   element type per the template comments (presumably a per-element
+   "less than zero" sign-mask instruction; confirm against the ISA
+   manual).  */
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmskltz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmskltz_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmskltz_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmskltz_d ((v2i64)_1);
+}
+
+/* NOTE(review): two-operand vsigncov family; all four element widths keep
+   input and output types identical (signed "sign cover" per the mnemonic --
+   semantics to be confirmed against the ISA manual).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsigncov_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsigncov_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsigncov_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsigncov_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* NOTE(review): four-operand fused multiply family (vfmadd/vfmsub/vfnmadd/
+   vfnmsub, single and double precision).  These take and return the float
+   vector types __m128/__m128d, not __m128i, unlike the integer wrappers
+   above.  */
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmadd_s (__m128 _1, __m128 _2, __m128 _3)
+{
+  return (__m128)__builtin_lsx_vfmadd_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmadd_d (__m128d _1, __m128d _2, __m128d _3)
+{
+  return (__m128d)__builtin_lsx_vfmadd_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmsub_s (__m128 _1, __m128 _2, __m128 _3)
+{
+  return (__m128)__builtin_lsx_vfmsub_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmsub_d (__m128d _1, __m128d _2, __m128d _3)
+{
+  return (__m128d)__builtin_lsx_vfmsub_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfnmadd_s (__m128 _1, __m128 _2, __m128 _3)
+{
+  return (__m128)__builtin_lsx_vfnmadd_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfnmadd_d (__m128d _1, __m128d _2, __m128d _3)
+{
+  return (__m128d)__builtin_lsx_vfnmadd_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V4SF, V4SF, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfnmsub_s (__m128 _1, __m128 _2, __m128 _3)
+{
+  return (__m128)__builtin_lsx_vfnmsub_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V2DF, V2DF, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfnmsub_d (__m128d _1, __m128d _2, __m128d _3)
+{
+  return (__m128d)__builtin_lsx_vfnmsub_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* NOTE(review): float<->int conversion group with explicit rounding-mode
+   suffixes (rne/rp/rm/rz, presumably to-nearest-even / +inf / -inf / zero
+   per IEEE naming -- confirm in the ISA manual).  The two-operand _w_d and
+   _s_l forms combine a pair of double/64-bit vectors into one narrower
+   result vector, per the template types.  */
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_w_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrne_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_l_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftintrne_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_w_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrp_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_l_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftintrp_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_w_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrm_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_l_d (__m128d _1)
+{
+  return (__m128i)__builtin_lsx_vftintrm_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_w_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vftint_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SF, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_l (__m128i _1, __m128i _2)
+{
+  return (__m128)__builtin_lsx_vffint_s_l ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_w_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vftintrz_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_w_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vftintrp_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_w_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vftintrm_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_w_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vftintrne_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* NOTE(review): half-vector widening conversions.  Each converts one half
+   of the source vector to a full vector of double-width elements -- the
+   "l"/"h" infix presumably selects the low or high half; confirm half
+   selection and rounding-mode suffixes against the ISA manual.  */
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintl_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftinth_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftinth_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffinth_d_w (__m128i _1)
+{
+  return (__m128d)__builtin_lsx_vffinth_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DF, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffintl_d_w (__m128i _1)
+{
+  return (__m128d)__builtin_lsx_vffintl_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrzl_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrzl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrzh_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrzh_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrpl_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrpl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrph_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrph_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrml_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrml_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrmh_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrmh_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrnel_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrnel_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrneh_l_s (__m128 _1)
+{
+  return (__m128i)__builtin_lsx_vftintrneh_l_s ((v4f32)_1);
+}
+}
+
+/* NOTE(review): vfrint* round to integral-valued *floats* (return __m128/
+   __m128d), unlike vftint* above which return integer vectors.  The
+   "V4SI"/"V2DI" first type in these template comments reflects the insn
+   template's mode, not the C-level return type -- intentional upstream
+   quirk, verify against the generated builtin descriptions.  */
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrne_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrintrne_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrne_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrintrne_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrz_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrintrz_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrz_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrintrz_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrp_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrintrp_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrp_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrintrp_d ((v2f64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrm_s (__m128 _1)
+{
+  return (__m128)__builtin_lsx_vfrintrm_s ((v4f32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrm_d (__m128d _1)
+{
+  return (__m128d)__builtin_lsx_vfrintrm_d ((v2f64)_1);
+}
+}
+
+/* NOTE(review): element-store group.  Macros (not inlines) because the
+   displacement (si8) and element index (idx) must be compile-time
+   constants; results are cast to (void) since these are pure stores with no
+   value produced.  _2 is the base pointer, _3 the byte offset, _4 the lane
+   to store.  */
+/* Assembly instruction format:	vd, rj, si8, idx.  */
+/* Data types in instruction templates:  VOID, V16QI, CVPOINTER, SI, UQI.  */
+#define __lsx_vstelm_b(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+  ((void)__builtin_lsx_vstelm_b ((v16i8)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format:	vd, rj, si8, idx.  */
+/* Data types in instruction templates:  VOID, V8HI, CVPOINTER, SI, UQI.  */
+#define __lsx_vstelm_h(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+  ((void)__builtin_lsx_vstelm_h ((v8i16)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format:	vd, rj, si8, idx.  */
+/* Data types in instruction templates:  VOID, V4SI, CVPOINTER, SI, UQI.  */
+#define __lsx_vstelm_w(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+  ((void)__builtin_lsx_vstelm_w ((v4i32)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format:	vd, rj, si8, idx.  */
+/* Data types in instruction templates:  VOID, V2DI, CVPOINTER, SI, UQI.  */
+#define __lsx_vstelm_d(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+  ((void)__builtin_lsx_vstelm_d ((v2i64)(_1), (void *)(_2), (_3), (_4)))
+
+/* NOTE(review): widening-add family.  Per the template types each wrapper
+   adds same-width elements and produces double-width results (e.g.
+   V4SI,V4SI -> V2DI); "ev"/"od" presumably select the even/odd source
+   lanes and the "u" suffix the unsigned variants -- confirm lane selection
+   against the ISA manual.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_wu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_hu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_bu_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_wu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_hu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_bu_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* __lsx_vsubwev_* / __lsx_vsubwod_*: widening vector subtracts, mirroring
+   the vaddwev/vaddwod wrappers above: double-width results (see data-type
+   comments), "ev"/"od" naming for even-/odd-indexed source elements, and
+   plain vs. "u" suffixes for the signed vs. unsigned variants.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* "_q" variants of the widening add/subtract wrappers: the sources are
+   64-bit (doubleword) elements and the result occupies the full 128-bit
+   register (see the V2DI/UV2DI data-type comments).  "_du_d" variants mix
+   an unsigned first operand with a signed second operand.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsubwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_du_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwev_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_du_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vaddwod_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* __lsx_vmulwev_* / __lsx_vmulwod_*: widening vector multiplies, following
+   the same scheme as the widening add/subtract wrappers above: double-width
+   results, "ev"/"od" for even-/odd-indexed source elements, and plain /
+   "u" / "_wu_w"-style suffixes for signed / unsigned / mixed operands.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_wu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_hu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_bu (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_wu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_hu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_bu_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_wu_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_hu_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_bu_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* "_q" variants of the widening multiply wrappers: 64-bit source elements,
+   full-128-bit results (see the V2DI/UV2DI data-type comments); "_du_d"
+   mixes an unsigned first operand with a signed second operand.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_du_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwev_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_du_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vmulwod_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* __lsx_vhaddw_q_d / vhsubw_q_d and their unsigned "_qu_du" counterparts:
+   per the LSX "h" (horizontal) naming, these combine elements drawn from
+   the two operands with a widened (128-bit) result -- exact element
+   pairing is defined by the ISA manual, not visible here.  Note the
+   unsigned forms are the only wrappers in this family whose result type
+   comment is unsigned (UV2DI).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_qu_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhaddw_qu_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_q_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_qu_du (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vhsubw_qu_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* __lsx_vmaddwev_* / __lsx_vmaddwod_*: widening multiply-accumulate.
+   Unlike the two-operand wrappers above these take three arguments: _1 is
+   the double-width accumulator (it maps to the destination register vd,
+   which is also an input) and _2/_3 are the narrow multiplicands -- see
+   the four-entry data-type comments.  "ev"/"od" and the signed/unsigned/
+   mixed suffixes follow the same LSX naming scheme as vmulwev/vmulwod.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_d_w ((v2i64)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_w_h ((v4i32)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_h_b ((v8i16)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_wu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_d_wu ((v2u64)_1, (v4u32)_2, (v4u32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_hu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_w_hu ((v4u32)_1, (v8u16)_2, (v8u16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_bu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_h_bu ((v8u16)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_d_w ((v2i64)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_w_h ((v4i32)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_h_b ((v8i16)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV4SI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_wu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_d_wu ((v2u64)_1, (v4u32)_2, (v4u32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, UV8HI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_hu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_w_hu ((v4u32)_1, (v8u16)_2, (v8u16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, UV16QI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_bu (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_h_bu ((v8u16)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_wu_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_d_wu_w ((v2i64)_1, (v4u32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_hu_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_w_hu_h ((v4i32)_1, (v8u16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_bu_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_h_bu_b ((v8i16)_1, (v16u8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, UV4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_wu_w (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_d_wu_w ((v2i64)_1, (v4u32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, UV8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_hu_h (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_w_hu_h ((v4i32)_1, (v8u16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, UV16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_bu_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_h_bu_b ((v8i16)_1, (v16u8)_2, (v16i8)_3);
+}
+
+/* "_q" variants of the widening multiply-accumulate wrappers: 64-bit
+   multiplicands, 128-bit accumulator (_1).  "_du_d" mixes an unsigned
+   second operand with a signed third operand (see the casts).  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_q_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_q_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_du (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_q_du ((v2u64)_1, (v2u64)_2, (v2u64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_du (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_q_du ((v2u64)_1, (v2u64)_2, (v2u64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_du_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwev_q_du_d ((v2i64)_1, (v2u64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, UV2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_du_d (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vmaddwod_q_du_d ((v2i64)_1, (v2u64)_2, (v2i64)_3);
+}
+
+/* __lsx_vrotr_{b,h,w,d}: per-element rotate; _2 supplies a rotate amount
+   for each element of _1.  Source and result element widths are identical
+   (see the data-type comments) -- no widening here.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_b (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vrotr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vrotr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vrotr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vrotr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vadd_q / __lsx_vsub_q: whole-register (128-bit quadword)
+   add/subtract; the v2i64 casts only satisfy the builtin's prototype.  */
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_q (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vadd_q ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_q (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vsub_q ((v2i64)_1, (v2i64)_2);
+}
+
+/* __lsx_vldrepl_{b,h,w,d}: load one element from memory and replicate it
+   across the vector.  Macros, presumably because the builtin requires the
+   offset _2 to be an integer constant expression -- TODO confirm.  The
+   immediate range shrinks as the element widens (si12/si11/si10/si9, per
+   the format comments).  */
+/* Assembly instruction format:	vd, rj, si12.  */
+/* Data types in instruction templates:  V16QI, CVPOINTER, SI.  */
+#define __lsx_vldrepl_b(/*void **/ _1, /*si12*/ _2) \
+  ((__m128i)__builtin_lsx_vldrepl_b ((void *)(_1), (_2)))
+
+/* Assembly instruction format:	vd, rj, si11.  */
+/* Data types in instruction templates:  V8HI, CVPOINTER, SI.  */
+#define __lsx_vldrepl_h(/*void **/ _1, /*si11*/ _2) \
+  ((__m128i)__builtin_lsx_vldrepl_h ((void *)(_1), (_2)))
+
+/* Assembly instruction format:	vd, rj, si10.  */
+/* Data types in instruction templates:  V4SI, CVPOINTER, SI.  */
+#define __lsx_vldrepl_w(/*void **/ _1, /*si10*/ _2) \
+  ((__m128i)__builtin_lsx_vldrepl_w ((void *)(_1), (_2)))
+
+/* Assembly instruction format:	vd, rj, si9.  */
+/* Data types in instruction templates:  V2DI, CVPOINTER, SI.  */
+#define __lsx_vldrepl_d(/*void **/ _1, /*si9*/ _2) \
+  ((__m128i)__builtin_lsx_vldrepl_d ((void *)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+/* NOTE(review): unary wrappers.  vmsk* produce a V16QI result from a V16QI
+   input per the templates; vexth_* widen the element type (e.g. V16QI ->
+   V8HI), presumably sign/zero-extending the high half of the source vector
+   — confirm against the LSX ISA manual.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskgez_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmskgez_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsknz_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vmsknz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V8HI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_h_b (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_h_b ((v16i8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V4SI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_w_h (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_w_h ((v8i16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_d_w (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_q_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_q_d ((v2i64)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV8HI, UV16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_hu_bu (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_hu_bu ((v16u8)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV4SI, UV8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_wu_hu (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_wu_hu ((v8u16)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV2DI, UV4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_du_wu (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_du_wu ((v4u32)_1);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_qu_du (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vexth_qu_du ((v2u64)_1);
+}
+
+/* Assembly instruction format:	vd, vj, ui3.  */
+/* Data types in instruction templates:  V16QI, V16QI, UQI.  */
+/* NOTE(review): immediate-rotate forms; macros so the ui3..ui6 rotate count
+   stays a compile-time constant.  The immediate width tracks the element
+   width (3 bits for bytes up to 6 bits for double-words).  */
+#define __lsx_vrotri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+  ((__m128i)__builtin_lsx_vrotri_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V8HI, V8HI, UQI.  */
+#define __lsx_vrotri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+  ((__m128i)__builtin_lsx_vrotri_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V4SI, V4SI, UQI.  */
+#define __lsx_vrotri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+  ((__m128i)__builtin_lsx_vrotri_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V2DI, V2DI, UQI.  */
+#define __lsx_vrotri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+  ((__m128i)__builtin_lsx_vrotri_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  V2DI, V2DI.  */
+/* NOTE(review): vextl presumably extends the low double-word to a 128-bit
+   value (counterpart of vexth_q_d above) — confirm with the ISA manual.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vextl_q_d (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vextl_q_d ((v2i64)_1);
+}
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+/* NOTE(review): the *ni (narrow-insert) family below follows one fixed
+   pattern: both vector operands are cast to the element types given in each
+   template comment, the shift amount is a compile-time immediate (ui4..ui7,
+   widening with the source element size), and the first operand is also the
+   destination the narrowed result is inserted into (vd appears as both
+   input and output in the builtin's template).  Variants: srl = logical
+   shift right, sra = arithmetic, extra r = rounding, leading s = saturating,
+   *_u forms saturate to unsigned (UV* first operand) — semantics taken from
+   the mnemonics; confirm against the LSX ISA manual.  */
+#define __lsx_vsrlni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vsrlni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vsrlni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vsrlni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vsrlrni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlrni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vsrlrni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlrni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vsrlrni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlrni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vsrlrni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vsrlrni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vssrlni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vssrlni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vssrlni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vssrlni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, V16QI, USI.  */
+#define __lsx_vssrlni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, V8HI, USI.  */
+#define __lsx_vssrlni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, V4SI, USI.  */
+#define __lsx_vssrlni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, V2DI, USI.  */
+#define __lsx_vssrlni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vssrlrni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vssrlrni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vssrlrni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vssrlrni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, V16QI, USI.  */
+#define __lsx_vssrlrni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, V8HI, USI.  */
+#define __lsx_vssrlrni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, V4SI, USI.  */
+#define __lsx_vssrlrni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, V2DI, USI.  */
+#define __lsx_vssrlrni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrlrni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vsrani_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vsrani_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vsrani_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vsrani_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vsrani_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vsrani_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vsrani_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vsrani_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vsrarni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vsrarni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vsrarni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vsrarni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vsrarni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vsrarni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vsrarni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vsrarni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vssrani_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vssrani_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vssrani_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vssrani_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, V16QI, USI.  */
+#define __lsx_vssrani_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, V8HI, USI.  */
+#define __lsx_vssrani_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, V4SI, USI.  */
+#define __lsx_vssrani_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, V2DI, USI.  */
+#define __lsx_vssrani_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrani_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, USI.  */
+#define __lsx_vssrarni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  V8HI, V8HI, V8HI, USI.  */
+#define __lsx_vssrarni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+#define __lsx_vssrarni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  V2DI, V2DI, V2DI, USI.  */
+#define __lsx_vssrarni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui4.  */
+/* Data types in instruction templates:  UV16QI, UV16QI, V16QI, USI.  */
+#define __lsx_vssrarni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui5.  */
+/* Data types in instruction templates:  UV8HI, UV8HI, V8HI, USI.  */
+#define __lsx_vssrarni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui6.  */
+/* Data types in instruction templates:  UV4SI, UV4SI, V4SI, USI.  */
+#define __lsx_vssrarni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui7.  */
+/* Data types in instruction templates:  UV2DI, UV2DI, V2DI, USI.  */
+#define __lsx_vssrarni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+  ((__m128i)__builtin_lsx_vssrarni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, ui8.  */
+/* Data types in instruction templates:  V4SI, V4SI, V4SI, USI.  */
+/* NOTE(review): vpermi takes a ui8 control immediate; vld/vst take a base
+   pointer plus si12 displacement.  All three are macros so the immediate
+   remains a compile-time constant for the builtin.  */
+#define __lsx_vpermi_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+  ((__m128i)__builtin_lsx_vpermi_w ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format:	vd, rj, si12.  */
+/* Data types in instruction templates:  V16QI, CVPOINTER, SI.  */
+#define __lsx_vld(/*void **/ _1, /*si12*/ _2) \
+  ((__m128i)__builtin_lsx_vld ((void *)(_1), (_2)))
+
+/* Assembly instruction format:	vd, rj, si12.  */
+/* Data types in instruction templates:  VOID, V16QI, CVPOINTER, SI.  */
+#define __lsx_vst(/*__m128i*/ _1, /*void **/ _2, /*si12*/ _3) \
+  ((void)__builtin_lsx_vst ((v16i8)(_1), (void *)(_2), (_3)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+/* NOTE(review): register-count narrowing shifts — both sources share the
+   wider element type and the result uses the next narrower one, per the
+   templates.  vorn_v below is the whole-register OR-NOT form (V16QI in and
+   out).  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrlrn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V8HI, V8HI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_b_h (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V8HI, V4SI, V4SI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_h_w (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V2DI, V2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_w_d (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vssrln_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vorn_v (__m128i _1, __m128i _2)
+{
+  return (__m128i)__builtin_lsx_vorn_v ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format:	vd, i13.  */
+/* Data types in instruction templates:  V2DI, HI.  */
+/* NOTE(review): the single i13 immediate presumably encodes both the
+   replication mode and the value — verify the encoding in the LSX manual
+   before use.  */
+#define __lsx_vldi(/*i13*/ _1) \
+  ((__m128i)__builtin_lsx_vldi ((_1)))
+
+/* Assembly instruction format:	vd, vj, vk, va.  */
+/* Data types in instruction templates:  V16QI, V16QI, V16QI, V16QI.  */
+/* NOTE(review): vshuf_b takes three vector operands (vj, vk plus the va
+   control vector); vldx/vstx are the register-indexed load/store forms,
+   taking a pointer base and a 64-bit (long int) register offset instead of
+   an immediate displacement.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_b (__m128i _1, __m128i _2, __m128i _3)
+{
+  return (__m128i)__builtin_lsx_vshuf_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format:	vd, rj, rk.  */
+/* Data types in instruction templates:  V16QI, CVPOINTER, DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vldx (void * _1, long int _2)
+{
+  return (__m128i)__builtin_lsx_vldx ((void *)_1, (long int)_2);
+}
+
+/* Assembly instruction format:	vd, rj, rk.  */
+/* Data types in instruction templates:  VOID, V16QI, CVPOINTER, DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+void __lsx_vstx (__m128i _1, void * _2, long int _3)
+{
+  return (void)__builtin_lsx_vstx ((v16i8)_1, (void *)_2, (long int)_3);
+}
+
+/* Assembly instruction format:	vd, vj.  */
+/* Data types in instruction templates:  UV2DI, UV2DI.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vextl_qu_du (__m128i _1)
+{
+  return (__m128i)__builtin_lsx_vextl_qu_du ((v2u64)_1);
+}
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV16QI.  */
+/* NOTE(review): branch-test predicates returning int (SI) from an unsigned
+   vector view.  Per the mnemonics, bnz_* / bz_* presumably test each
+   element (or, for the _v forms, any bit of the whole register) for
+   non-zero / zero — confirm the exact per-element vs whole-vector semantics
+   in the ISA manual.  */
+#define __lsx_bnz_b(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bnz_b ((v16u8)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV2DI.  */
+#define __lsx_bnz_d(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bnz_d ((v2u64)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV8HI.  */
+#define __lsx_bnz_h(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bnz_h ((v8u16)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV16QI.  */
+#define __lsx_bnz_v(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bnz_v ((v16u8)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV4SI.  */
+#define __lsx_bnz_w(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bnz_w ((v4u32)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV16QI.  */
+#define __lsx_bz_b(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bz_b ((v16u8)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV2DI.  */
+#define __lsx_bz_d(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bz_d ((v2u64)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV8HI.  */
+#define __lsx_bz_h(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bz_h ((v8u16)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV16QI.  */
+#define __lsx_bz_v(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bz_v ((v16u8)(_1)))
+
+/* Assembly instruction format:	cd, vj.  */
+/* Data types in instruction templates:  SI, UV4SI.  */
+#define __lsx_bz_w(/*__m128i*/ _1) \
+  ((int)__builtin_lsx_bz_w ((v4u32)(_1)))
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_caf_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_caf_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_caf_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_caf_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_ceq_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_ceq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_ceq_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_ceq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cle_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cle_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cle_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cle_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_clt_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_clt_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_clt_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_clt_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cne_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cne_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cne_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cne_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cor_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cor_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cor_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cor_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cueq_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cueq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cueq_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cueq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cule_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cule_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cule_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cule_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cult_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cult_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cult_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cult_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cun_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cun_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cune_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cune_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cune_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cune_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cun_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_cun_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_saf_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_saf_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_saf_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_saf_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_seq_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_seq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.seq.s: "equal" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f32 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_seq_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_seq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sle.d: "less than or equal" compare (signaling form per ISA
+   naming — confirm in LoongArch ISA manual); bit-casts the f64
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sle_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sle_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sle.s: "less than or equal" compare (signaling form per ISA
+   naming — confirm in LoongArch ISA manual); bit-casts the f32
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sle_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sle_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.slt.d: "less than" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f64 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_slt_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_slt_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.slt.s: "less than" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f32 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_slt_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_slt_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sne.d: "not equal" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f64 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sne_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sne_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sne.s: "not equal" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f32 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sne_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sne_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sor.d: "ordered" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f64 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sor_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sor_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sor.s: "ordered" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f32 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sor_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sor_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sueq.d: "unordered or equal" compare (signaling form per ISA
+   naming — confirm in LoongArch ISA manual); bit-casts the f64
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sueq_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sueq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sueq.s: "unordered or equal" compare (signaling form per ISA
+   naming — confirm in LoongArch ISA manual); bit-casts the f32
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sueq_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sueq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sule.d: "unordered or less-or-equal" compare (signaling form
+   per ISA naming — confirm in LoongArch ISA manual); bit-casts the
+   f64 operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sule_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sule_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sule.s: "unordered or less-or-equal" compare (signaling form
+   per ISA naming — confirm in LoongArch ISA manual); bit-casts the
+   f32 operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sule_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sule_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sult.d: "unordered or less-than" compare (signaling form per
+   ISA naming — confirm in LoongArch ISA manual); bit-casts the f64
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sult_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sult_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sult.s: "unordered or less-than" compare (signaling form per
+   ISA naming — confirm in LoongArch ISA manual); bit-casts the f32
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sult_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sult_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sun.d: "unordered" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f64 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sun_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sun_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V2DI, V2DF, V2DF.  */
+/* vfcmp.sune.d: "unordered or not equal" compare (signaling form per
+   ISA naming — confirm in LoongArch ISA manual); bit-casts the f64
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sune_d (__m128d _1, __m128d _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sune_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sune.s: "unordered or not equal" compare (signaling form per
+   ISA naming — confirm in LoongArch ISA manual); bit-casts the f32
+   operands and returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sune_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sune_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, vj, vk.  */
+/* Data types in instruction templates:  V4SI, V4SF, V4SF.  */
+/* vfcmp.sun.s: "unordered" compare (signaling form per ISA naming —
+   confirm in LoongArch ISA manual); bit-casts the f32 operands and
+   returns the builtin's integer lane mask.  */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sun_s (__m128 _1, __m128 _2)
+{
+  return (__m128i)__builtin_lsx_vfcmp_sun_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format:	vd, si10.  */
+/* Data types in instruction templates:  V16QI, HI.  */
+/* vrepli.b: splats the si10 immediate across the 8-bit lanes (per
+   mnemonic — confirm in LoongArch ISA manual); defined as a macro so
+   the builtin receives a compile-time constant argument.  */
+#define __lsx_vrepli_b(/*si10*/ _1) \
+  ((__m128i)__builtin_lsx_vrepli_b ((_1)))
+
+/* Assembly instruction format:	vd, si10.  */
+/* Data types in instruction templates:  V2DI, HI.  */
+/* vrepli.d: splats the si10 immediate across the 64-bit lanes (per
+   mnemonic — confirm in LoongArch ISA manual); defined as a macro so
+   the builtin receives a compile-time constant argument.  */
+#define __lsx_vrepli_d(/*si10*/ _1) \
+  ((__m128i)__builtin_lsx_vrepli_d ((_1)))
+
+/* Assembly instruction format:	vd, si10.  */
+/* Data types in instruction templates:  V8HI, HI.  */
+/* vrepli.h: splats the si10 immediate across the 16-bit lanes (per
+   mnemonic — confirm in LoongArch ISA manual); defined as a macro so
+   the builtin receives a compile-time constant argument.  */
+#define __lsx_vrepli_h(/*si10*/ _1) \
+  ((__m128i)__builtin_lsx_vrepli_h ((_1)))
+
+/* Assembly instruction format:	vd, si10.  */
+/* Data types in instruction templates:  V4SI, HI.  */
+/* vrepli.w: splats the si10 immediate across the 32-bit lanes (per
+   mnemonic — confirm in LoongArch ISA manual); defined as a macro so
+   the builtin receives a compile-time constant argument.  */
+#define __lsx_vrepli_w(/*si10*/ _1) \
+  ((__m128i)__builtin_lsx_vrepli_w ((_1)))
+
+#endif /* defined(__loongarch_sx) */
+#endif /* _GCC_LOONGSON_SXINTRIN_H */