Author: Ikhlas Ajbar
Date: 2025-01-14T12:21:40-06:00
New Revision: 71f238a221ff307a1c079678b85ef3abe43c71ab
URL: https://github.com/llvm/llvm-project/commit/71f238a221ff307a1c079678b85ef3abe43c71ab
DIFF: https://github.com/llvm/llvm-project/commit/71f238a221ff307a1c079678b85ef3abe43c71ab.diff

LOG: [Hexagon] Add missing builtins for V79 (#122916)

This patch adds new builtins that were added in V79 architecture.

Added:

Modified:
    clang/include/clang/Basic/BuiltinsHexagonDep.def
    llvm/lib/Target/Hexagon/HexagonDepInstrInfo.td
    llvm/lib/Target/Hexagon/HexagonDepMapAsm2Intrin.td
    llvm/lib/Target/Hexagon/HexagonSubtarget.h

Removed:


################################################################################
diff --git a/clang/include/clang/Basic/BuiltinsHexagonDep.def b/clang/include/clang/Basic/BuiltinsHexagonDep.def
index 6f1ae69037e3a3..616ff3ccf5b6b0 100644
--- a/clang/include/clang/Basic/BuiltinsHexagonDep.def
+++ b/clang/include/clang/Basic/BuiltinsHexagonDep.def
@@ -1923,3 +1923,48 @@ TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_sf_bf_acc, "V32iV32iV16iV16i", "", HVXV
 TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_sf_bf_acc_128B, "V64iV64iV32iV32i", "", HVXV73)
 TARGET_BUILTIN(__builtin_HEXAGON_V6_vsub_sf_bf, "V32iV16iV16i", "", HVXV73)
 TARGET_BUILTIN(__builtin_HEXAGON_V6_vsub_sf_bf_128B, "V64iV32iV32i", "", HVXV73)
+
+// V79 HVX Instructions.
+
+TARGET_BUILTIN(__builtin_HEXAGON_V6_get_qfext, "V16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_get_qfext_128B, "V32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_get_qfext_oracc, "V16iV16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_get_qfext_oracc_128B, "V32iV32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_set_qfext, "V16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_set_qfext_128B, "V32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vabs_f8, "V16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vabs_f8_128B, "V32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vadd_hf_f8, "V32iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vadd_hf_f8_128B, "V64iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_b_hf, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_b_hf_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_hf_b, "V32iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_hf_b_128B, "V64iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_hf_ub, "V32iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_hf_ub_128B, "V64iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_ub_hf, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt2_ub_hf_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt_f8_hf, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt_f8_hf_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt_hf_f8, "V32iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vcvt_hf_f8_128B, "V64iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfmax_f8, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfmax_f8_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfmin_f8, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfmin_f8_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfneg_f8, "V16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vfneg_f8_128B, "V32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmerge_qf, "V16iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmerge_qf_128B, "V32iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_hf_f8, "V32iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_hf_f8_128B, "V64iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_hf_f8_acc, "V32iV32iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_hf_f8_acc_128B, "V64iV64iV32iV32i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_hf, "V16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_hf_128B, "V32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_qf16, "V16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_qf16_128B, "V32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_sf, "V16iV16ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vmpy_rt_sf_128B, "V32iV32ii", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vsub_hf_f8, "V32iV16iV16i", "", HVXV79)
+TARGET_BUILTIN(__builtin_HEXAGON_V6_vsub_hf_f8_128B, "V64iV32iV32i", "", HVXV79)
diff --git a/llvm/lib/Target/Hexagon/HexagonDepInstrInfo.td b/llvm/lib/Target/Hexagon/HexagonDepInstrInfo.td
index 7935c4b86af535..ae96753f40cf2c 100644
--- a/llvm/lib/Target/Hexagon/HexagonDepInstrInfo.td
+++ b/llvm/lib/Target/Hexagon/HexagonDepInstrInfo.td
@@ -26853,6 +26853,21 @@ let isPseudo = 1;
 let isCodeGenOnly = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_get_qfext_oracc : HInst<
+(outs HvxVR:$Vx32),
+(ins HvxVR:$Vx32in, HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vx32 |= vgetqfext($Vu32.x,$Rt32)",
+tc_b091f1c6, TypeCVI_VX>, Enc_5138b3, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b110;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011001110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isAccumulator = 1;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+let Constraints = "$Vx32 = $Vx32in";
+}
 def V6_hi : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxWR:$Vss32),
@@ -27200,6 +27215,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_set_qfext : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vd32.x = vsetqfext($Vu32,$Rt32)",
+tc_b091f1c6, TypeCVI_VX>, Enc_b087ac, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b011;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011001110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_shuffeqh : HInst<
 (outs HvxQR:$Qd4),
 (ins HvxQR:$Qs4, HvxQR:$Qt4),
@@ -27469,6 +27497,19 @@ let isPseudo = 1;
 let isCodeGenOnly = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_get_qfext : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vd32 = vgetqfext($Vu32.x,$Rt32)",
+tc_b091f1c6, TypeCVI_VX>, Enc_b087ac, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011001110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_v6mpyvubs10_vxx : HInst<
 (outs HvxWR:$Vxx32),
 (ins HvxWR:$Vxx32in, HvxWR:$Vuu32, HvxWR:$Vvv32, u2_0Imm:$Ii),
@@ -29872,6 +29913,19 @@ let mayStore = 1;
 let DecoderNamespace = "EXT_mmvec";
 let Constraints = "$Rx32 = $Rx32in";
 }
+def V6_vabs_f8 : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32),
+"$Vd32.f8 = vabs($Vu32.f8)",
+tc_5cdf8c84, TypeCVI_VX_LATE>, Enc_e7581c, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b110;
+let Inst{13-13} = 0b1;
+let Inst{31-16} = 0b0001110001100110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vabs_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32),
@@ -30209,6 +30263,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vadd_hf_f8 : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vdd32.hf = vadd($Vu32.f8,$Vv32.f8)",
+tc_d8287c14, TypeCVI_VX_DV>, Enc_71bb9b, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b100;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011111100;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vadd_hf_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -32607,6 +32674,58 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vcvt2_b_hf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32.b = vcvt2($Vu32.hf,$Vv32.hf)",
+tc_c127de3a, TypeCVI_VX>, Enc_45364e, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b110;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011010110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
+def V6_vcvt2_hf_b : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32),
+"$Vdd32.hf = vcvt2($Vu32.b)",
+tc_c127de3a, TypeCVI_VX_DV>, Enc_dd766a, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b110;
+let Inst{13-13} = 0b1;
+let Inst{31-16} = 0b0001111011010101;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
+def V6_vcvt2_hf_ub : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32),
+"$Vdd32.hf = vcvt2($Vu32.ub)",
+tc_c127de3a, TypeCVI_VX_DV>, Enc_dd766a, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b1;
+let Inst{31-16} = 0b0001111011010101;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
+def V6_vcvt2_ub_hf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32.ub = vcvt2($Vu32.hf,$Vv32.hf)",
+tc_c127de3a, TypeCVI_VX>, Enc_45364e, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011010110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vcvt_b_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -32633,6 +32752,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vcvt_f8_hf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32.f8 = vcvt($Vu32.hf,$Vv32.hf)",
+tc_c127de3a, TypeCVI_VX>, Enc_45364e, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b010;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011111111;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vcvt_h_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32),
@@ -32659,6 +32791,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vcvt_hf_f8 : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32),
+"$Vdd32.hf = vcvt($Vu32.f8)",
+tc_c127de3a, TypeCVI_VX_DV>, Enc_dd766a, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b101;
+let Inst{13-13} = 0b1;
+let Inst{31-16} = 0b0001111000000101;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vcvt_hf_h : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32),
@@ -33633,6 +33778,19 @@ let isHVXALU2SRC = 1;
 let DecoderNamespace = "EXT_mmvec";
 let Constraints = "$Qx4 = $Qx4in";
 }
+def V6_vfmax_f8 : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32.f8 = vfmax($Vu32.f8,$Vv32.f8)",
+tc_cda936da, TypeCVI_VX_LATE>, Enc_45364e, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b101;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011100011;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vfmax_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -33659,6 +33817,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vfmin_f8 : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32.f8 = vfmin($Vu32.f8,$Vv32.f8)",
+tc_cda936da, TypeCVI_VX_LATE>, Enc_45364e, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b100;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011100011;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vfmin_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -33685,6 +33856,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vfneg_f8 : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32),
+"$Vd32.f8 = vfneg($Vu32.f8)",
+tc_5cdf8c84, TypeCVI_VX_LATE>, Enc_e7581c, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b1;
+let Inst{31-16} = 0b0001110001100110;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vfneg_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32),
@@ -34830,6 +35014,19 @@ let isPseudo = 1;
 let isCodeGenOnly = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vmerge_qf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vd32 = vmerge($Vu32.x,$Vv32.w)",
+tc_05ca8cfd, TypeCVI_VS>, Enc_45364e, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011111000;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vmin_bf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -35316,6 +35513,21 @@ let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 let Constraints = "$Vx32 = $Vx32in";
 }
+def V6_vmpy_hf_f8_acc : HInst<
+(outs HvxWR:$Vxx32),
+(ins HvxWR:$Vxx32in, HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vxx32.hf += vmpy($Vu32.f8,$Vv32.f8)",
+tc_08a4f1b6, TypeCVI_VX_DV>, Enc_3fc427, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b111;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011111100;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isAccumulator = 1;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+let Constraints = "$Vxx32 = $Vxx32in";
+}
 def V6_vmpy_hf_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -35448,6 +35660,45 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vmpy_rt_hf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vd32.qf16 = vmpy($Vu32.hf,$Rt32.hf)",
+tc_0b04c6c7, TypeCVI_VX_DV>, Enc_b087ac, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b011;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011010000;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
+def V6_vmpy_rt_qf16 : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vd32.qf16 = vmpy($Vu32.qf16,$Rt32.hf)",
+tc_0b04c6c7, TypeCVI_VX_DV>, Enc_b087ac, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b010;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011010000;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
+def V6_vmpy_rt_sf : HInst<
+(outs HvxVR:$Vd32),
+(ins HvxVR:$Vu32, IntRegs:$Rt32),
+"$Vd32.qf32 = vmpy($Vu32.sf,$Rt32.sf)",
+tc_0b04c6c7, TypeCVI_VX_DV>, Enc_b087ac, Requires<[UseHVXV79,UseHVXQFloat]> {
+let Inst{7-5} = 0b001;
+let Inst{13-13} = 0b1;
+let Inst{31-21} = 0b00011010000;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vmpy_sf_bf : HInst<
 (outs HvxWR:$Vdd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -36685,6 +36936,19 @@ let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 let Constraints = "$Vx32 = $Vx32in";
 }
+def V6_vmpy_hf_f8 : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vdd32.hf = vmpy($Vu32.f8,$Vv32.f8)",
+tc_d8287c14, TypeCVI_VX_DV>, Enc_71bb9b, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b110;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011111100;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vmpyuhv : HInst<
 (outs HvxWR:$Vdd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
@@ -38888,6 +39152,19 @@ let opNewValue = 0;
 let isCVI = 1;
 let DecoderNamespace = "EXT_mmvec";
 }
+def V6_vsub_hf_f8 : HInst<
+(outs HvxWR:$Vdd32),
+(ins HvxVR:$Vu32, HvxVR:$Vv32),
+"$Vdd32.hf = vsub($Vu32.f8,$Vv32.f8)",
+tc_d8287c14, TypeCVI_VX_DV>, Enc_71bb9b, Requires<[UseHVXV79,UseHVXIEEEFP]> {
+let Inst{7-5} = 0b101;
+let Inst{13-13} = 0b0;
+let Inst{31-21} = 0b00011111100;
+let hasNewValue = 1;
+let opNewValue = 0;
+let isCVI = 1;
+let DecoderNamespace = "EXT_mmvec";
+}
 def V6_vsub_hf_hf : HInst<
 (outs HvxVR:$Vd32),
 (ins HvxVR:$Vu32, HvxVR:$Vv32),
diff --git a/llvm/lib/Target/Hexagon/HexagonDepMapAsm2Intrin.td b/llvm/lib/Target/Hexagon/HexagonDepMapAsm2Intrin.td
index c1a90ee281d9d4..17cb96cdee9f90 100644
--- a/llvm/lib/Target/Hexagon/HexagonDepMapAsm2Intrin.td
+++ b/llvm/lib/Target/Hexagon/HexagonDepMapAsm2Intrin.td
@@ -3740,3 +3740,90 @@ def: Pat<(int_hexagon_V6_vsub_sf_bf HvxVR:$src1, HvxVR:$src2),
          (V6_vsub_sf_bf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV73, UseHVX64B]>;
 def: Pat<(int_hexagon_V6_vsub_sf_bf_128B HvxVR:$src1, HvxVR:$src2),
          (V6_vsub_sf_bf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV73, UseHVX128B]>;
+
+// V79 HVX Instructions.
+
+def: Pat<(int_hexagon_V6_get_qfext HvxVR:$src1, IntRegs:$src2),
+         (V6_get_qfext HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_get_qfext_128B HvxVR:$src1, IntRegs:$src2),
+         (V6_get_qfext HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_get_qfext_oracc HvxVR:$src1, HvxVR:$src2, IntRegs:$src3),
+         (V6_get_qfext_oracc HvxVR:$src1, HvxVR:$src2, IntRegs:$src3)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_get_qfext_oracc_128B HvxVR:$src1, HvxVR:$src2, IntRegs:$src3),
+         (V6_get_qfext_oracc HvxVR:$src1, HvxVR:$src2, IntRegs:$src3)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_set_qfext HvxVR:$src1, IntRegs:$src2),
+         (V6_set_qfext HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_set_qfext_128B HvxVR:$src1, IntRegs:$src2),
+         (V6_set_qfext HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vabs_f8 HvxVR:$src1),
+         (V6_vabs_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vabs_f8_128B HvxVR:$src1),
+         (V6_vabs_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vadd_hf_f8 HvxVR:$src1, HvxVR:$src2),
+         (V6_vadd_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vadd_hf_f8_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vadd_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt2_b_hf HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt2_b_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt2_b_hf_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt2_b_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt2_hf_b HvxVR:$src1),
+         (V6_vcvt2_hf_b HvxVR:$src1)>, Requires<[UseHVXV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt2_hf_b_128B HvxVR:$src1),
+         (V6_vcvt2_hf_b HvxVR:$src1)>, Requires<[UseHVXV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt2_hf_ub HvxVR:$src1),
+         (V6_vcvt2_hf_ub HvxVR:$src1)>, Requires<[UseHVXV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt2_hf_ub_128B HvxVR:$src1),
+         (V6_vcvt2_hf_ub HvxVR:$src1)>, Requires<[UseHVXV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt2_ub_hf HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt2_ub_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt2_ub_hf_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt2_ub_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt_f8_hf HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt_f8_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt_f8_hf_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vcvt_f8_hf HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vcvt_hf_f8 HvxVR:$src1),
+         (V6_vcvt_hf_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vcvt_hf_f8_128B HvxVR:$src1),
+         (V6_vcvt_hf_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vfmax_f8 HvxVR:$src1, HvxVR:$src2),
+         (V6_vfmax_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vfmax_f8_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vfmax_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vfmin_f8 HvxVR:$src1, HvxVR:$src2),
+         (V6_vfmin_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vfmin_f8_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vfmin_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vfneg_f8 HvxVR:$src1),
+         (V6_vfneg_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vfneg_f8_128B HvxVR:$src1),
+         (V6_vfneg_f8 HvxVR:$src1)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vmerge_qf HvxVR:$src1, HvxVR:$src2),
+         (V6_vmerge_qf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmerge_qf_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vmerge_qf HvxVR:$src1, HvxVR:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_hf_f8 HvxVR:$src1, HvxVR:$src2),
+         (V6_vmpy_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vmpy_hf_f8_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vmpy_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vmpy_hf_f8_acc HvxWR:$src1, HvxVR:$src2, HvxVR:$src3),
+         (V6_vmpy_hf_f8_acc HvxWR:$src1, HvxVR:$src2, HvxVR:$src3)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vmpy_hf_f8_acc_128B HvxWR:$src1, HvxVR:$src2, HvxVR:$src3),
+         (V6_vmpy_hf_f8_acc HvxWR:$src1, HvxVR:$src2, HvxVR:$src3)>, Requires<[HasV79, UseHVX128B]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_hf HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_hf HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_hf_128B HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_hf HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_qf16 HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_qf16 HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_qf16_128B HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_qf16 HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_sf HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_sf HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX64B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vmpy_rt_sf_128B HvxVR:$src1, IntRegs:$src2),
+         (V6_vmpy_rt_sf HvxVR:$src1, IntRegs:$src2)>, Requires<[UseHVXV79, UseHVX128B, UseHVXQFloat]>;
+def: Pat<(int_hexagon_V6_vsub_hf_f8 HvxVR:$src1, HvxVR:$src2),
+         (V6_vsub_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX64B]>;
+def: Pat<(int_hexagon_V6_vsub_hf_f8_128B HvxVR:$src1, HvxVR:$src2),
+         (V6_vsub_hf_f8 HvxVR:$src1, HvxVR:$src2)>, Requires<[HasV79, UseHVX128B]>;
diff --git a/llvm/lib/Target/Hexagon/HexagonSubtarget.h b/llvm/lib/Target/Hexagon/HexagonSubtarget.h
index 36ec3f949c6d15..41555db4ac662a 100644
--- a/llvm/lib/Target/Hexagon/HexagonSubtarget.h
+++ b/llvm/lib/Target/Hexagon/HexagonSubtarget.h
@@ -222,6 +222,9 @@ class HexagonSubtarget : public HexagonGenSubtargetInfo {
   bool hasV79OpsOnly() const {
     return getHexagonArchVersion() == Hexagon::ArchEnum::V79;
   }
+  bool useHVXV79Ops() const {
+    return HexagonHVXVersion >= Hexagon::ArchEnum::V79;
+  }
 
   bool useAudioOps() const { return UseAudioOps; }
   bool useCompound() const { return UseCompound; }

_______________________________________________
cfe-commits mailing list
cfe-commits@lists.llvm.org
https://lists.llvm.org/cgi-bin/mailman/listinfo/cfe-commits
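
P.S. For reference, a minimal C sketch of how one of the newly exposed builtins can be
called from source (a hypothetical example, not part of this commit; the typedef and the
driver flags below come from existing Hexagon/HVX support and are assumptions, not
something this patch introduces):

// Build with something like:
//   clang --target=hexagon -mv79 -mhvx -mhvx-length=128B -c example.c
// The _128B builtin variants operate on 1024-bit HVX vectors; the signature
// "V32iV32i" in BuiltinsHexagonDep.def corresponds to a <32 x i32> vector.
typedef int HVX_Vector __attribute__((__vector_size__(128)));

HVX_Vector abs_f8(HVX_Vector v) {
  // New in V79: lane-wise absolute value over the 8-bit float elements.
  return __builtin_HEXAGON_V6_vabs_f8_128B(v);
}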