[PATCH] D75858: [AArch64][SVE] Add SVE intrinsics for address calculations

2020-03-10 Thread Kerry McLaughlin via Phabricator via cfe-commits
This revision was automatically updated to reflect the committed changes.
Closed by commit rG0bba37a32024: [AArch64][SVE] Add SVE intrinsics for address 
calculations (authored by kmclaughlin).

Repository:
  rG LLVM Github Monorepo

CHANGES SINCE LAST ACTION
  https://reviews.llvm.org/D75858/new/

https://reviews.llvm.org/D75858

Files:
  llvm/include/llvm/IR/IntrinsicsAArch64.td
  llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
  llvm/test/CodeGen/AArch64/sve-intrinsics-adr.ll

Index: llvm/test/CodeGen/AArch64/sve-intrinsics-adr.ll
===
--- /dev/null
+++ llvm/test/CodeGen/AArch64/sve-intrinsics-adr.ll
@@ -0,0 +1,101 @@
+; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve -verify-machineinstrs < %s | FileCheck %s
+
+;
+; ADRB
+;
+
+define <vscale x 4 x i32> @adrb_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: adrb_i32:
+; CHECK: adr z0.s, [z0.s, z1.s]
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.adrb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+define <vscale x 2 x i64> @adrb_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
+; CHECK-LABEL: adrb_i64:
+; CHECK: adr z0.d, [z0.d, z1.d]
+; CHECK-NEXT: ret
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.adrb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                <vscale x 2 x i64> %b)
+  ret <vscale x 2 x i64> %out
+}
+
+;
+; ADRH
+;
+
+define <vscale x 4 x i32> @adrh_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: adrh_i32:
+; CHECK: adr z0.s, [z0.s, z1.s, lsl #1]
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.adrh.nxv4i32(<vscale x 4 x i32> %a,
+                                                                <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+define <vscale x 2 x i64> @adrh_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
+; CHECK-LABEL: adrh_i64:
+; CHECK: adr z0.d, [z0.d, z1.d, lsl #1]
+; CHECK-NEXT: ret
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.adrh.nxv2i64(<vscale x 2 x i64> %a,
+                                                                <vscale x 2 x i64> %b)
+  ret <vscale x 2 x i64> %out
+}
+
+;
+; ADRW
+;
+
+define <vscale x 4 x i32> @adrw_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: adrw_i32:
+; CHECK: adr z0.s, [z0.s, z1.s, lsl #2]
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.adrw.nxv4i32(<vscale x 4 x i32> %a,
+                                                                <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+define <vscale x 2 x i64> @adrw_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
+; CHECK-LABEL: adrw_i64:
+; CHECK: adr z0.d, [z0.d, z1.d, lsl #2]
+; CHECK-NEXT: ret
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.adrw.nxv2i64(<vscale x 2 x i64> %a,
+                                                                <vscale x 2 x i64> %b)
+  ret <vscale x 2 x i64> %out
+}
+
+;
+; ADRD
+;
+
+define <vscale x 4 x i32> @adrd_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: adrd_i32:
+; CHECK: adr z0.s, [z0.s, z1.s, lsl #3]
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.adrd.nxv4i32(<vscale x 4 x i32> %a,
+                                                                <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+define <vscale x 2 x i64> @adrd_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
+; CHECK-LABEL: adrd_i64:
+; CHECK: adr z0.d, [z0.d, z1.d, lsl #3]
+; CHECK-NEXT: ret
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.adrd.nxv2i64(<vscale x 2 x i64> %a,
+                                                                <vscale x 2 x i64> %b)
+  ret <vscale x 2 x i64> %out
+}
+
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adrb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adrb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
+
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adrh.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adrh.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
+
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adrw.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adrw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
+
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adrd.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adrd.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
Index: llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
===
--- llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -917,6 +917,24 @@
   defm ADR_LSL_ZZZ_S  : sve_int_bin_cons_misc_0_a_32_lsl<0b10, "adr">;
   defm ADR_LSL_ZZZ_D  : sve_int_bin_cons_misc_0_a_64_lsl<0b11, "adr">;
 
+  def : Pat<(nxv4i32 (int_aarch64_sve_adrb nxv4i32:$Op1, nxv4i32:$Op2)),
+            (ADR_LSL_ZZZ_S_0 $Op1, $Op2)>;
+  def : Pat<(nxv4i32 (int_aarch64_sve_adrh nxv4i32:$Op1, nxv4i32:$Op2)),
+            (ADR_LSL_ZZZ_S_1 $Op1, $Op2)>;
+  def : Pat<(nxv4i32 (int_aarch64_sve_adrw nxv4i32:$Op1, nxv4i32:$Op2)),
+            (ADR_LSL_ZZZ_S_2 $Op1, $Op2)>;
+  def : Pat<(nxv4i32 (int_aarch64_sve_adrd nxv4i32:$Op1, nxv4i32:$Op2)),
+            (ADR_LSL_ZZZ_S_3 $Op1, $Op2)>;
+
+  def : Pat<(nxv2i64 (int_aarch64_sve_adrb nxv2i64:$Op1, nxv2i64:$Op2)),
+            (ADR_LSL_ZZZ_D_0 $Op1, $Op2)>;
+  def : Pat<(nxv2i64 (int_aarch64_sve_adrh nxv2i64:$Op1, nxv2i64:$Op2)),
+            (ADR_LSL_ZZZ_D_1 $Op1, $Op2)>;
+  def : Pat<(nxv2i64 (int_aarch64_sve_adrw nxv2i64:$Op1, nxv2i64:$Op2)),
+            (ADR_LSL_ZZZ_D_2 $Op1, $Op2)>;
+  def : Pat<(nxv2i64 (int_aarch64_sve_adrd nxv2i64:$Op1, nxv2i64:$Op2)),
+            (ADR_LSL_ZZZ_D_3 $Op1, $Op2)>;
+
   defm TBL_ZZZ  : sve_int_perm_tbl<"tbl", AArch64tbl>;
 
   defm ZIP1_ZZZ : sve_int_perm_bin_perm_zz<0b000, "zip1", AArch64zip1>;
Index: llvm/include/llvm/IR/IntrinsicsAArch64.td
===
--- llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -1286,6 +1286,15 @@
 def int_aarch64_sve_index : 

[PATCH] D75858: [AArch64][SVE] Add SVE intrinsics for address calculations

2020-03-09 Thread Sander de Smalen via Phabricator via cfe-commits
sdesmalen accepted this revision.
sdesmalen added a comment.
This revision is now accepted and ready to land.

LGTM!


Repository:
  rG LLVM Github Monorepo

CHANGES SINCE LAST ACTION
  https://reviews.llvm.org/D75858/new/

https://reviews.llvm.org/D75858





[PATCH] D75858: [AArch64][SVE] Add SVE intrinsics for address calculations

2020-03-09 Thread Kerry McLaughlin via Phabricator via cfe-commits
kmclaughlin created this revision.
kmclaughlin added reviewers: sdesmalen, andwar, efriedma, dancgr, 
cameron.mcinally.
Herald added subscribers: danielkiss, psnobl, rkruppe, hiraditya, 
kristof.beyls, tschuett.
Herald added a reviewer: rengolin.
Herald added a project: LLVM.

Adds the @llvm.aarch64.sve.adr[b|h|w|d] intrinsics, which are lowered to the SVE
ADR instruction with an element shift (lsl) of #0, #1, #2 and #3 respectively.
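
For illustration only (not part of the patch): a minimal IR sketch of how a
frontend might use one of these intrinsics. The function and value names below
are hypothetical; only the intrinsic signature exercised in
sve-intrinsics-adr.ll is assumed. adrw computes, per lane, bases + (indices << 2):

  define <vscale x 2 x i64> @word_addresses(<vscale x 2 x i64> %bases, <vscale x 2 x i64> %indices) {
    ; each lane yields %bases[i] + (%indices[i] << 2), a word-scaled address
    %addrs = call <vscale x 2 x i64> @llvm.aarch64.sve.adrw.nxv2i64(<vscale x 2 x i64> %bases,
                                                                    <vscale x 2 x i64> %indices)
    ret <vscale x 2 x i64> %addrs
  }

  declare <vscale x 2 x i64> @llvm.aarch64.sve.adrw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

With -mattr=+sve this is expected to select to "adr z0.d, [z0.d, z1.d, lsl #2]",
matching the adrw_i64 test above.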


Repository:
  rG LLVM Github Monorepo

https://reviews.llvm.org/D75858

Files:
  llvm/include/llvm/IR/IntrinsicsAArch64.td
  llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
  llvm/test/CodeGen/AArch64/sve-intrinsics-adr.ll
