1 //===-- SystemZOperators.td - SystemZ-specific operators ------*- tblgen-*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 //===----------------------------------------------------------------------===//
12 //===----------------------------------------------------------------------===//
// Type profiles describing the operand and result types of each
// SystemZ-specific SelectionDAG node defined further down.
// NOTE(review): several of these defs are missing continuation lines in
// this extract (the embedded line numbering jumps); the visible text is
// kept byte-for-byte and the missing operand lists must be restored from
// the original file before this can be fed to TableGen.
13 def SDT_CallSeqStart : SDCallSeqStart<[SDTCisVT<0, i64>]>;
14 def SDT_CallSeqEnd : SDCallSeqEnd<[SDTCisVT<0, i64>,
16 def SDT_ZCall : SDTypeProfile<0, -1, [SDTCisPtrTy<0>]>;
17 def SDT_ZCmp : SDTypeProfile<0, 2, [SDTCisSameAs<0, 1>]>;
18 def SDT_ZICmp : SDTypeProfile<0, 3,
21 def SDT_ZBRCCMask : SDTypeProfile<0, 3,
24 SDTCisVT<2, OtherVT>]>;
25 def SDT_ZSelectCCMask : SDTypeProfile<1, 4,
30 def SDT_ZWrapPtr : SDTypeProfile<1, 1,
33 def SDT_ZWrapOffset : SDTypeProfile<1, 2,
37 def SDT_ZAdjDynAlloc : SDTypeProfile<1, 0, [SDTCisVT<0, i64>]>;
38 def SDT_ZExtractAccess : SDTypeProfile<1, 1,
// The GR128 profiles produce an "untyped" 128-bit register pair result,
// as used by the 64x64->128 multiply and the divide-and-remainder nodes.
41 def SDT_ZGR128Binary32 : SDTypeProfile<1, 2,
42 [SDTCisVT<0, untyped>,
45 def SDT_ZGR128Binary64 : SDTypeProfile<1, 2,
46 [SDTCisVT<0, untyped>,
// Profiles for the word-sized (32-bit) atomic operations; the extra
// operands presumably carry the aligned address, bit shift and negated
// shift used to operate on a sub-word within an aligned word -- confirm
// against SystemZISelLowering.
49 def SDT_ZAtomicLoadBinaryW : SDTypeProfile<1, 5,
56 def SDT_ZAtomicCmpSwapW : SDTypeProfile<1, 6,
// Profiles for memory-to-memory block operations (MVC, NC, OC, XC, CLC)
// and for the string instructions.
64 def SDT_ZMemMemLength : SDTypeProfile<0, 3,
68 def SDT_ZMemMemLoop : SDTypeProfile<0, 4,
73 def SDT_ZString : SDTypeProfile<1, 3,
78 def SDT_ZI32Intrinsic : SDTypeProfile<1, 0, [SDTCisVT<0, i32>]>;
79 def SDT_ZPrefetch : SDTypeProfile<0, 2,
83 //===----------------------------------------------------------------------===//
85 //===----------------------------------------------------------------------===//
87 // These are target-independent nodes, but have target-specific formats.
// Call-sequence markers: chained, side-effecting, and glue-producing so
// they stay ordered around the call they bracket.
// NOTE(review): callseq_end's node-property list is cut short in this
// extract (continuation line missing).
88 def callseq_start : SDNode<"ISD::CALLSEQ_START", SDT_CallSeqStart,
89 [SDNPHasChain, SDNPSideEffect, SDNPOutGlue]>;
90 def callseq_end : SDNode<"ISD::CALLSEQ_END", SDT_CallSeqEnd,
91 [SDNPHasChain, SDNPSideEffect, SDNPOptInGlue,
94 // Nodes for SystemZISD::*. See SystemZISelLowering.h for more details.
// Return: chained, optionally consumes glue, variadic (return values).
95 def z_retflag : SDNode<"SystemZISD::RET_FLAG", SDTNone,
96 [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;
// Direct and sibling (tail) calls share the SDT_ZCall profile
// (pointer callee plus a variable number of argument operands).
97 def z_call : SDNode<"SystemZISD::CALL", SDT_ZCall,
98 [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
100 def z_sibcall : SDNode<"SystemZISD::SIBCALL", SDT_ZCall,
101 [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
// PC-relative address wrappers; no chain or glue, pure computation.
103 def z_pcrel_wrapper : SDNode<"SystemZISD::PCREL_WRAPPER", SDT_ZWrapPtr, []>;
104 def z_pcrel_offset : SDNode<"SystemZISD::PCREL_OFFSET",
105 SDT_ZWrapOffset, []>;
106 def z_iabs : SDNode<"SystemZISD::IABS", SDTIntUnaryOp, []>;
// The comparison and test-under-mask nodes produce their result in the
// condition code, modelled here as out-glue.
107 def z_icmp : SDNode<"SystemZISD::ICMP", SDT_ZICmp, [SDNPOutGlue]>;
108 def z_fcmp : SDNode<"SystemZISD::FCMP", SDT_ZCmp, [SDNPOutGlue]>;
109 def z_tm : SDNode<"SystemZISD::TM", SDT_ZICmp, [SDNPOutGlue]>;
// Conditional branch and select consume the condition code via in-glue.
110 def z_br_ccmask : SDNode<"SystemZISD::BR_CCMASK", SDT_ZBRCCMask,
111 [SDNPHasChain, SDNPInGlue]>;
112 def z_select_ccmask : SDNode<"SystemZISD::SELECT_CCMASK", SDT_ZSelectCCMask,
114 def z_adjdynalloc : SDNode<"SystemZISD::ADJDYNALLOC", SDT_ZAdjDynAlloc>;
115 def z_extract_access : SDNode<"SystemZISD::EXTRACT_ACCESS",
// 64x64->128 multiply and the 32/64-bit divide-and-remainder nodes all
// produce an untyped GR128 register-pair result.
117 def z_umul_lohi64 : SDNode<"SystemZISD::UMUL_LOHI64", SDT_ZGR128Binary64>;
118 def z_sdivrem32 : SDNode<"SystemZISD::SDIVREM32", SDT_ZGR128Binary32>;
119 def z_sdivrem64 : SDNode<"SystemZISD::SDIVREM64", SDT_ZGR128Binary64>;
120 def z_udivrem32 : SDNode<"SystemZISD::UDIVREM32", SDT_ZGR128Binary32>;
121 def z_udivrem64 : SDNode<"SystemZISD::UDIVREM64", SDT_ZGR128Binary64>;
// Memory barrier: chained and marked may-store so nothing is reordered
// across it.
123 def z_serialize : SDNode<"SystemZISD::SERIALIZE", SDTNone,
124 [SDNPHasChain, SDNPMayStore]>;
// Helper class for the word-sized (32-bit) atomic operation nodes.
// "name" is the SystemZISD opcode suffix; all instances are chained,
// may both load and store, and carry a MachineMemOperand.  The default
// profile suits the binary read-modify-write ops; compare-and-swap
// overrides it with SDT_ZAtomicCmpSwapW (one extra operand).
126 class AtomicWOp<string name, SDTypeProfile profile = SDT_ZAtomicLoadBinaryW>
127 : SDNode<"SystemZISD::"##name, profile,
128 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPMemOperand]>;
// One node per atomic RMW operation, plus swap and compare-and-swap.
130 def z_atomic_swapw : AtomicWOp<"ATOMIC_SWAPW">;
131 def z_atomic_loadw_add : AtomicWOp<"ATOMIC_LOADW_ADD">;
132 def z_atomic_loadw_sub : AtomicWOp<"ATOMIC_LOADW_SUB">;
133 def z_atomic_loadw_and : AtomicWOp<"ATOMIC_LOADW_AND">;
134 def z_atomic_loadw_or : AtomicWOp<"ATOMIC_LOADW_OR">;
135 def z_atomic_loadw_xor : AtomicWOp<"ATOMIC_LOADW_XOR">;
136 def z_atomic_loadw_nand : AtomicWOp<"ATOMIC_LOADW_NAND">;
137 def z_atomic_loadw_min : AtomicWOp<"ATOMIC_LOADW_MIN">;
138 def z_atomic_loadw_max : AtomicWOp<"ATOMIC_LOADW_MAX">;
139 def z_atomic_loadw_umin : AtomicWOp<"ATOMIC_LOADW_UMIN">;
140 def z_atomic_loadw_umax : AtomicWOp<"ATOMIC_LOADW_UMAX">;
141 def z_atomic_cmp_swapw : AtomicWOp<"ATOMIC_CMP_SWAPW", SDT_ZAtomicCmpSwapW>;
// Memory-to-memory block operations.  Each comes in two forms: a plain
// node for a known, small length (SDT_ZMemMemLength) and a "_LOOP"
// variant for lengths that need an expansion loop (SDT_ZMemMemLoop).
// MVC/NC/OC/XC both load and store; CLC only loads and delivers its
// result in the condition code (out-glue).
143 def z_mvc : SDNode<"SystemZISD::MVC", SDT_ZMemMemLength,
144 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
145 def z_mvc_loop : SDNode<"SystemZISD::MVC_LOOP", SDT_ZMemMemLoop,
146 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
147 def z_nc : SDNode<"SystemZISD::NC", SDT_ZMemMemLength,
148 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
149 def z_nc_loop : SDNode<"SystemZISD::NC_LOOP", SDT_ZMemMemLoop,
150 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
151 def z_oc : SDNode<"SystemZISD::OC", SDT_ZMemMemLength,
152 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
153 def z_oc_loop : SDNode<"SystemZISD::OC_LOOP", SDT_ZMemMemLoop,
154 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
155 def z_xc : SDNode<"SystemZISD::XC", SDT_ZMemMemLength,
156 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
157 def z_xc_loop : SDNode<"SystemZISD::XC_LOOP", SDT_ZMemMemLoop,
158 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
159 def z_clc : SDNode<"SystemZISD::CLC", SDT_ZMemMemLength,
160 [SDNPHasChain, SDNPOutGlue, SDNPMayLoad]>;
161 def z_clc_loop : SDNode<"SystemZISD::CLC_LOOP", SDT_ZMemMemLoop,
162 [SDNPHasChain, SDNPOutGlue, SDNPMayLoad]>;
// String operations (SDT_ZString profile).  STRCMP and SEARCH_STRING
// set the condition code (out-glue); STPCPY writes memory.
163 def z_strcmp : SDNode<"SystemZISD::STRCMP", SDT_ZString,
164 [SDNPHasChain, SDNPOutGlue, SDNPMayLoad]>;
165 def z_stpcpy : SDNode<"SystemZISD::STPCPY", SDT_ZString,
166 [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
167 def z_search_string : SDNode<"SystemZISD::SEARCH_STRING", SDT_ZString,
168 [SDNPHasChain, SDNPOutGlue, SDNPMayLoad]>;
// NOTE(review): z_ipm's and z_prefetch's property lists are cut short in
// this extract (continuation lines missing).
169 def z_ipm : SDNode<"SystemZISD::IPM", SDT_ZI32Intrinsic,
171 def z_prefetch : SDNode<"SystemZISD::PREFETCH", SDT_ZPrefetch,
172 [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
175 //===----------------------------------------------------------------------===//
177 //===----------------------------------------------------------------------===//
179 // Signed and unsigned comparisons.
// z_scmp/z_ucmp match a z_icmp whose third (immediate) operand -- the
// SystemZICMP kind -- permits a signed or an unsigned comparison
// instruction respectively.  Note the predicates are inclusive: an
// "any" comparison matches both fragments.
// NOTE(review): the closing "}]>;" lines of both fragments are missing
// from this extract.
180 def z_scmp : PatFrag<(ops node:$a, node:$b), (z_icmp node:$a, node:$b, imm), [{
181 unsigned Type = cast<ConstantSDNode>(N->getOperand(2))->getZExtValue();
182 return Type != SystemZICMP::UnsignedOnly;
184 def z_ucmp : PatFrag<(ops node:$a, node:$b), (z_icmp node:$a, node:$b, imm), [{
185 unsigned Type = cast<ConstantSDNode>(N->getOperand(2))->getZExtValue();
186 return Type != SystemZICMP::SignedOnly;
189 // Register- and memory-based TEST UNDER MASK.
// The third z_tm operand distinguishes the forms: any immediate for the
// register form, the constant 0 for the memory form.
190 def z_tm_reg : PatFrag<(ops node:$a, node:$b), (z_tm node:$a, node:$b, imm)>;
191 def z_tm_mem : PatFrag<(ops node:$a, node:$b), (z_tm node:$a, node:$b, 0)>;
193 // Register sign-extend operations. Sub-32-bit values are represented as i32s.
// sext8/sext16 sign-extend in place within a register via sext_inreg;
// sext32 widens an i32 to a larger type.
194 def sext8 : PatFrag<(ops node:$src), (sext_inreg node:$src, i8)>;
195 def sext16 : PatFrag<(ops node:$src), (sext_inreg node:$src, i16)>;
196 def sext32 : PatFrag<(ops node:$src), (sext (i32 node:$src))>;
198 // Register zero-extend operations. Sub-32-bit values are represented as i32s.
// Sub-word zero-extension is just an AND with the low-bit mask.
199 def zext8 : PatFrag<(ops node:$src), (and node:$src, 0xff)>;
200 def zext16 : PatFrag<(ops node:$src), (and node:$src, 0xffff)>;
201 def zext32 : PatFrag<(ops node:$src), (zext (i32 node:$src))>;
203 // Typed floating-point loads.
204 def loadf32 : PatFrag<(ops node:$src), (f32 (load node:$src))>;
205 def loadf64 : PatFrag<(ops node:$src), (f64 (load node:$src))>;
207 // Extending loads in which the extension type can be signed.
// "asextload" = any-or-sign-extending load; the i8/i16/i32 variants
// additionally constrain the in-memory type.
// NOTE(review): the closing "}]>;" lines of these fragments are missing
// from this extract.
208 def asextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{
209 unsigned Type = cast<LoadSDNode>(N)->getExtensionType();
210 return Type == ISD::EXTLOAD || Type == ISD::SEXTLOAD;
212 def asextloadi8 : PatFrag<(ops node:$ptr), (asextload node:$ptr), [{
213 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i8;
215 def asextloadi16 : PatFrag<(ops node:$ptr), (asextload node:$ptr), [{
216 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i16;
218 def asextloadi32 : PatFrag<(ops node:$ptr), (asextload node:$ptr), [{
219 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i32;
222 // Extending loads in which the extension type can be unsigned.
// "azextload" = any-or-zero-extending load; same structure as above.
223 def azextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{
224 unsigned Type = cast<LoadSDNode>(N)->getExtensionType();
225 return Type == ISD::EXTLOAD || Type == ISD::ZEXTLOAD;
227 def azextloadi8 : PatFrag<(ops node:$ptr), (azextload node:$ptr), [{
228 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i8;
230 def azextloadi16 : PatFrag<(ops node:$ptr), (azextload node:$ptr), [{
231 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i16;
233 def azextloadi32 : PatFrag<(ops node:$ptr), (azextload node:$ptr), [{
234 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i32;
237 // Extending loads in which the extension type doesn't matter.
// Matches every extending load (anything but NON_EXTLOAD).
238 def anyextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{
239 return cast<LoadSDNode>(N)->getExtensionType() != ISD::NON_EXTLOAD;
241 def anyextloadi8 : PatFrag<(ops node:$ptr), (anyextload node:$ptr), [{
242 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i8;
244 def anyextloadi16 : PatFrag<(ops node:$ptr), (anyextload node:$ptr), [{
245 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i16;
247 def anyextloadi32 : PatFrag<(ops node:$ptr), (anyextload node:$ptr), [{
248 return cast<LoadSDNode>(N)->getMemoryVT() == MVT::i32;
// Wraps a load fragment with a predicate requiring natural alignment:
// the access's alignment must be at least the store size of the
// in-memory type.
// NOTE(review): the closing "}]>;" lines of both classes are missing
// from this extract.
252 class AlignedLoad<SDPatternOperator load>
253 : PatFrag<(ops node:$addr), (load node:$addr), [{
254 auto *Load = cast<LoadSDNode>(N);
255 return Load->getAlignment() >= Load->getMemoryVT().getStoreSize();
257 def aligned_load : AlignedLoad<load>;
258 def aligned_asextloadi16 : AlignedLoad<asextloadi16>;
259 def aligned_asextloadi32 : AlignedLoad<asextloadi32>;
260 def aligned_azextloadi16 : AlignedLoad<azextloadi16>;
261 def aligned_azextloadi32 : AlignedLoad<azextloadi32>;
// Same naturally-aligned requirement, for stores.
264 class AlignedStore<SDPatternOperator store>
265 : PatFrag<(ops node:$src, node:$addr), (store node:$src, node:$addr), [{
266 auto *Store = cast<StoreSDNode>(N);
267 return Store->getAlignment() >= Store->getMemoryVT().getStoreSize();
269 def aligned_store : AlignedStore<store>;
270 def aligned_truncstorei16 : AlignedStore<truncstorei16>;
271 def aligned_truncstorei32 : AlignedStore<truncstorei32>;
273 // Non-volatile loads. Used for instructions that might access the storage
274 // location multiple times.
// NOTE(review): the closing "}]>;" lines of both classes are missing
// from this extract.
275 class NonvolatileLoad<SDPatternOperator load>
276 : PatFrag<(ops node:$addr), (load node:$addr), [{
277 auto *Load = cast<LoadSDNode>(N);
278 return !Load->isVolatile();
280 def nonvolatile_load : NonvolatileLoad<load>;
281 def nonvolatile_anyextloadi8 : NonvolatileLoad<anyextloadi8>;
282 def nonvolatile_anyextloadi16 : NonvolatileLoad<anyextloadi16>;
283 def nonvolatile_anyextloadi32 : NonvolatileLoad<anyextloadi32>;
285 // Non-volatile stores.
286 class NonvolatileStore<SDPatternOperator store>
287 : PatFrag<(ops node:$src, node:$addr), (store node:$src, node:$addr), [{
288 auto *Store = cast<StoreSDNode>(N);
289 return !Store->isVolatile();
291 def nonvolatile_store : NonvolatileStore<store>;
292 def nonvolatile_truncstorei8 : NonvolatileStore<truncstorei8>;
293 def nonvolatile_truncstorei16 : NonvolatileStore<truncstorei16>;
294 def nonvolatile_truncstorei32 : NonvolatileStore<truncstorei32>;
296 // A store of a load that can be implemented using MVC.
// The decision is delegated to the C++ helper storeLoadCanUseMVC.
297 def mvc_store : PatFrag<(ops node:$value, node:$addr),
298 (unindexedstore node:$value, node:$addr),
299 [{ return storeLoadCanUseMVC(N); }]>;
301 // Binary read-modify-write operations on memory in which the other
302 // operand is also memory and for which block operations like NC can
303 // be used. There are two patterns for each operator, depending on
304 // which operand contains the "other" load.
// Emits <prefix>1 (other load is the second operand of the operator)
// and <prefix>2 (other load is the first operand); the C++ helper's
// index selects which operand to treat as the "other" load.
// NOTE(review): both inner PatFrag patterns are missing continuation
// lines in this extract, as is the multiclass's closing brace.
305 multiclass block_op<SDPatternOperator operator> {
306 def "1" : PatFrag<(ops node:$value, node:$addr),
307 (unindexedstore (operator node:$value,
308 (unindexedload node:$addr)),
310 [{ return storeLoadCanUseBlockBinary(N, 0); }]>;
311 def "2" : PatFrag<(ops node:$value, node:$addr),
312 (unindexedstore (operator (unindexedload node:$addr),
315 [{ return storeLoadCanUseBlockBinary(N, 1); }]>;
// Instantiate the block patterns for AND, OR and XOR (NC, OC, XC).
317 defm block_and : block_op<and>;
318 defm block_or : block_op<or>;
319 defm block_xor : block_op<xor>;
// Insertion operations: mask away one field of $src1 and OR in $src2.
// The mask clears exactly the field being replaced; the l/h letters name
// the 16-bit (ll/lh/hl/hh) or 32-bit (lf/hf) portion of a 64-bit value,
// low to high.
322 def inserti8 : PatFrag<(ops node:$src1, node:$src2),
323 (or (and node:$src1, -256), node:$src2)>;
324 def insertll : PatFrag<(ops node:$src1, node:$src2),
325 (or (and node:$src1, 0xffffffffffff0000), node:$src2)>;
326 def insertlh : PatFrag<(ops node:$src1, node:$src2),
327 (or (and node:$src1, 0xffffffff0000ffff), node:$src2)>;
328 def inserthl : PatFrag<(ops node:$src1, node:$src2),
329 (or (and node:$src1, 0xffff0000ffffffff), node:$src2)>;
330 def inserthh : PatFrag<(ops node:$src1, node:$src2),
331 (or (and node:$src1, 0x0000ffffffffffff), node:$src2)>;
332 def insertlf : PatFrag<(ops node:$src1, node:$src2),
333 (or (and node:$src1, 0xffffffff00000000), node:$src2)>;
334 def inserthf : PatFrag<(ops node:$src1, node:$src2),
335 (or (and node:$src1, 0x00000000ffffffff), node:$src2)>;
337 // ORs that can be treated as insertions.
// Valid when the low 8 bits of operand 0 are known to be zero, so the
// OR simply inserts operand 1 into them.
// NOTE(review): the closing "}]>;" lines of both fragments are missing
// from this extract.
338 def or_as_inserti8 : PatFrag<(ops node:$src1, node:$src2),
339 (or node:$src1, node:$src2), [{
340 unsigned BitWidth = N->getValueType(0).getScalarType().getSizeInBits();
341 return CurDAG->MaskedValueIsZero(N->getOperand(0),
342 APInt::getLowBitsSet(BitWidth, 8));
345 // ORs that can be treated as reversed insertions.
// Same idea with the operands swapped: operand 1's low 8 bits are zero.
346 def or_as_revinserti8 : PatFrag<(ops node:$src1, node:$src2),
347 (or node:$src1, node:$src2), [{
348 unsigned BitWidth = N->getValueType(0).getScalarType().getSizeInBits();
349 return CurDAG->MaskedValueIsZero(N->getOperand(1),
350 APInt::getLowBitsSet(BitWidth, 8));
353 // Negative integer absolute.
354 def z_inegabs : PatFrag<(ops node:$src), (ineg (z_iabs node:$src))>;
356 // Integer absolute, matching the canonical form generated by DAGCombiner.
// abs(x) == (x + (x >> N-1)) ^ (x >> N-1), where the arithmetic shift
// replicates the sign bit (31 for i32, 63 for i64).
357 def z_iabs32 : PatFrag<(ops node:$src),
358 (xor (add node:$src, (sra node:$src, (i32 31))),
359 (sra node:$src, (i32 31)))>;
360 def z_iabs64 : PatFrag<(ops node:$src),
361 (xor (add node:$src, (sra node:$src, (i32 63))),
362 (sra node:$src, (i32 63)))>;
// Negated forms of the canonical absolute patterns above.
363 def z_inegabs32 : PatFrag<(ops node:$src), (ineg (z_iabs32 node:$src))>;
364 def z_inegabs64 : PatFrag<(ops node:$src), (ineg (z_iabs64 node:$src))>;
366 // Fused multiply-add and multiply-subtract, but with the order of the
367 // operands matching SystemZ's MA and MS instructions.
// i.e. $src1 is the addend/subtrahend, $src2 * $src3 the product.
368 def z_fma : PatFrag<(ops node:$src1, node:$src2, node:$src3),
369 (fma node:$src2, node:$src3, node:$src1)>;
370 def z_fms : PatFrag<(ops node:$src1, node:$src2, node:$src3),
371 (fma node:$src2, node:$src3, (fneg node:$src1))>;
373 // Floating-point negative absolute.
374 def fnabs : PatFrag<(ops node:$ptr), (fneg (fabs node:$ptr))>;
376 // Create a unary operator that loads from memory and then performs
377 // the given operation on it.
// The load fragment defaults to a plain "load" but can be overridden
// (e.g. with an extending or aligned variant).
378 class loadu<SDPatternOperator operator, SDPatternOperator load = load>
379 : PatFrag<(ops node:$addr), (operator (load node:$addr))>;
381 // Create a store operator that performs the given unary operation
382 // on the value before storing it.
// Mirror image of loadu: operate first, then store; the store fragment
// defaults to a plain "store".
383 class storeu<SDPatternOperator operator, SDPatternOperator store = store>
384 : PatFrag<(ops node:$value, node:$addr),
385 (store (operator node:$value), node:$addr)>;