1 //===-- FLATInstructions.td - FLAT Instruction Definitions ----------------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
// Address-mode selection patterns for the three FLAT address variants
// (flat, global, scratch). The 2-/3-operand ComplexPatterns split an
// address into base (+ optional saddr) + immediate offset; the negative
// complexity (-10) biases ISel toward more specific patterns when both
// could match. Selection is implemented in AMDGPUISelDAGToDAG.
9 def FlatOffset : ComplexPattern<iPTR, 2, "SelectFlatOffset", [], [SDNPWantRoot], -10>;
10 def GlobalOffset : ComplexPattern<iPTR, 2, "SelectGlobalOffset", [], [SDNPWantRoot], -10>;
11 def ScratchOffset : ComplexPattern<iPTR, 2, "SelectScratchOffset", [], [SDNPWantRoot], -10>;
// SAddr variants additionally select a scalar (SGPR) base register.
13 def GlobalSAddr : ComplexPattern<iPTR, 3, "SelectGlobalSAddr", [], [SDNPWantRoot], -10>;
14 def ScratchSAddr : ComplexPattern<iPTR, 2, "SelectScratchSAddr", [], [SDNPWantRoot], -10>;
16 //===----------------------------------------------------------------------===//
18 //===----------------------------------------------------------------------===//
// Base class for all FLAT/GLOBAL/SCRATCH pseudo instructions. Carries the
// encoding-independent operand/flag state; the concrete per-subtarget
// encodings derive from FLAT_Real below.
20 class FLAT_Pseudo<string opName, dag outs, dag ins,
21 string asmOps, list<dag> pattern=[]> :
22 InstSI<outs, ins, "", pattern>,
23 SIMCInstr<opName, SIEncodingFamily.NONE> {
26 let isCodeGenOnly = 1;
30 let UseNamedOperandTable = 1;
31 let hasSideEffects = 0;
32 let SchedRW = [WriteVMEM];
34 string Mnemonic = opName;
35 string AsmOperands = asmOps;
// Segment selectors consumed by subclasses and by FLAT_Real's seg field.
37 bits<1> is_flat_global = 0;
38 bits<1> is_flat_scratch = 0;
42 // We need to distinguish having saddr and enabling saddr because
43 // saddr is only valid for scratch and global instructions. Pre-gfx9
44 // these bits were reserved, so we also don't necessarily want to
45 // set these bits to the disabled value for the original flat
46 // segment instructions.
47 bits<1> has_saddr = 0;
48 bits<1> enabled_saddr = 0;
49 bits<7> saddr_value = 0;
50 bits<1> has_vaddr = 1;
54 bits<1> sccbValue = 0;
// Pick the predicate from the segment: global/scratch forms require their
// dedicated instruction-set features; plain flat needs flat addressing.
60 let SubtargetPredicate = !if(is_flat_global, HasFlatGlobalInsts,
61 !if(is_flat_scratch, HasFlatScratchInsts, HasFlatAddressSpace));
63 // TODO: M0 if it could possibly access LDS (before gfx9? only)?
64 let Uses = !if(is_flat_global, [EXEC], [EXEC, FLAT_SCR]);
66 // Internally, FLAT instruction are executed as both an LDS and a
67 // Buffer instruction; so, they increment both VM_CNT and LGKM_CNT
68 // and are not considered done until both have been decremented.
70 let LGKM_CNT = !not(!or(is_flat_global, is_flat_scratch));
72 let FlatGlobal = is_flat_global;
74 let FlatScratch = is_flat_scratch;
// Real (encodable) counterpart of a FLAT_Pseudo: copies the relevant flags
// from the pseudo `ps` and lays down the binary encoding fields.
77 class FLAT_Real <bits<7> op, FLAT_Pseudo ps> :
78 InstSI <ps.OutOperandList, ps.InOperandList, ps.Mnemonic # ps.AsmOperands, []>,
82 let isCodeGenOnly = 0;
86 // copy relevant pseudo op flags
87 let SubtargetPredicate = ps.SubtargetPredicate;
88 let AsmMatchConverter = ps.AsmMatchConverter;
89 let OtherPredicates = ps.OtherPredicates;
90 let TSFlags = ps.TSFlags;
91 let UseNamedOperandTable = ps.UseNamedOperandTable;
92 let SchedRW = ps.SchedRW;
93 let mayLoad = ps.mayLoad;
94 let mayStore = ps.mayStore;
95 let IsAtomicRet = ps.IsAtomicRet;
96 let IsAtomicNoRet = ps.IsAtomicNoRet;
97 let VM_CNT = ps.VM_CNT;
98 let LGKM_CNT = ps.LGKM_CNT;
108 // Only valid on gfx9
109 bits<1> lds = 0; // XXX - What does this actually do?
111 // Segment, 00=flat, 01=scratch, 10=global, 11=reserved
112 bits<2> seg = !if(ps.is_flat_global, 0b10,
113 !if(ps.is_flat_scratch, 0b01, 0));
115 // Signed offset. Highest bit ignored for flat and treated as 12-bit
116 // unsigned for flat accesses.
118 // GFX90A+ only: instruction uses AccVGPR for data
119 bits<1> acc = !if(ps.has_vdst, vdst{9}, !if(ps.has_data, vdata{9}, 0));
121 // We don't use tfe right now, and it was removed in gfx9.
124 // Only valid on GFX9+
125 let Inst{12-0} = offset;
127 let Inst{15-14} = seg;
129 let Inst{16} = !if(ps.has_glc, cpol{CPolBit.GLC}, ps.glcValue);
130 let Inst{17} = cpol{CPolBit.SLC};
131 let Inst{24-18} = op;
132 let Inst{31-26} = 0x37; // Encoding.
133 let Inst{39-32} = !if(ps.has_vaddr, vaddr, ?);
134 let Inst{47-40} = !if(ps.has_data, vdata{7-0}, ?);
// saddr field: 0x7f (the "off" encoding) when saddr exists but is disabled.
135 let Inst{54-48} = !if(ps.has_saddr, !if(ps.enabled_saddr, saddr, 0x7f), 0);
137 // NOTE(review): "54-48 is reserved" contradicts the assignment above;
137 // this comment likely belongs to a different encoding variant — verify.
138 let Inst{55} = acc; // nv on GFX9+, TFE before. AccVGPR for data on GFX90A.
139 let Inst{63-56} = !if(ps.has_vdst, vdst{7-0}, ?);
// Mixin used to build the searchable table pairing VADDR and SADDR forms
// of the same global instruction (IsSaddr distinguishes the two rows).
142 class GlobalSaddrTable <bit is_saddr, string Name = ""> {
143 bit IsSaddr = is_saddr;
144 string SaddrOp = Name;
147 // TODO: Is exec allowed for saddr? The disabled value 0x7f is the
148 // same encoding value as exec_hi, so it isn't possible to use that if
149 // saddr is 32-bit (which isn't handled here yet).
// Generic FLAT/GLOBAL load pseudo. HasSaddr/EnableSaddr pick between the
// 64-bit VGPR address form and the SGPR-base + 32-bit VGPR-offset form;
// HasTiedOutput ties $vdst to $vdst_in for D16/partial-register loads.
150 class FLAT_Load_Pseudo <string opName, RegisterClass regClass,
151 bit HasTiedOutput = 0,
152 bit HasSaddr = 0, bit EnableSaddr = 0,
153 RegisterOperand vdata_op = getLdStRegisterOperand<regClass>.ret> : FLAT_Pseudo<
155 (outs vdata_op:$vdst),
159 (ins SReg_64:$saddr, VGPR_32:$vaddr),
160 (ins VReg_64:$vaddr)),
161 (ins flat_offset:$offset)),
162 // FIXME: Operands with default values do not work with following non-optional operands.
163 !if(HasTiedOutput, (ins CPol:$cpol, vdata_op:$vdst_in),
164 (ins CPol_0:$cpol))),
165 " $vdst, $vaddr"#!if(HasSaddr, !if(EnableSaddr, ", $saddr", ", off"), "")#"$offset$cpol"> {
168 let has_saddr = HasSaddr;
169 let enabled_saddr = EnableSaddr;
// SADDR variants get a distinct pseudo name so both forms can coexist.
170 let PseudoInstr = opName#!if(!and(HasSaddr, EnableSaddr), "_SADDR", "");
173 let Constraints = !if(HasTiedOutput, "$vdst = $vdst_in", "");
174 let DisableEncoding = !if(HasTiedOutput, "$vdst_in", "");
// Generic FLAT/GLOBAL store pseudo; mirrors FLAT_Load_Pseudo's
// saddr handling but takes $vdata instead of producing $vdst.
177 class FLAT_Store_Pseudo <string opName, RegisterClass vdataClass,
178 bit HasSaddr = 0, bit EnableSaddr = 0> : FLAT_Pseudo<
183 (ins VGPR_32:$vaddr, getLdStRegisterOperand<vdataClass>.ret:$vdata, SReg_64:$saddr),
184 (ins VReg_64:$vaddr, getLdStRegisterOperand<vdataClass>.ret:$vdata)),
185 (ins flat_offset:$offset, CPol_0:$cpol)),
186 " $vaddr, $vdata"#!if(HasSaddr, !if(EnableSaddr, ", $saddr", ", off"), "")#"$offset$cpol"> {
190 let has_saddr = HasSaddr;
191 let enabled_saddr = EnableSaddr;
192 let PseudoInstr = opName#!if(!and(HasSaddr, EnableSaddr), "_SADDR", "");
// Emits the VADDR ("") and SADDR ("_SADDR") variants of a global load,
// registering both in the GlobalSaddrTable.
196 multiclass FLAT_Global_Load_Pseudo<string opName, RegisterClass regClass, bit HasTiedInput = 0> {
197 let is_flat_global = 1, SubtargetPredicate = HasFlatGlobalInsts in {
198 def "" : FLAT_Load_Pseudo<opName, regClass, HasTiedInput, 1>,
199 GlobalSaddrTable<0, opName>;
200 def _SADDR : FLAT_Load_Pseudo<opName, regClass, HasTiedInput, 1, 1>,
201 GlobalSaddrTable<1, opName>;
// ADDTID global load: the hardware adds the thread id to the address, so
// there is no explicit $vaddr operand — only an optional $saddr base.
205 class FLAT_Global_Load_AddTid_Pseudo <string opName, RegisterClass regClass,
206 bit HasTiedOutput = 0, bit EnableSaddr = 0> : FLAT_Pseudo<
208 (outs regClass:$vdst),
209 !con(!if(EnableSaddr, (ins SReg_64:$saddr), (ins)),
210 (ins flat_offset:$offset, CPol_0:$cpol),
211 !if(HasTiedOutput, (ins regClass:$vdst_in), (ins))),
212 " $vdst, "#!if(EnableSaddr, "$saddr", "off")#"$offset$cpol"> {
213 let is_flat_global = 1;
218 let enabled_saddr = EnableSaddr;
220 let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", "");
222 let Constraints = !if(HasTiedOutput, "$vdst = $vdst_in", "");
223 let DisableEncoding = !if(HasTiedOutput, "$vdst_in", "");
// Emits the off/SADDR pair of the ADDTID load and their table rows.
226 multiclass FLAT_Global_Load_AddTid_Pseudo<string opName, RegisterClass regClass,
227 bit HasTiedOutput = 0> {
228 def "" : FLAT_Global_Load_AddTid_Pseudo<opName, regClass, HasTiedOutput>,
229 GlobalSaddrTable<0, opName>;
230 def _SADDR : FLAT_Global_Load_AddTid_Pseudo<opName, regClass, HasTiedOutput, 1>,
231 GlobalSaddrTable<1, opName>;
// Emits the VADDR ("") and SADDR ("_SADDR") variants of a global store.
234 multiclass FLAT_Global_Store_Pseudo<string opName, RegisterClass regClass> {
235 let is_flat_global = 1, SubtargetPredicate = HasFlatGlobalInsts in {
236 def "" : FLAT_Store_Pseudo<opName, regClass, 1>,
237 GlobalSaddrTable<0, opName>;
238 def _SADDR : FLAT_Store_Pseudo<opName, regClass, 1, 1>,
239 GlobalSaddrTable<1, opName>;
// ADDTID global store: thread id is added by hardware, so no $vaddr.
243 class FLAT_Global_Store_AddTid_Pseudo <string opName, RegisterClass vdataClass,
244 bit EnableSaddr = 0> : FLAT_Pseudo<
247 !con(!if(EnableSaddr, (ins vdataClass:$vdata, SReg_64:$saddr), (ins vdataClass:$vdata)),
248 (ins flat_offset:$offset, CPol:$cpol)),
249 " $vdata, "#!if(EnableSaddr, "$saddr", "off")#"$offset$cpol"> {
250 let is_flat_global = 1;
256 let enabled_saddr = EnableSaddr;
258 let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", "");
// Emits the off/SADDR pair of the ADDTID store and their table rows.
261 multiclass FLAT_Global_Store_AddTid_Pseudo<string opName, RegisterClass regClass> {
262 def "" : FLAT_Global_Store_AddTid_Pseudo<opName, regClass>,
263 GlobalSaddrTable<0, opName>;
264 def _SADDR : FLAT_Global_Store_AddTid_Pseudo<opName, regClass, 1>,
265 GlobalSaddrTable<1, opName>;
// Mixin recording the scratch addressing mode ("SV"=saddr+vaddr? "SS"?,
// "ST"=no address operands) for a scratch instruction's searchable table.
268 class FlatScratchInst <string sv_op, string mode> {
// Scratch load pseudo. Exactly one of saddr/vaddr may drive the address;
// EnableVaddr defaults to the complement of EnableSaddr, and setting both
// to 0 yields the ST (offset-only) form.
273 class FLAT_Scratch_Load_Pseudo <string opName, RegisterClass regClass,
274 bit HasTiedOutput = 0,
276 bit EnableVaddr = !not(EnableSaddr)>
279 (outs getLdStRegisterOperand<regClass>.ret:$vdst),
282 (ins SReg_32_XEXEC_HI:$saddr, flat_offset:$offset),
284 (ins VGPR_32:$vaddr, flat_offset:$offset),
285 (ins flat_offset:$offset))),
286 !if(HasTiedOutput, (ins CPol:$cpol, getLdStRegisterOperand<regClass>.ret:$vdst_in),
287 (ins CPol_0:$cpol))),
288 " $vdst, "#!if(EnableVaddr, "$vaddr, ", "off, ")#!if(EnableSaddr, "$saddr", "off")#"$offset$cpol"> {
292 let enabled_saddr = EnableSaddr;
293 let has_vaddr = EnableVaddr;
// Three distinct pseudo names: "", "_SADDR", "_ST".
294 let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", !if(EnableVaddr, "", "_ST"));
297 let Constraints = !if(HasTiedOutput, "$vdst = $vdst_in", "");
298 let DisableEncoding = !if(HasTiedOutput, "$vdst_in", "");
// Scratch store pseudo; same saddr/vaddr/ST mode selection as the
// scratch load class above, with $vdata instead of a result.
301 class FLAT_Scratch_Store_Pseudo <string opName, RegisterClass vdataClass, bit EnableSaddr = 0,
302 bit EnableVaddr = !not(EnableSaddr),
303 RegisterOperand vdata_op = getLdStRegisterOperand<vdataClass>.ret> : FLAT_Pseudo<
307 (ins vdata_op:$vdata, SReg_32_XEXEC_HI:$saddr, flat_offset:$offset, CPol_0:$cpol),
309 (ins vdata_op:$vdata, VGPR_32:$vaddr, flat_offset:$offset, CPol_0:$cpol),
310 (ins vdata_op:$vdata, flat_offset:$offset, CPol_0:$cpol))),
311 " "#!if(EnableVaddr, "$vaddr", "off")#", $vdata, "#!if(EnableSaddr, "$saddr", "off")#"$offset$cpol"> {
316 let enabled_saddr = EnableSaddr;
317 let has_vaddr = EnableVaddr;
318 let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", !if(EnableVaddr, "", "_ST"));
// Emits the SV (vaddr), SS (saddr) and ST (offset-only, gated on
// HasFlatScratchSTMode) variants of a scratch load.
322 multiclass FLAT_Scratch_Load_Pseudo<string opName, RegisterClass regClass, bit HasTiedOutput = 0> {
323 let is_flat_scratch = 1 in {
324 def "" : FLAT_Scratch_Load_Pseudo<opName, regClass, HasTiedOutput>,
325 FlatScratchInst<opName, "SV">;
326 def _SADDR : FLAT_Scratch_Load_Pseudo<opName, regClass, HasTiedOutput, 1>,
327 FlatScratchInst<opName, "SS">;
329 let SubtargetPredicate = HasFlatScratchSTMode in
330 def _ST : FLAT_Scratch_Load_Pseudo<opName, regClass, HasTiedOutput, 0, 0>,
331 FlatScratchInst<opName, "ST">;
// Same three variants for scratch stores.
335 multiclass FLAT_Scratch_Store_Pseudo<string opName, RegisterClass regClass> {
336 let is_flat_scratch = 1 in {
337 def "" : FLAT_Scratch_Store_Pseudo<opName, regClass>,
338 FlatScratchInst<opName, "SV">;
339 def _SADDR : FLAT_Scratch_Store_Pseudo<opName, regClass, 1>,
340 FlatScratchInst<opName, "SS">;
342 let SubtargetPredicate = HasFlatScratchSTMode in
343 def _ST : FLAT_Scratch_Store_Pseudo<opName, regClass, 0, 0>,
344 FlatScratchInst<opName, "ST">;
// Atomic pseudo that does not return the pre-op value.
348 class FLAT_AtomicNoRet_Pseudo<string opName, dag outs, dag ins,
349 string asm, list<dag> pattern = []> :
350 FLAT_Pseudo<opName, outs, ins, asm, pattern> {
359 let IsAtomicNoRet = 1;
// Returning atomic: derives from the NoRet class, flips the atomic-kind
// flags, and needs a post-ISel hook (GLC handling) on the selected node.
362 class FLAT_AtomicRet_Pseudo<string opName, dag outs, dag ins,
363 string asm, list<dag> pattern = []>
364 : FLAT_AtomicNoRet_Pseudo<opName, outs, ins, asm, pattern> {
365 let hasPostISelHook = 1;
369 let IsAtomicNoRet = 0;
371 let PseudoInstr = NAME # "_RTN";
// Emits the no-return ("") and returning ("_RTN") variants of a FLAT
// (generic address space) atomic. Only the RTN form gets a selection
// pattern; both carry negative AddedComplexity so the global-segment
// atomics win when the address is known to be global.
374 multiclass FLAT_Atomic_Pseudo<
376 RegisterClass vdst_rc,
378 SDPatternOperator atomic = null_frag,
379 ValueType data_vt = vt,
380 RegisterClass data_rc = vdst_rc,
381 bit isFP = isFloatType<data_vt>.ret,
382 RegisterOperand data_op = getLdStRegisterOperand<data_rc>.ret> {
383 def "" : FLAT_AtomicNoRet_Pseudo <opName,
385 (ins VReg_64:$vaddr, data_op:$vdata, flat_offset:$offset, CPol_0:$cpol),
386 " $vaddr, $vdata$offset$cpol">,
387 GlobalSaddrTable<0, opName>,
388 AtomicNoRet <opName, 0> {
389 let PseudoInstr = NAME;
391 let AddedComplexity = -1; // Prefer global atomics if available
394 def _RTN : FLAT_AtomicRet_Pseudo <opName,
395 (outs getLdStRegisterOperand<vdst_rc>.ret:$vdst),
396 (ins VReg_64:$vaddr, data_op:$vdata, flat_offset:$offset, CPol_GLC1:$cpol),
397 " $vdst, $vaddr, $vdata$offset$cpol",
399 (atomic (FlatOffset i64:$vaddr, i16:$offset), data_vt:$vdata))]>,
400 GlobalSaddrTable<0, opName#"_rtn">,
401 AtomicNoRet <opName, 1>{
403 let AddedComplexity = -1; // Prefer global atomics if available
// No-return global atomics: VADDR ("") and SADDR ("_SADDR") forms.
// No ISel patterns here — no-return selection is handled elsewhere.
407 multiclass FLAT_Global_Atomic_Pseudo_NO_RTN<
409 RegisterClass vdst_rc,
411 ValueType data_vt = vt,
412 RegisterClass data_rc = vdst_rc,
413 bit isFP = isFloatType<data_vt>.ret,
414 RegisterOperand data_op = getLdStRegisterOperand<data_rc>.ret> {
416 def "" : FLAT_AtomicNoRet_Pseudo <opName,
418 (ins VReg_64:$vaddr, data_op:$vdata, flat_offset:$offset, CPol_0:$cpol),
419 " $vaddr, $vdata, off$offset$cpol">,
420 GlobalSaddrTable<0, opName>,
421 AtomicNoRet <opName, 0> {
423 let PseudoInstr = NAME;
427 def _SADDR : FLAT_AtomicNoRet_Pseudo <opName,
429 (ins VGPR_32:$vaddr, data_op:$vdata, SReg_64:$saddr, flat_offset:$offset, CPol_0:$cpol),
430 " $vaddr, $vdata, $saddr$offset$cpol">,
431 GlobalSaddrTable<1, opName>,
432 AtomicNoRet <opName#"_saddr", 0> {
434 let enabled_saddr = 1;
435 let PseudoInstr = NAME#"_SADDR";
// Returning global atomics: "_RTN" (VADDR, with an ISel pattern via
// GlobalOffset) and "_SADDR_RTN" (SGPR base) forms.
440 multiclass FLAT_Global_Atomic_Pseudo_RTN<
442 RegisterClass vdst_rc,
444 SDPatternOperator atomic = null_frag,
445 ValueType data_vt = vt,
446 RegisterClass data_rc = vdst_rc,
447 bit isFP = isFloatType<data_vt>.ret,
448 RegisterOperand data_op = getLdStRegisterOperand<data_rc>.ret,
449 RegisterOperand vdst_op = getLdStRegisterOperand<vdst_rc>.ret> {
451 def _RTN : FLAT_AtomicRet_Pseudo <opName,
452 (outs vdst_op:$vdst),
453 (ins VReg_64:$vaddr, data_op:$vdata, flat_offset:$offset, CPol_GLC1:$cpol),
454 " $vdst, $vaddr, $vdata, off$offset$cpol",
456 (atomic (GlobalOffset i64:$vaddr, i16:$offset), data_vt:$vdata))]>,
457 GlobalSaddrTable<0, opName#"_rtn">,
458 AtomicNoRet <opName, 1> {
463 def _SADDR_RTN : FLAT_AtomicRet_Pseudo <opName,
464 (outs vdst_op:$vdst),
465 (ins VGPR_32:$vaddr, data_op:$vdata, SReg_64:$saddr, flat_offset:$offset, CPol_GLC1:$cpol),
466 " $vdst, $vaddr, $vdata, $saddr$offset$cpol">,
467 GlobalSaddrTable<1, opName#"_rtn">,
468 AtomicNoRet <opName#"_saddr", 1> {
470 let enabled_saddr = 1;
471 let PseudoInstr = NAME#"_SADDR_RTN";
// Convenience wrapper: emits both the no-return and returning variants
// of a global atomic under the global-instruction predicate.
476 multiclass FLAT_Global_Atomic_Pseudo<
478 RegisterClass vdst_rc,
480 SDPatternOperator atomic_rtn = null_frag,
481 ValueType data_vt = vt,
482 RegisterClass data_rc = vdst_rc> {
483 let is_flat_global = 1, SubtargetPredicate = HasFlatGlobalInsts in {
484 defm "" : FLAT_Global_Atomic_Pseudo_NO_RTN<opName, vdst_rc, vt, data_vt, data_rc>;
485 defm "" : FLAT_Global_Atomic_Pseudo_RTN<opName, vdst_rc, vt, atomic_rtn, data_vt, data_rc>;
489 //===----------------------------------------------------------------------===//
491 //===----------------------------------------------------------------------===//
// FLAT (generic address space) load and store instruction definitions,
// sized byte through dwordx4. The D16 loads write half of a 32-bit
// register and therefore tie the destination (HasTiedOutput = 1).
493 def FLAT_LOAD_UBYTE : FLAT_Load_Pseudo <"flat_load_ubyte", VGPR_32>;
494 def FLAT_LOAD_SBYTE : FLAT_Load_Pseudo <"flat_load_sbyte", VGPR_32>;
495 def FLAT_LOAD_USHORT : FLAT_Load_Pseudo <"flat_load_ushort", VGPR_32>;
496 def FLAT_LOAD_SSHORT : FLAT_Load_Pseudo <"flat_load_sshort", VGPR_32>;
497 def FLAT_LOAD_DWORD : FLAT_Load_Pseudo <"flat_load_dword", VGPR_32>;
498 def FLAT_LOAD_DWORDX2 : FLAT_Load_Pseudo <"flat_load_dwordx2", VReg_64>;
499 def FLAT_LOAD_DWORDX4 : FLAT_Load_Pseudo <"flat_load_dwordx4", VReg_128>;
500 def FLAT_LOAD_DWORDX3 : FLAT_Load_Pseudo <"flat_load_dwordx3", VReg_96>;
502 def FLAT_STORE_BYTE : FLAT_Store_Pseudo <"flat_store_byte", VGPR_32>;
503 def FLAT_STORE_SHORT : FLAT_Store_Pseudo <"flat_store_short", VGPR_32>;
504 def FLAT_STORE_DWORD : FLAT_Store_Pseudo <"flat_store_dword", VGPR_32>;
505 def FLAT_STORE_DWORDX2 : FLAT_Store_Pseudo <"flat_store_dwordx2", VReg_64>;
506 def FLAT_STORE_DWORDX4 : FLAT_Store_Pseudo <"flat_store_dwordx4", VReg_128>;
507 def FLAT_STORE_DWORDX3 : FLAT_Store_Pseudo <"flat_store_dwordx3", VReg_96>;
509 let SubtargetPredicate = HasD16LoadStore in {
510 def FLAT_LOAD_UBYTE_D16 : FLAT_Load_Pseudo <"flat_load_ubyte_d16", VGPR_32, 1>;
511 def FLAT_LOAD_UBYTE_D16_HI : FLAT_Load_Pseudo <"flat_load_ubyte_d16_hi", VGPR_32, 1>;
512 def FLAT_LOAD_SBYTE_D16 : FLAT_Load_Pseudo <"flat_load_sbyte_d16", VGPR_32, 1>;
513 def FLAT_LOAD_SBYTE_D16_HI : FLAT_Load_Pseudo <"flat_load_sbyte_d16_hi", VGPR_32, 1>;
514 def FLAT_LOAD_SHORT_D16 : FLAT_Load_Pseudo <"flat_load_short_d16", VGPR_32, 1>;
515 def FLAT_LOAD_SHORT_D16_HI : FLAT_Load_Pseudo <"flat_load_short_d16_hi", VGPR_32, 1>;
517 def FLAT_STORE_BYTE_D16_HI : FLAT_Store_Pseudo <"flat_store_byte_d16_hi", VGPR_32>;
518 def FLAT_STORE_SHORT_D16_HI : FLAT_Store_Pseudo <"flat_store_short_d16_hi", VGPR_32>;
// FLAT integer atomics, 32-bit and 64-bit (_X2) widths, mapped to the
// corresponding flat-address-space atomic SD nodes.
521 defm FLAT_ATOMIC_CMPSWAP : FLAT_Atomic_Pseudo <"flat_atomic_cmpswap",
522 VGPR_32, i32, AMDGPUatomic_cmp_swap_flat_32,
525 defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Atomic_Pseudo <"flat_atomic_cmpswap_x2",
526 VReg_64, i64, AMDGPUatomic_cmp_swap_flat_64,
529 defm FLAT_ATOMIC_SWAP : FLAT_Atomic_Pseudo <"flat_atomic_swap",
530 VGPR_32, i32, atomic_swap_flat_32>;
532 defm FLAT_ATOMIC_SWAP_X2 : FLAT_Atomic_Pseudo <"flat_atomic_swap_x2",
533 VReg_64, i64, atomic_swap_flat_64>;
535 defm FLAT_ATOMIC_ADD : FLAT_Atomic_Pseudo <"flat_atomic_add",
536 VGPR_32, i32, atomic_load_add_flat_32>;
538 defm FLAT_ATOMIC_SUB : FLAT_Atomic_Pseudo <"flat_atomic_sub",
539 VGPR_32, i32, atomic_load_sub_flat_32>;
541 defm FLAT_ATOMIC_SMIN : FLAT_Atomic_Pseudo <"flat_atomic_smin",
542 VGPR_32, i32, atomic_load_min_flat_32>;
544 defm FLAT_ATOMIC_UMIN : FLAT_Atomic_Pseudo <"flat_atomic_umin",
545 VGPR_32, i32, atomic_load_umin_flat_32>;
547 defm FLAT_ATOMIC_SMAX : FLAT_Atomic_Pseudo <"flat_atomic_smax",
548 VGPR_32, i32, atomic_load_max_flat_32>;
550 defm FLAT_ATOMIC_UMAX : FLAT_Atomic_Pseudo <"flat_atomic_umax",
551 VGPR_32, i32, atomic_load_umax_flat_32>;
553 defm FLAT_ATOMIC_AND : FLAT_Atomic_Pseudo <"flat_atomic_and",
554 VGPR_32, i32, atomic_load_and_flat_32>;
556 defm FLAT_ATOMIC_OR : FLAT_Atomic_Pseudo <"flat_atomic_or",
557 VGPR_32, i32, atomic_load_or_flat_32>;
559 defm FLAT_ATOMIC_XOR : FLAT_Atomic_Pseudo <"flat_atomic_xor",
560 VGPR_32, i32, atomic_load_xor_flat_32>;
562 defm FLAT_ATOMIC_INC : FLAT_Atomic_Pseudo <"flat_atomic_inc",
563 VGPR_32, i32, atomic_inc_flat_32>;
565 defm FLAT_ATOMIC_DEC : FLAT_Atomic_Pseudo <"flat_atomic_dec",
566 VGPR_32, i32, atomic_dec_flat_32>;
568 defm FLAT_ATOMIC_ADD_X2 : FLAT_Atomic_Pseudo <"flat_atomic_add_x2",
569 VReg_64, i64, atomic_load_add_flat_64>;
571 defm FLAT_ATOMIC_SUB_X2 : FLAT_Atomic_Pseudo <"flat_atomic_sub_x2",
572 VReg_64, i64, atomic_load_sub_flat_64>;
574 defm FLAT_ATOMIC_SMIN_X2 : FLAT_Atomic_Pseudo <"flat_atomic_smin_x2",
575 VReg_64, i64, atomic_load_min_flat_64>;
577 defm FLAT_ATOMIC_UMIN_X2 : FLAT_Atomic_Pseudo <"flat_atomic_umin_x2",
578 VReg_64, i64, atomic_load_umin_flat_64>;
580 defm FLAT_ATOMIC_SMAX_X2 : FLAT_Atomic_Pseudo <"flat_atomic_smax_x2",
581 VReg_64, i64, atomic_load_max_flat_64>;
583 defm FLAT_ATOMIC_UMAX_X2 : FLAT_Atomic_Pseudo <"flat_atomic_umax_x2",
584 VReg_64, i64, atomic_load_umax_flat_64>;
586 defm FLAT_ATOMIC_AND_X2 : FLAT_Atomic_Pseudo <"flat_atomic_and_x2",
587 VReg_64, i64, atomic_load_and_flat_64>;
589 defm FLAT_ATOMIC_OR_X2 : FLAT_Atomic_Pseudo <"flat_atomic_or_x2",
590 VReg_64, i64, atomic_load_or_flat_64>;
592 defm FLAT_ATOMIC_XOR_X2 : FLAT_Atomic_Pseudo <"flat_atomic_xor_x2",
593 VReg_64, i64, atomic_load_xor_flat_64>;
595 defm FLAT_ATOMIC_INC_X2 : FLAT_Atomic_Pseudo <"flat_atomic_inc_x2",
596 VReg_64, i64, atomic_inc_flat_64>;
598 defm FLAT_ATOMIC_DEC_X2 : FLAT_Atomic_Pseudo <"flat_atomic_dec_x2",
599 VReg_64, i64, atomic_dec_flat_64>;
601 // GFX7-, GFX10-only flat instructions.
602 let SubtargetPredicate = isGFX7GFX10 in {
// Legacy FP compare-swap/min/max flat atomics (no ISel patterns for the
// elided fmin/fmax defms visible here).
604 defm FLAT_ATOMIC_FCMPSWAP : FLAT_Atomic_Pseudo <"flat_atomic_fcmpswap",
605 VGPR_32, f32, null_frag, v2f32, VReg_64>;
607 defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Atomic_Pseudo <"flat_atomic_fcmpswap_x2",
608 VReg_64, f64, null_frag, v2f64, VReg_128>;
610 defm FLAT_ATOMIC_FMIN : FLAT_Atomic_Pseudo <"flat_atomic_fmin",
613 defm FLAT_ATOMIC_FMAX : FLAT_Atomic_Pseudo <"flat_atomic_fmax",
616 defm FLAT_ATOMIC_FMIN_X2 : FLAT_Atomic_Pseudo <"flat_atomic_fmin_x2",
619 defm FLAT_ATOMIC_FMAX_X2 : FLAT_Atomic_Pseudo <"flat_atomic_fmax_x2",
622 } // End SubtargetPredicate = isGFX7GFX10
// GFX90A+ f64 atomics, selected from the flat/global fadd/fmin/fmax
// intrinsics.
624 let SubtargetPredicate = isGFX90APlus in {
625 defm FLAT_ATOMIC_ADD_F64 : FLAT_Atomic_Pseudo<"flat_atomic_add_f64", VReg_64, f64, int_amdgcn_flat_atomic_fadd>;
626 defm FLAT_ATOMIC_MIN_F64 : FLAT_Atomic_Pseudo<"flat_atomic_min_f64", VReg_64, f64, int_amdgcn_flat_atomic_fmin>;
627 defm FLAT_ATOMIC_MAX_F64 : FLAT_Atomic_Pseudo<"flat_atomic_max_f64", VReg_64, f64, int_amdgcn_flat_atomic_fmax>;
628 defm GLOBAL_ATOMIC_ADD_F64 : FLAT_Global_Atomic_Pseudo<"global_atomic_add_f64", VReg_64, f64, int_amdgcn_global_atomic_fadd>;
629 defm GLOBAL_ATOMIC_MIN_F64 : FLAT_Global_Atomic_Pseudo<"global_atomic_min_f64", VReg_64, f64, int_amdgcn_global_atomic_fmin>;
630 defm GLOBAL_ATOMIC_MAX_F64 : FLAT_Global_Atomic_Pseudo<"global_atomic_max_f64", VReg_64, f64, int_amdgcn_global_atomic_fmax>;
631 } // End SubtargetPredicate = isGFX90APlus
// GLOBAL segment loads and stores; each defm expands to VADDR and SADDR
// variants. D16 loads tie their destination; ADDTID forms are gated on
// the GFX10_B encoding.
633 defm GLOBAL_LOAD_UBYTE : FLAT_Global_Load_Pseudo <"global_load_ubyte", VGPR_32>;
634 defm GLOBAL_LOAD_SBYTE : FLAT_Global_Load_Pseudo <"global_load_sbyte", VGPR_32>;
635 defm GLOBAL_LOAD_USHORT : FLAT_Global_Load_Pseudo <"global_load_ushort", VGPR_32>;
636 defm GLOBAL_LOAD_SSHORT : FLAT_Global_Load_Pseudo <"global_load_sshort", VGPR_32>;
637 defm GLOBAL_LOAD_DWORD : FLAT_Global_Load_Pseudo <"global_load_dword", VGPR_32>;
638 defm GLOBAL_LOAD_DWORDX2 : FLAT_Global_Load_Pseudo <"global_load_dwordx2", VReg_64>;
639 defm GLOBAL_LOAD_DWORDX3 : FLAT_Global_Load_Pseudo <"global_load_dwordx3", VReg_96>;
640 defm GLOBAL_LOAD_DWORDX4 : FLAT_Global_Load_Pseudo <"global_load_dwordx4", VReg_128>;
642 defm GLOBAL_LOAD_UBYTE_D16 : FLAT_Global_Load_Pseudo <"global_load_ubyte_d16", VGPR_32, 1>;
643 defm GLOBAL_LOAD_UBYTE_D16_HI : FLAT_Global_Load_Pseudo <"global_load_ubyte_d16_hi", VGPR_32, 1>;
644 defm GLOBAL_LOAD_SBYTE_D16 : FLAT_Global_Load_Pseudo <"global_load_sbyte_d16", VGPR_32, 1>;
645 defm GLOBAL_LOAD_SBYTE_D16_HI : FLAT_Global_Load_Pseudo <"global_load_sbyte_d16_hi", VGPR_32, 1>;
646 defm GLOBAL_LOAD_SHORT_D16 : FLAT_Global_Load_Pseudo <"global_load_short_d16", VGPR_32, 1>;
647 defm GLOBAL_LOAD_SHORT_D16_HI : FLAT_Global_Load_Pseudo <"global_load_short_d16_hi", VGPR_32, 1>;
648 let OtherPredicates = [HasGFX10_BEncoding] in
649 defm GLOBAL_LOAD_DWORD_ADDTID : FLAT_Global_Load_AddTid_Pseudo <"global_load_dword_addtid", VGPR_32>;
651 defm GLOBAL_STORE_BYTE : FLAT_Global_Store_Pseudo <"global_store_byte", VGPR_32>;
652 defm GLOBAL_STORE_SHORT : FLAT_Global_Store_Pseudo <"global_store_short", VGPR_32>;
653 defm GLOBAL_STORE_DWORD : FLAT_Global_Store_Pseudo <"global_store_dword", VGPR_32>;
654 defm GLOBAL_STORE_DWORDX2 : FLAT_Global_Store_Pseudo <"global_store_dwordx2", VReg_64>;
655 defm GLOBAL_STORE_DWORDX3 : FLAT_Global_Store_Pseudo <"global_store_dwordx3", VReg_96>;
656 defm GLOBAL_STORE_DWORDX4 : FLAT_Global_Store_Pseudo <"global_store_dwordx4", VReg_128>;
657 let OtherPredicates = [HasGFX10_BEncoding] in
658 defm GLOBAL_STORE_DWORD_ADDTID : FLAT_Global_Store_AddTid_Pseudo <"global_store_dword_addtid", VGPR_32>;
660 defm GLOBAL_STORE_BYTE_D16_HI : FLAT_Global_Store_Pseudo <"global_store_byte_d16_hi", VGPR_32>;
661 defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Global_Store_Pseudo <"global_store_short_d16_hi", VGPR_32>;
// GLOBAL segment integer atomics, 32-bit and 64-bit (_X2), mirroring the
// FLAT atomic list above but mapped to the global-address-space SD nodes.
663 let is_flat_global = 1 in {
664 defm GLOBAL_ATOMIC_CMPSWAP : FLAT_Global_Atomic_Pseudo <"global_atomic_cmpswap",
665 VGPR_32, i32, AMDGPUatomic_cmp_swap_global_32,
668 defm GLOBAL_ATOMIC_CMPSWAP_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_cmpswap_x2",
669 VReg_64, i64, AMDGPUatomic_cmp_swap_global_64,
672 defm GLOBAL_ATOMIC_SWAP : FLAT_Global_Atomic_Pseudo <"global_atomic_swap",
673 VGPR_32, i32, atomic_swap_global_32>;
675 defm GLOBAL_ATOMIC_SWAP_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_swap_x2",
676 VReg_64, i64, atomic_swap_global_64>;
678 defm GLOBAL_ATOMIC_ADD : FLAT_Global_Atomic_Pseudo <"global_atomic_add",
679 VGPR_32, i32, atomic_load_add_global_32>;
681 defm GLOBAL_ATOMIC_SUB : FLAT_Global_Atomic_Pseudo <"global_atomic_sub",
682 VGPR_32, i32, atomic_load_sub_global_32>;
684 defm GLOBAL_ATOMIC_SMIN : FLAT_Global_Atomic_Pseudo <"global_atomic_smin",
685 VGPR_32, i32, atomic_load_min_global_32>;
687 defm GLOBAL_ATOMIC_UMIN : FLAT_Global_Atomic_Pseudo <"global_atomic_umin",
688 VGPR_32, i32, atomic_load_umin_global_32>;
690 defm GLOBAL_ATOMIC_SMAX : FLAT_Global_Atomic_Pseudo <"global_atomic_smax",
691 VGPR_32, i32, atomic_load_max_global_32>;
693 defm GLOBAL_ATOMIC_UMAX : FLAT_Global_Atomic_Pseudo <"global_atomic_umax",
694 VGPR_32, i32, atomic_load_umax_global_32>;
696 defm GLOBAL_ATOMIC_AND : FLAT_Global_Atomic_Pseudo <"global_atomic_and",
697 VGPR_32, i32, atomic_load_and_global_32>;
699 defm GLOBAL_ATOMIC_OR : FLAT_Global_Atomic_Pseudo <"global_atomic_or",
700 VGPR_32, i32, atomic_load_or_global_32>;
702 defm GLOBAL_ATOMIC_XOR : FLAT_Global_Atomic_Pseudo <"global_atomic_xor",
703 VGPR_32, i32, atomic_load_xor_global_32>;
705 defm GLOBAL_ATOMIC_INC : FLAT_Global_Atomic_Pseudo <"global_atomic_inc",
706 VGPR_32, i32, atomic_inc_global_32>;
708 defm GLOBAL_ATOMIC_DEC : FLAT_Global_Atomic_Pseudo <"global_atomic_dec",
709 VGPR_32, i32, atomic_dec_global_32>;
711 defm GLOBAL_ATOMIC_ADD_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_add_x2",
712 VReg_64, i64, atomic_load_add_global_64>;
714 defm GLOBAL_ATOMIC_SUB_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_sub_x2",
715 VReg_64, i64, atomic_load_sub_global_64>;
717 defm GLOBAL_ATOMIC_SMIN_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_smin_x2",
718 VReg_64, i64, atomic_load_min_global_64>;
720 defm GLOBAL_ATOMIC_UMIN_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_umin_x2",
721 VReg_64, i64, atomic_load_umin_global_64>;
723 defm GLOBAL_ATOMIC_SMAX_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_smax_x2",
724 VReg_64, i64, atomic_load_max_global_64>;
726 defm GLOBAL_ATOMIC_UMAX_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_umax_x2",
727 VReg_64, i64, atomic_load_umax_global_64>;
729 defm GLOBAL_ATOMIC_AND_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_and_x2",
730 VReg_64, i64, atomic_load_and_global_64>;
732 defm GLOBAL_ATOMIC_OR_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_or_x2",
733 VReg_64, i64, atomic_load_or_global_64>;
735 defm GLOBAL_ATOMIC_XOR_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_xor_x2",
736 VReg_64, i64, atomic_load_xor_global_64>;
738 defm GLOBAL_ATOMIC_INC_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_inc_x2",
739 VReg_64, i64, atomic_inc_global_64>;
741 defm GLOBAL_ATOMIC_DEC_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_dec_x2",
742 VReg_64, i64, atomic_dec_global_64>;
// CSUB is return-only and exists only on the GFX10_B encoding.
744 let SubtargetPredicate = HasGFX10_BEncoding in
745 defm GLOBAL_ATOMIC_CSUB : FLAT_Global_Atomic_Pseudo_RTN <"global_atomic_csub",
746 VGPR_32, i32, int_amdgcn_global_atomic_csub>;
747 } // End is_flat_global = 1
// SCRATCH segment loads and stores; each defm expands to the SV, SS and
// ST addressing-mode variants.
751 let SubtargetPredicate = HasFlatScratchInsts in {
752 defm SCRATCH_LOAD_UBYTE : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte", VGPR_32>;
753 defm SCRATCH_LOAD_SBYTE : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte", VGPR_32>;
754 defm SCRATCH_LOAD_USHORT : FLAT_Scratch_Load_Pseudo <"scratch_load_ushort", VGPR_32>;
755 defm SCRATCH_LOAD_SSHORT : FLAT_Scratch_Load_Pseudo <"scratch_load_sshort", VGPR_32>;
756 defm SCRATCH_LOAD_DWORD : FLAT_Scratch_Load_Pseudo <"scratch_load_dword", VGPR_32>;
757 defm SCRATCH_LOAD_DWORDX2 : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx2", VReg_64>;
758 defm SCRATCH_LOAD_DWORDX3 : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx3", VReg_96>;
759 defm SCRATCH_LOAD_DWORDX4 : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx4", VReg_128>;
761 defm SCRATCH_LOAD_UBYTE_D16 : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte_d16", VGPR_32, 1>;
762 defm SCRATCH_LOAD_UBYTE_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte_d16_hi", VGPR_32, 1>;
763 defm SCRATCH_LOAD_SBYTE_D16 : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte_d16", VGPR_32, 1>;
764 defm SCRATCH_LOAD_SBYTE_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte_d16_hi", VGPR_32, 1>;
765 defm SCRATCH_LOAD_SHORT_D16 : FLAT_Scratch_Load_Pseudo <"scratch_load_short_d16", VGPR_32, 1>;
766 defm SCRATCH_LOAD_SHORT_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_short_d16_hi", VGPR_32, 1>;
768 defm SCRATCH_STORE_BYTE : FLAT_Scratch_Store_Pseudo <"scratch_store_byte", VGPR_32>;
769 defm SCRATCH_STORE_SHORT : FLAT_Scratch_Store_Pseudo <"scratch_store_short", VGPR_32>;
770 defm SCRATCH_STORE_DWORD : FLAT_Scratch_Store_Pseudo <"scratch_store_dword", VGPR_32>;
771 defm SCRATCH_STORE_DWORDX2 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx2", VReg_64>;
772 defm SCRATCH_STORE_DWORDX3 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx3", VReg_96>;
773 defm SCRATCH_STORE_DWORDX4 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx4", VReg_128>;
775 defm SCRATCH_STORE_BYTE_D16_HI : FLAT_Scratch_Store_Pseudo <"scratch_store_byte_d16_hi", VGPR_32>;
776 defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Scratch_Store_Pseudo <"scratch_store_short_d16_hi", VGPR_32>;
778 } // End SubtargetPredicate = HasFlatScratchInsts
// GFX10+ FP global atomics (fcmpswap/fmin/fmax, 32- and 64-bit).
780 let SubtargetPredicate = isGFX10Plus, is_flat_global = 1 in {
781 defm GLOBAL_ATOMIC_FCMPSWAP :
782 FLAT_Global_Atomic_Pseudo<"global_atomic_fcmpswap", VGPR_32, f32, null_frag, v2f32, VReg_64>;
783 defm GLOBAL_ATOMIC_FMIN :
784 FLAT_Global_Atomic_Pseudo<"global_atomic_fmin", VGPR_32, f32, int_amdgcn_global_atomic_fmin>;
785 defm GLOBAL_ATOMIC_FMAX :
786 FLAT_Global_Atomic_Pseudo<"global_atomic_fmax", VGPR_32, f32, int_amdgcn_global_atomic_fmax>;
787 defm GLOBAL_ATOMIC_FCMPSWAP_X2 :
788 FLAT_Global_Atomic_Pseudo<"global_atomic_fcmpswap_x2", VReg_64, f64, null_frag, v2f64, VReg_128>;
789 defm GLOBAL_ATOMIC_FMIN_X2 :
790 FLAT_Global_Atomic_Pseudo<"global_atomic_fmin_x2", VReg_64, f64, int_amdgcn_global_atomic_fmin>;
791 defm GLOBAL_ATOMIC_FMAX_X2 :
792 FLAT_Global_Atomic_Pseudo<"global_atomic_fmax_x2", VReg_64, f64, int_amdgcn_global_atomic_fmax>;
793 } // End SubtargetPredicate = isGFX10Plus, is_flat_global = 1
// FP-add atomics: targets with HasAtomicFaddInsts get only the no-return
// forms; GFX90A+ additionally gets the returning forms selected from the
// fadd intrinsic.
795 let is_flat_global = 1 in {
796 let OtherPredicates = [HasAtomicFaddInsts] in {
797 defm GLOBAL_ATOMIC_ADD_F32 : FLAT_Global_Atomic_Pseudo_NO_RTN <
798 "global_atomic_add_f32", VGPR_32, f32
800 defm GLOBAL_ATOMIC_PK_ADD_F16 : FLAT_Global_Atomic_Pseudo_NO_RTN <
801 "global_atomic_pk_add_f16", VGPR_32, v2f16
803 } // End OtherPredicates = [HasAtomicFaddInsts]
805 let OtherPredicates = [isGFX90APlus] in {
806 defm GLOBAL_ATOMIC_ADD_F32 : FLAT_Global_Atomic_Pseudo_RTN <
807 "global_atomic_add_f32", VGPR_32, f32, int_amdgcn_global_atomic_fadd
809 defm GLOBAL_ATOMIC_PK_ADD_F16 : FLAT_Global_Atomic_Pseudo_RTN <
810 "global_atomic_pk_add_f16", VGPR_32, v2f16, int_amdgcn_global_atomic_fadd
812 } // End OtherPredicates = [isGFX90APlus]
813 } // End is_flat_global = 1
815 //===----------------------------------------------------------------------===//
817 //===----------------------------------------------------------------------===//
819 // Patterns for global loads with no offset.
820 class FlatLoadPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
821 (vt (node (FlatOffset i64:$vaddr, i16:$offset))),
822 (inst $vaddr, $offset)
825 class FlatLoadPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
826 (node (FlatOffset (i64 VReg_64:$vaddr), i16:$offset), vt:$in),
827 (inst $vaddr, $offset, 0, $in)
// D16 load using the signed (GlobalOffset) address selector.
830 class FlatSignedLoadPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
831 (node (GlobalOffset (i64 VReg_64:$vaddr), i16:$offset), vt:$in),
832 (inst $vaddr, $offset, 0, $in)
// D16 global load with a 64-bit SGPR base plus a 32-bit VGPR offset
// (the SADDR addressing form).
835 class GlobalLoadSaddrPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
836 (vt (node (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset), vt:$in)),
837 (inst $saddr, $voffset, $offset, 0, $in)
// Global load addressed by a 64-bit VGPR pointer plus immediate offset.
840 class FlatLoadSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
841 (vt (node (GlobalOffset (i64 VReg_64:$vaddr), i16:$offset))),
842 (inst $vaddr, $offset)
// Global load, SADDR form: SGPR base + VGPR offset + immediate offset.
845 class GlobalLoadSaddrPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
846 (vt (node (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset))),
847 (inst $saddr, $voffset, $offset, 0)
// Global store, SADDR form: store $data to SGPR base + VGPR offset.
850 class GlobalStoreSaddrPat <FLAT_Pseudo inst, SDPatternOperator node,
851 ValueType vt> : GCNPat <
852 (node vt:$data, (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset)),
853 (inst $voffset, getVregSrcForVT<vt>.ret:$data, $saddr, $offset)
// Atomic store, SADDR form. Note the source-pattern operand order: atomic
// stores take the address first and the data second, the reverse of a
// regular store node.
856 class GlobalAtomicStoreSaddrPat <FLAT_Pseudo inst, SDPatternOperator node,
857 ValueType vt> : GCNPat <
858 (node (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset), vt:$data),
859 (inst $voffset, getVregSrcForVT<vt>.ret:$data, $saddr, $offset)
// Returning atomic RMW, SADDR form. data_vt may differ from the result
// type vt (e.g. cmpswap passes {src, cmp} as a vector data operand).
862 class GlobalAtomicSaddrPat <FLAT_Pseudo inst, SDPatternOperator node,
863 ValueType vt, ValueType data_vt = vt> : GCNPat <
864 (vt (node (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset), data_vt:$data)),
865 (inst $voffset, getVregSrcForVT<data_vt>.ret:$data, $saddr, $offset)
// Non-returning atomic RMW, SADDR form.
868 class GlobalAtomicNoRtnSaddrPat <FLAT_Pseudo inst, SDPatternOperator node,
869 ValueType vt> : GCNPat <
870 (node (GlobalSAddr (i64 SReg_64:$saddr), (i32 VGPR_32:$voffset), i16:$offset), vt:$data),
871 (inst $voffset, getVregSrcForVT<vt>.ret:$data, $saddr, $offset)
// Flat store:
//   (node $data, (FlatOffset vaddr, offset)) -> (inst $vaddr, $data, $offset)
874 class FlatStorePat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
875 (node vt:$data, (FlatOffset i64:$vaddr, i16:$offset)),
876 (inst $vaddr, getVregSrcForVT<vt>.ret:$data, $offset)
// Global store using the signed (GlobalOffset) address selector.
879 class FlatStoreSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
880 (node vt:$data, (GlobalOffset i64:$vaddr, i16:$offset)),
881 (inst $vaddr, getVregSrcForVT<vt>.ret:$data, $offset)
884 class FlatStoreAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
885 // atomic store follows atomic binop convention so the address comes
887 (node (FlatOffset i64:$vaddr, i16:$offset), vt:$data),
888 (inst $vaddr, getVregSrcForVT<vt>.ret:$data, $offset)
891 class FlatStoreSignedAtomicPat <FLAT_Pseudo inst, SDPatternOperator node,
892 ValueType vt, ValueType data_vt = vt> : GCNPat <
893 // atomic store follows atomic binop convention so the address comes
895 (node (GlobalOffset i64:$vaddr, i16:$offset), data_vt:$data),
896 (inst $vaddr, getVregSrcForVT<data_vt>.ret:$data, $offset)
// Returning flat atomic RMW. data_vt may differ from the result type vt
// (e.g. cmpswap's vector data operand).
899 class FlatAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt,
900 ValueType data_vt = vt> : GCNPat <
901 (vt (node (FlatOffset i64:$vaddr, i16:$offset), data_vt:$data)),
902 (inst $vaddr, $data, $offset)
// Non-returning flat atomic RMW.
905 class FlatAtomicPatNoRtn <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
906 (node (FlatOffset i64:$vaddr, i16:$offset), vt:$data),
907 (inst VReg_64:$vaddr, getVregSrcForVT<vt>.ret:$data, $offset)
// Non-returning atomic RMW using the signed (GlobalOffset) selector.
910 class FlatSignedAtomicPatNoRtn <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
911 (node (GlobalOffset i64:$vaddr, i16:$offset), vt:$data),
912 (inst VReg_64:$vaddr, getVregSrcForVT<vt>.ret:$data, $offset)
// Returning atomic RMW using the signed (GlobalOffset) selector.
915 class FlatSignedAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt,
916 ValueType data_vt = vt> : GCNPat <
917 (vt (node (GlobalOffset i64:$vaddr, i16:$offset), data_vt:$data)),
918 (inst VReg_64:$vaddr, getVregSrcForVT<data_vt>.ret:$data, $offset)
// Scratch (private) load: 32-bit VGPR address plus immediate offset.
921 class ScratchLoadSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
922 (vt (node (ScratchOffset (i32 VGPR_32:$vaddr), i16:$offset))),
923 (inst $vaddr, $offset)
// Scratch D16 load; $in is the register whose unused half is preserved.
926 class ScratchLoadSignedPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
927 (node (ScratchOffset (i32 VGPR_32:$vaddr), i16:$offset), vt:$in),
928 (inst $vaddr, $offset, 0, $in)
// Scratch store: data operand comes first in the output instruction.
931 class ScratchStoreSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
932 (node vt:$data, (ScratchOffset (i32 VGPR_32:$vaddr), i16:$offset)),
933 (inst getVregSrcForVT<vt>.ret:$data, $vaddr, $offset)
// SADDR forms address scratch with a 32-bit SGPR base instead of a VGPR.
936 class ScratchLoadSaddrPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
937 (vt (node (ScratchSAddr (i32 SGPR_32:$saddr), i16:$offset))),
938 (inst $saddr, $offset)
941 class ScratchLoadSaddrPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
942 (vt (node (ScratchSAddr (i32 SGPR_32:$saddr), i16:$offset), vt:$in)),
943 (inst $saddr, $offset, 0, $in)
946 class ScratchStoreSaddrPat <FLAT_Pseudo inst, SDPatternOperator node,
947 ValueType vt> : GCNPat <
948 (node vt:$data, (ScratchSAddr (i32 SGPR_32:$saddr), i16:$offset)),
949 (inst getVregSrcForVT<vt>.ret:$data, $saddr, $offset)
// Selection patterns for flat-address-space loads, stores and atomics,
// mapping ISD/intrinsic nodes onto the FLAT_* pseudos defined above.
952 let OtherPredicates = [HasFlatAddressSpace] in {
954 def : FlatLoadPat <FLAT_LOAD_UBYTE, extloadi8_flat, i32>;
955 def : FlatLoadPat <FLAT_LOAD_UBYTE, zextloadi8_flat, i32>;
956 def : FlatLoadPat <FLAT_LOAD_SBYTE, sextloadi8_flat, i32>;
957 def : FlatLoadPat <FLAT_LOAD_UBYTE, extloadi8_flat, i16>;
958 def : FlatLoadPat <FLAT_LOAD_UBYTE, zextloadi8_flat, i16>;
959 def : FlatLoadPat <FLAT_LOAD_SBYTE, sextloadi8_flat, i16>;
960 def : FlatLoadPat <FLAT_LOAD_USHORT, extloadi16_flat, i32>;
961 def : FlatLoadPat <FLAT_LOAD_USHORT, zextloadi16_flat, i32>;
962 def : FlatLoadPat <FLAT_LOAD_USHORT, load_flat, i16>;
963 def : FlatLoadPat <FLAT_LOAD_SSHORT, sextloadi16_flat, i32>;
964 def : FlatLoadPat <FLAT_LOAD_DWORDX3, load_flat, v3i32>;
// Atomic loads select to the ordinary load instructions; ordering is
// handled later (see the memory-legalizer note in the global section).
966 def : FlatLoadPat <FLAT_LOAD_DWORD, atomic_load_32_flat, i32>;
967 def : FlatLoadPat <FLAT_LOAD_DWORDX2, atomic_load_64_flat, i64>;
969 def : FlatStorePat <FLAT_STORE_BYTE, truncstorei8_flat, i32>;
970 def : FlatStorePat <FLAT_STORE_SHORT, truncstorei16_flat, i32>;
// Full-width loads/stores cover every register type of the matching size.
972 foreach vt = Reg32Types.types in {
973 def : FlatLoadPat <FLAT_LOAD_DWORD, load_flat, vt>;
974 def : FlatStorePat <FLAT_STORE_DWORD, store_flat, vt>;
977 foreach vt = VReg_64.RegTypes in {
978 def : FlatStorePat <FLAT_STORE_DWORDX2, store_flat, vt>;
979 def : FlatLoadPat <FLAT_LOAD_DWORDX2, load_flat, vt>;
982 def : FlatStorePat <FLAT_STORE_DWORDX3, store_flat, v3i32>;
984 foreach vt = VReg_128.RegTypes in {
985 def : FlatLoadPat <FLAT_LOAD_DWORDX4, load_flat, vt>;
986 def : FlatStorePat <FLAT_STORE_DWORDX4, store_flat, vt>;
989 def : FlatStoreAtomicPat <FLAT_STORE_DWORD, atomic_store_flat_32, i32>;
990 def : FlatStoreAtomicPat <FLAT_STORE_DWORDX2, atomic_store_flat_64, i64>;
// NOTE(review): these flat-instruction atomics match *_global_* PatFrags —
// presumably those frags also cover the flat address space; verify against
// the PatFrag definitions before relying on the naming.
992 def : FlatAtomicPat <FLAT_ATOMIC_ADD_RTN, atomic_load_add_global_32, i32>;
993 def : FlatAtomicPat <FLAT_ATOMIC_SUB_RTN, atomic_load_sub_global_32, i32>;
994 def : FlatAtomicPat <FLAT_ATOMIC_INC_RTN, atomic_inc_global_32, i32>;
995 def : FlatAtomicPat <FLAT_ATOMIC_DEC_RTN, atomic_dec_global_32, i32>;
996 def : FlatAtomicPat <FLAT_ATOMIC_AND_RTN, atomic_load_and_global_32, i32>;
997 def : FlatAtomicPat <FLAT_ATOMIC_SMAX_RTN, atomic_load_max_global_32, i32>;
998 def : FlatAtomicPat <FLAT_ATOMIC_UMAX_RTN, atomic_load_umax_global_32, i32>;
999 def : FlatAtomicPat <FLAT_ATOMIC_SMIN_RTN, atomic_load_min_global_32, i32>;
1000 def : FlatAtomicPat <FLAT_ATOMIC_UMIN_RTN, atomic_load_umin_global_32, i32>;
1001 def : FlatAtomicPat <FLAT_ATOMIC_OR_RTN, atomic_load_or_global_32, i32>;
1002 def : FlatAtomicPat <FLAT_ATOMIC_SWAP_RTN, atomic_swap_global_32, i32>;
1003 def : FlatAtomicPat <FLAT_ATOMIC_CMPSWAP_RTN, AMDGPUatomic_cmp_swap_global_32, i32, v2i32>;
1004 def : FlatAtomicPat <FLAT_ATOMIC_XOR_RTN, atomic_load_xor_global_32, i32>;
// 64-bit (X2) variants of the atomics above.
1006 def : FlatAtomicPat <FLAT_ATOMIC_ADD_X2_RTN, atomic_load_add_global_64, i64>;
1007 def : FlatAtomicPat <FLAT_ATOMIC_SUB_X2_RTN, atomic_load_sub_global_64, i64>;
1008 def : FlatAtomicPat <FLAT_ATOMIC_INC_X2_RTN, atomic_inc_global_64, i64>;
1009 def : FlatAtomicPat <FLAT_ATOMIC_DEC_X2_RTN, atomic_dec_global_64, i64>;
1010 def : FlatAtomicPat <FLAT_ATOMIC_AND_X2_RTN, atomic_load_and_global_64, i64>;
1011 def : FlatAtomicPat <FLAT_ATOMIC_SMAX_X2_RTN, atomic_load_max_global_64, i64>;
1012 def : FlatAtomicPat <FLAT_ATOMIC_UMAX_X2_RTN, atomic_load_umax_global_64, i64>;
1013 def : FlatAtomicPat <FLAT_ATOMIC_SMIN_X2_RTN, atomic_load_min_global_64, i64>;
1014 def : FlatAtomicPat <FLAT_ATOMIC_UMIN_X2_RTN, atomic_load_umin_global_64, i64>;
1015 def : FlatAtomicPat <FLAT_ATOMIC_OR_X2_RTN, atomic_load_or_global_64, i64>;
1016 def : FlatAtomicPat <FLAT_ATOMIC_SWAP_X2_RTN, atomic_swap_global_64, i64>;
1017 def : FlatAtomicPat <FLAT_ATOMIC_CMPSWAP_X2_RTN, AMDGPUatomic_cmp_swap_global_64, i64, v2i64>;
1018 def : FlatAtomicPat <FLAT_ATOMIC_XOR_X2_RTN, atomic_load_xor_global_64, i64>;
1020 def : FlatStorePat <FLAT_STORE_BYTE, truncstorei8_flat, i16>;
1021 def : FlatStorePat <FLAT_STORE_SHORT, store_flat, i16>;
// D16 hi/lo half-register load/store patterns; only valid when the
// hardware preserves the unused half of the destination register.
1023 let OtherPredicates = [D16PreservesUnusedBits] in {
1024 def : FlatStorePat <FLAT_STORE_SHORT_D16_HI, truncstorei16_hi16_flat, i32>;
1025 def : FlatStorePat <FLAT_STORE_BYTE_D16_HI, truncstorei8_hi16_flat, i32>;
1027 def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_flat, v2i16>;
1028 def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_flat, v2f16>;
1029 def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_flat, v2i16>;
1030 def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_flat, v2f16>;
1031 def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16_HI, load_d16_hi_flat, v2i16>;
1032 def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16_HI, load_d16_hi_flat, v2f16>;
1034 def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16, az_extloadi8_d16_lo_flat, v2i16>;
1035 def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16, az_extloadi8_d16_lo_flat, v2f16>;
1036 def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16, sextloadi8_d16_lo_flat, v2i16>;
1037 def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16, sextloadi8_d16_lo_flat, v2f16>;
1038 def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16, load_d16_lo_flat, v2i16>;
1039 def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16, load_d16_lo_flat, v2f16>;
1042 } // End OtherPredicates = [HasFlatAddressSpace]
// Each GlobalFLAT* multiclass instantiates the VGPR-addressed pattern at
// AddedComplexity 10 and the corresponding _SADDR pattern at 11, so the
// SGPR-base form wins whenever SelectGlobalSAddr can match.
1045 multiclass GlobalFLATLoadPats<FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> {
1046 def : FlatLoadSignedPat <inst, node, vt> {
1047 let AddedComplexity = 10;
1050 def : GlobalLoadSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1051 let AddedComplexity = 11;
// D16 (half-register) variant of GlobalFLATLoadPats.
1055 multiclass GlobalFLATLoadPats_D16<FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> {
1056 def : FlatSignedLoadPat_D16 <inst, node, vt> {
1057 let AddedComplexity = 10;
1060 def : GlobalLoadSaddrPat_D16<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1061 let AddedComplexity = 11;
// Store variant: plain + _SADDR patterns.
1065 multiclass GlobalFLATStorePats<FLAT_Pseudo inst, SDPatternOperator node,
1067 def : FlatStoreSignedPat <inst, node, vt> {
1068 let AddedComplexity = 10;
1071 def : GlobalStoreSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1072 let AddedComplexity = 11;
1076 // Deal with swapped operands for atomic_store vs. regular store
1077 multiclass GlobalFLATAtomicStorePats<FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> {
1078 def : FlatStoreSignedAtomicPat <inst, node, vt> {
1079 let AddedComplexity = 10;
1082 def : GlobalAtomicStoreSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1083 let AddedComplexity = 11;
// Returning atomic RMW: takes the base pseudo name as a string and appends
// "_RTN" / "_SADDR_RTN" to pick the concrete returning instructions.
1087 multiclass GlobalFLATAtomicPats<string nortn_inst_name, SDPatternOperator node,
1088 ValueType vt, ValueType data_vt = vt> {
1089 def : FlatSignedAtomicPat <!cast<FLAT_Pseudo>(nortn_inst_name#"_RTN"), node, vt, data_vt> {
1090 let AddedComplexity = 10;
1093 def : GlobalAtomicSaddrPat<!cast<FLAT_Pseudo>(nortn_inst_name#"_SADDR_RTN"), node, vt, data_vt> {
1094 let AddedComplexity = 11;
// Non-returning atomic RMW: plain + _SADDR patterns.
1098 multiclass GlobalFLATNoRtnAtomicPats<FLAT_Pseudo inst, SDPatternOperator node,
1100 def : FlatSignedAtomicPatNoRtn <inst, node, vt> {
1101 let AddedComplexity = 10;
1104 def : GlobalAtomicNoRtnSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1105 let AddedComplexity = 11;
// Scratch counterparts of the GlobalFLAT* multiclasses. They use higher
// AddedComplexity (25/26) so flat-scratch patterns are preferred, with the
// SADDR form again one step above the VGPR-addressed form.
1109 multiclass ScratchFLATLoadPats<FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> {
1110 def : ScratchLoadSignedPat <inst, node, vt> {
1111 let AddedComplexity = 25;
1114 def : ScratchLoadSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1115 let AddedComplexity = 26;
// Store variant.
1119 multiclass ScratchFLATStorePats<FLAT_Pseudo inst, SDPatternOperator node,
1121 def : ScratchStoreSignedPat <inst, node, vt> {
1122 let AddedComplexity = 25;
1125 def : ScratchStoreSaddrPat<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1126 let AddedComplexity = 26;
// D16 (half-register) load variant.
1130 multiclass ScratchFLATLoadPats_D16<FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> {
1131 def : ScratchLoadSignedPat_D16 <inst, node, vt> {
1132 let AddedComplexity = 25;
1135 def : ScratchLoadSaddrPat_D16<!cast<FLAT_Pseudo>(!cast<string>(inst)#"_SADDR"), node, vt> {
1136 let AddedComplexity = 26;
// Selection patterns for global-address-space accesses via the GLOBAL_*
// pseudos. AddedComplexity is set per-pattern by the GlobalFLAT*
// multiclasses above (10 for VGPR-addressed, 11 for SADDR).
1140 let OtherPredicates = [HasFlatGlobalInsts] in {
1142 defm : GlobalFLATLoadPats <GLOBAL_LOAD_UBYTE, extloadi8_global, i32>;
1143 defm : GlobalFLATLoadPats <GLOBAL_LOAD_UBYTE, zextloadi8_global, i32>;
1144 defm : GlobalFLATLoadPats <GLOBAL_LOAD_SBYTE, sextloadi8_global, i32>;
1145 defm : GlobalFLATLoadPats <GLOBAL_LOAD_UBYTE, extloadi8_global, i16>;
1146 defm : GlobalFLATLoadPats <GLOBAL_LOAD_UBYTE, zextloadi8_global, i16>;
1147 defm : GlobalFLATLoadPats <GLOBAL_LOAD_SBYTE, sextloadi8_global, i16>;
1148 defm : GlobalFLATLoadPats <GLOBAL_LOAD_USHORT, extloadi16_global, i32>;
1149 defm : GlobalFLATLoadPats <GLOBAL_LOAD_USHORT, zextloadi16_global, i32>;
1150 defm : GlobalFLATLoadPats <GLOBAL_LOAD_SSHORT, sextloadi16_global, i32>;
1151 defm : GlobalFLATLoadPats <GLOBAL_LOAD_USHORT, load_global, i16>;
// Full-width accesses cover every register type of the matching size.
1153 foreach vt = Reg32Types.types in {
1154 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORD, load_global, vt>;
1155 defm : GlobalFLATStorePats <GLOBAL_STORE_DWORD, store_global, vt>;
1158 foreach vt = VReg_64.RegTypes in {
1159 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORDX2, load_global, vt>;
1160 defm : GlobalFLATStorePats <GLOBAL_STORE_DWORDX2, store_global, vt>;
1163 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORDX3, load_global, v3i32>;
1165 foreach vt = VReg_128.RegTypes in {
1166 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORDX4, load_global, vt>;
1167 defm : GlobalFLATStorePats <GLOBAL_STORE_DWORDX4, store_global, vt>;
1170 // There is no distinction for atomic load lowering during selection;
1171 // the memory legalizer will set the cache bits and insert the
1172 // appropriate waits.
1173 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORD, atomic_load_32_global, i32>;
1174 defm : GlobalFLATLoadPats <GLOBAL_LOAD_DWORDX2, atomic_load_64_global, i64>;
1176 defm : GlobalFLATStorePats <GLOBAL_STORE_BYTE, truncstorei8_global, i32>;
1177 defm : GlobalFLATStorePats <GLOBAL_STORE_BYTE, truncstorei8_global, i16>;
1178 defm : GlobalFLATStorePats <GLOBAL_STORE_SHORT, truncstorei16_global, i32>;
1179 defm : GlobalFLATStorePats <GLOBAL_STORE_SHORT, store_global, i16>;
1180 defm : GlobalFLATStorePats <GLOBAL_STORE_DWORDX3, store_global, v3i32>;
// D16 hi/lo half-register patterns, gated on the hardware preserving the
// unused half of the destination register.
1182 let OtherPredicates = [D16PreservesUnusedBits] in {
1183 defm : GlobalFLATStorePats <GLOBAL_STORE_SHORT_D16_HI, truncstorei16_hi16_global, i32>;
1184 defm : GlobalFLATStorePats <GLOBAL_STORE_BYTE_D16_HI, truncstorei8_hi16_global, i32>;
1186 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_global, v2i16>;
1187 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_global, v2f16>;
1188 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_global, v2i16>;
1189 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_global, v2f16>;
1190 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SHORT_D16_HI, load_d16_hi_global, v2i16>;
1191 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SHORT_D16_HI, load_d16_hi_global, v2f16>;
1193 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_UBYTE_D16, az_extloadi8_d16_lo_global, v2i16>;
1194 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_UBYTE_D16, az_extloadi8_d16_lo_global, v2f16>;
1195 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SBYTE_D16, sextloadi8_d16_lo_global, v2i16>;
1196 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SBYTE_D16, sextloadi8_d16_lo_global, v2f16>;
1197 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SHORT_D16, load_d16_lo_global, v2i16>;
1198 defm : GlobalFLATLoadPats_D16 <GLOBAL_LOAD_SHORT_D16, load_d16_lo_global, v2f16>;
1201 defm : GlobalFLATAtomicStorePats <GLOBAL_STORE_DWORD, atomic_store_global_32, i32>;
1202 defm : GlobalFLATAtomicStorePats <GLOBAL_STORE_DWORDX2, atomic_store_global_64, i64>;
// 32-bit returning atomics (the multiclass appends _RTN / _SADDR_RTN).
1204 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_ADD", atomic_load_add_global_32, i32>;
1205 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SUB", atomic_load_sub_global_32, i32>;
1206 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_INC", atomic_inc_global_32, i32>;
1207 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_DEC", atomic_dec_global_32, i32>;
1208 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_AND", atomic_load_and_global_32, i32>;
1209 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SMAX", atomic_load_max_global_32, i32>;
1210 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_UMAX", atomic_load_umax_global_32, i32>;
1211 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SMIN", atomic_load_min_global_32, i32>;
1212 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_UMIN", atomic_load_umin_global_32, i32>;
1213 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_OR", atomic_load_or_global_32, i32>;
1214 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SWAP", atomic_swap_global_32, i32>;
1215 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_CMPSWAP", AMDGPUatomic_cmp_swap_global_32, i32, v2i32>;
1216 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_XOR", atomic_load_xor_global_32, i32>;
1217 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_CSUB", int_amdgcn_global_atomic_csub, i32>;
// 64-bit (X2) returning atomics.
1219 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_ADD_X2", atomic_load_add_global_64, i64>;
1220 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SUB_X2", atomic_load_sub_global_64, i64>;
1221 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_INC_X2", atomic_inc_global_64, i64>;
1222 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_DEC_X2", atomic_dec_global_64, i64>;
1223 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_AND_X2", atomic_load_and_global_64, i64>;
1224 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SMAX_X2", atomic_load_max_global_64, i64>;
1225 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_UMAX_X2", atomic_load_umax_global_64, i64>;
1226 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SMIN_X2", atomic_load_min_global_64, i64>;
1227 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_UMIN_X2", atomic_load_umin_global_64, i64>;
1228 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_OR_X2", atomic_load_or_global_64, i64>;
1229 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_SWAP_X2", atomic_swap_global_64, i64>;
1230 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_CMPSWAP_X2", AMDGPUatomic_cmp_swap_global_64, i64, v2i64>;
1231 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_XOR_X2", atomic_load_xor_global_64, i64>;
1233 let OtherPredicates = [isGFX10Plus] in {
1234 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_FMIN", atomic_load_fmin_global_32, f32>;
1235 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_FMAX", atomic_load_fmax_global_32, f32>;
1236 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_FMIN_X2", atomic_load_fmin_global_64, f64>;
1237 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_FMAX_X2", atomic_load_fmax_global_64, f64>;
1240 let OtherPredicates = [HasAtomicFaddInsts] in {
1241 defm : GlobalFLATNoRtnAtomicPats <GLOBAL_ATOMIC_ADD_F32, atomic_load_fadd_global_noret_32, f32>;
1242 defm : GlobalFLATNoRtnAtomicPats <GLOBAL_ATOMIC_PK_ADD_F16, atomic_load_fadd_v2f16_global_noret_32, v2f16>;
1245 let OtherPredicates = [isGFX90APlus] in {
1246 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_ADD_F32", atomic_load_fadd_global_32, f32>;
1247 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_PK_ADD_F16", atomic_load_fadd_v2f16_global_32, v2f16>;
1248 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_ADD_F64", atomic_load_fadd_global_64, f64>;
1249 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_MIN_F64", atomic_load_fmin_global_64, f64>;
1250 defm : GlobalFLATAtomicPats <"GLOBAL_ATOMIC_MAX_F64", atomic_load_fmax_global_64, f64>;
1251 def : FlatSignedAtomicPat <FLAT_ATOMIC_ADD_F64_RTN, atomic_load_fadd_flat_64, f64>;
1252 def : FlatSignedAtomicPat <FLAT_ATOMIC_MIN_F64_RTN, atomic_load_fmin_flat_64, f64>;
1253 def : FlatSignedAtomicPat <FLAT_ATOMIC_MAX_F64_RTN, atomic_load_fmax_flat_64, f64>;
1256 } // End OtherPredicates = [HasFlatGlobalInsts]
// Selection patterns for private (scratch) accesses via the SCRATCH_*
// pseudos; only active when flat-scratch addressing is enabled.
1258 let OtherPredicates = [HasFlatScratchInsts, EnableFlatScratch] in {
1260 defm : ScratchFLATLoadPats <SCRATCH_LOAD_UBYTE, extloadi8_private, i32>;
1261 defm : ScratchFLATLoadPats <SCRATCH_LOAD_UBYTE, zextloadi8_private, i32>;
1262 defm : ScratchFLATLoadPats <SCRATCH_LOAD_SBYTE, sextloadi8_private, i32>;
1263 defm : ScratchFLATLoadPats <SCRATCH_LOAD_UBYTE, extloadi8_private, i16>;
1264 defm : ScratchFLATLoadPats <SCRATCH_LOAD_UBYTE, zextloadi8_private, i16>;
1265 defm : ScratchFLATLoadPats <SCRATCH_LOAD_SBYTE, sextloadi8_private, i16>;
1266 defm : ScratchFLATLoadPats <SCRATCH_LOAD_USHORT, extloadi16_private, i32>;
1267 defm : ScratchFLATLoadPats <SCRATCH_LOAD_USHORT, zextloadi16_private, i32>;
1268 defm : ScratchFLATLoadPats <SCRATCH_LOAD_SSHORT, sextloadi16_private, i32>;
1269 defm : ScratchFLATLoadPats <SCRATCH_LOAD_USHORT, load_private, i16>;
// Full-width accesses cover every register type of the matching size.
1271 foreach vt = Reg32Types.types in {
1272 defm : ScratchFLATLoadPats <SCRATCH_LOAD_DWORD, load_private, vt>;
1273 defm : ScratchFLATStorePats <SCRATCH_STORE_DWORD, store_private, vt>;
1276 foreach vt = VReg_64.RegTypes in {
1277 defm : ScratchFLATLoadPats <SCRATCH_LOAD_DWORDX2, load_private, vt>;
1278 defm : ScratchFLATStorePats <SCRATCH_STORE_DWORDX2, store_private, vt>;
1281 defm : ScratchFLATLoadPats <SCRATCH_LOAD_DWORDX3, load_private, v3i32>;
1283 foreach vt = VReg_128.RegTypes in {
1284 defm : ScratchFLATLoadPats <SCRATCH_LOAD_DWORDX4, load_private, vt>;
1285 defm : ScratchFLATStorePats <SCRATCH_STORE_DWORDX4, store_private, vt>;
1288 defm : ScratchFLATStorePats <SCRATCH_STORE_BYTE, truncstorei8_private, i32>;
1289 defm : ScratchFLATStorePats <SCRATCH_STORE_BYTE, truncstorei8_private, i16>;
1290 defm : ScratchFLATStorePats <SCRATCH_STORE_SHORT, truncstorei16_private, i32>;
1291 defm : ScratchFLATStorePats <SCRATCH_STORE_SHORT, store_private, i16>;
1292 defm : ScratchFLATStorePats <SCRATCH_STORE_DWORDX3, store_private, v3i32>;
// D16 hi/lo half-register patterns for scratch.
1294 let OtherPredicates = [D16PreservesUnusedBits, HasFlatScratchInsts, EnableFlatScratch] in {
1295 defm : ScratchFLATStorePats <SCRATCH_STORE_SHORT_D16_HI, truncstorei16_hi16_private, i32>;
1296 defm : ScratchFLATStorePats <SCRATCH_STORE_BYTE_D16_HI, truncstorei8_hi16_private, i32>;
1298 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_private, v2i16>;
1299 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_private, v2f16>;
1300 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_private, v2i16>;
1301 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_private, v2f16>;
1302 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SHORT_D16_HI, load_d16_hi_private, v2i16>;
1303 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SHORT_D16_HI, load_d16_hi_private, v2f16>;
1305 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_UBYTE_D16, az_extloadi8_d16_lo_private, v2i16>;
1306 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_UBYTE_D16, az_extloadi8_d16_lo_private, v2f16>;
1307 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SBYTE_D16, sextloadi8_d16_lo_private, v2i16>;
1308 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SBYTE_D16, sextloadi8_d16_lo_private, v2f16>;
1309 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SHORT_D16, load_d16_lo_private, v2i16>;
1310 defm : ScratchFLATLoadPats_D16 <SCRATCH_LOAD_SHORT_D16, load_d16_lo_private, v2f16>;
1313 } // End OtherPredicates = [HasFlatScratchInsts,EnableFlatScratch]
1315 //===----------------------------------------------------------------------===//
1317 //===----------------------------------------------------------------------===//
1319 //===----------------------------------------------------------------------===//
1321 //===----------------------------------------------------------------------===//
// Real (encoded) CI/GFX7 form of a FLAT pseudo, tied back to the pseudo
// through SIMCInstr for MC lowering.
1323 class FLAT_Real_ci <bits<7> op, FLAT_Pseudo ps> :
1325 SIMCInstr <ps.PseudoInstr, SIEncodingFamily.SI> {
1326 let AssemblerPredicate = isGFX7Only;
1327 let DecoderNamespace="GFX7";
// GFX7 real encodings for the FLAT load/store pseudos (opcode in <>).
1330 def FLAT_LOAD_UBYTE_ci : FLAT_Real_ci <0x8, FLAT_LOAD_UBYTE>;
1331 def FLAT_LOAD_SBYTE_ci : FLAT_Real_ci <0x9, FLAT_LOAD_SBYTE>;
1332 def FLAT_LOAD_USHORT_ci : FLAT_Real_ci <0xa, FLAT_LOAD_USHORT>;
1333 def FLAT_LOAD_SSHORT_ci : FLAT_Real_ci <0xb, FLAT_LOAD_SSHORT>;
1334 def FLAT_LOAD_DWORD_ci : FLAT_Real_ci <0xc, FLAT_LOAD_DWORD>;
1335 def FLAT_LOAD_DWORDX2_ci : FLAT_Real_ci <0xd, FLAT_LOAD_DWORDX2>;
1336 def FLAT_LOAD_DWORDX4_ci : FLAT_Real_ci <0xe, FLAT_LOAD_DWORDX4>;
1337 def FLAT_LOAD_DWORDX3_ci : FLAT_Real_ci <0xf, FLAT_LOAD_DWORDX3>;
1339 def FLAT_STORE_BYTE_ci : FLAT_Real_ci <0x18, FLAT_STORE_BYTE>;
1340 def FLAT_STORE_SHORT_ci : FLAT_Real_ci <0x1a, FLAT_STORE_SHORT>;
1341 def FLAT_STORE_DWORD_ci : FLAT_Real_ci <0x1c, FLAT_STORE_DWORD>;
1342 def FLAT_STORE_DWORDX2_ci : FLAT_Real_ci <0x1d, FLAT_STORE_DWORDX2>;
1343 def FLAT_STORE_DWORDX4_ci : FLAT_Real_ci <0x1e, FLAT_STORE_DWORDX4>;
1344 def FLAT_STORE_DWORDX3_ci : FLAT_Real_ci <0x1f, FLAT_STORE_DWORDX3>;
// Emit both the non-returning and _RTN real forms of a CI atomic at the
// same opcode.
1346 multiclass FLAT_Real_Atomics_ci <bits<7> op, FLAT_Pseudo ps> {
1347 def _ci : FLAT_Real_ci<op, !cast<FLAT_Pseudo>(ps.PseudoInstr)>;
1348 def _RTN_ci : FLAT_Real_ci<op, !cast<FLAT_Pseudo>(ps.PseudoInstr # "_RTN")>;
// GFX7 real encodings for the FLAT atomics; 32-bit ops start at 0x30 and
// the 64-bit (X2) ops at 0x50.
1351 defm FLAT_ATOMIC_SWAP : FLAT_Real_Atomics_ci <0x30, FLAT_ATOMIC_SWAP>;
1352 defm FLAT_ATOMIC_CMPSWAP : FLAT_Real_Atomics_ci <0x31, FLAT_ATOMIC_CMPSWAP>;
1353 defm FLAT_ATOMIC_ADD : FLAT_Real_Atomics_ci <0x32, FLAT_ATOMIC_ADD>;
1354 defm FLAT_ATOMIC_SUB : FLAT_Real_Atomics_ci <0x33, FLAT_ATOMIC_SUB>;
1355 defm FLAT_ATOMIC_SMIN : FLAT_Real_Atomics_ci <0x35, FLAT_ATOMIC_SMIN>;
1356 defm FLAT_ATOMIC_UMIN : FLAT_Real_Atomics_ci <0x36, FLAT_ATOMIC_UMIN>;
1357 defm FLAT_ATOMIC_SMAX : FLAT_Real_Atomics_ci <0x37, FLAT_ATOMIC_SMAX>;
1358 defm FLAT_ATOMIC_UMAX : FLAT_Real_Atomics_ci <0x38, FLAT_ATOMIC_UMAX>;
1359 defm FLAT_ATOMIC_AND : FLAT_Real_Atomics_ci <0x39, FLAT_ATOMIC_AND>;
1360 defm FLAT_ATOMIC_OR : FLAT_Real_Atomics_ci <0x3a, FLAT_ATOMIC_OR>;
1361 defm FLAT_ATOMIC_XOR : FLAT_Real_Atomics_ci <0x3b, FLAT_ATOMIC_XOR>;
1362 defm FLAT_ATOMIC_INC : FLAT_Real_Atomics_ci <0x3c, FLAT_ATOMIC_INC>;
1363 defm FLAT_ATOMIC_DEC : FLAT_Real_Atomics_ci <0x3d, FLAT_ATOMIC_DEC>;
1364 defm FLAT_ATOMIC_SWAP_X2 : FLAT_Real_Atomics_ci <0x50, FLAT_ATOMIC_SWAP_X2>;
1365 defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Real_Atomics_ci <0x51, FLAT_ATOMIC_CMPSWAP_X2>;
1366 defm FLAT_ATOMIC_ADD_X2 : FLAT_Real_Atomics_ci <0x52, FLAT_ATOMIC_ADD_X2>;
1367 defm FLAT_ATOMIC_SUB_X2 : FLAT_Real_Atomics_ci <0x53, FLAT_ATOMIC_SUB_X2>;
1368 defm FLAT_ATOMIC_SMIN_X2 : FLAT_Real_Atomics_ci <0x55, FLAT_ATOMIC_SMIN_X2>;
1369 defm FLAT_ATOMIC_UMIN_X2 : FLAT_Real_Atomics_ci <0x56, FLAT_ATOMIC_UMIN_X2>;
1370 defm FLAT_ATOMIC_SMAX_X2 : FLAT_Real_Atomics_ci <0x57, FLAT_ATOMIC_SMAX_X2>;
1371 defm FLAT_ATOMIC_UMAX_X2 : FLAT_Real_Atomics_ci <0x58, FLAT_ATOMIC_UMAX_X2>;
1372 defm FLAT_ATOMIC_AND_X2 : FLAT_Real_Atomics_ci <0x59, FLAT_ATOMIC_AND_X2>;
1373 defm FLAT_ATOMIC_OR_X2 : FLAT_Real_Atomics_ci <0x5a, FLAT_ATOMIC_OR_X2>;
1374 defm FLAT_ATOMIC_XOR_X2 : FLAT_Real_Atomics_ci <0x5b, FLAT_ATOMIC_XOR_X2>;
1375 defm FLAT_ATOMIC_INC_X2 : FLAT_Real_Atomics_ci <0x5c, FLAT_ATOMIC_INC_X2>;
1376 defm FLAT_ATOMIC_DEC_X2 : FLAT_Real_Atomics_ci <0x5d, FLAT_ATOMIC_DEC_X2>;
1378 // CI Only flat instructions
1379 defm FLAT_ATOMIC_FCMPSWAP : FLAT_Real_Atomics_ci <0x3e, FLAT_ATOMIC_FCMPSWAP>;
1380 defm FLAT_ATOMIC_FMIN : FLAT_Real_Atomics_ci <0x3f, FLAT_ATOMIC_FMIN>;
1381 defm FLAT_ATOMIC_FMAX : FLAT_Real_Atomics_ci <0x40, FLAT_ATOMIC_FMAX>;
1382 defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Real_Atomics_ci <0x5e, FLAT_ATOMIC_FCMPSWAP_X2>;
1383 defm FLAT_ATOMIC_FMIN_X2 : FLAT_Real_Atomics_ci <0x5f, FLAT_ATOMIC_FMIN_X2>;
1384 defm FLAT_ATOMIC_FMAX_X2 : FLAT_Real_Atomics_ci <0x60, FLAT_ATOMIC_FMAX_X2>;
1387 //===----------------------------------------------------------------------===//
1389 //===----------------------------------------------------------------------===//
// Real (encoded) GFX8/GFX9 form of a FLAT pseudo. has_sccb selects whether
// encoding bit 25 carries the SCC cache-policy bit from $cpol or the
// pseudo's fixed sccbValue, and whether "$sccb" is kept in the asm string.
1391 class FLAT_Real_vi <bits<7> op, FLAT_Pseudo ps, bit has_sccb = ps.has_sccb> :
1393 SIMCInstr <ps.PseudoInstr, SIEncodingFamily.VI> {
1394 let AssemblerPredicate = isGFX8GFX9;
1395 let DecoderNamespace = "GFX8";
1397 let Inst{25} = !if(has_sccb, cpol{CPolBit.SCC}, ps.sccbValue);
1398 let AsmString = ps.Mnemonic #
1399 !subst("$sccb", !if(has_sccb, "$sccb",""), ps.AsmOperands);
// Emit both the plain and _SADDR real forms of the enclosing pseudo
// (looked up by NAME) at the same opcode.
1402 multiclass FLAT_Real_AllAddr_vi<bits<7> op,
1403 bit has_sccb = !cast<FLAT_Pseudo>(NAME).has_sccb> {
1404 def _vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(NAME), has_sccb>;
1405 def _SADDR_vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(NAME#"_SADDR"), has_sccb>;
// GFX8/GFX9 real encodings for the FLAT load/store pseudos.
1408 def FLAT_LOAD_UBYTE_vi : FLAT_Real_vi <0x10, FLAT_LOAD_UBYTE>;
1409 def FLAT_LOAD_SBYTE_vi : FLAT_Real_vi <0x11, FLAT_LOAD_SBYTE>;
1410 def FLAT_LOAD_USHORT_vi : FLAT_Real_vi <0x12, FLAT_LOAD_USHORT>;
1411 def FLAT_LOAD_SSHORT_vi : FLAT_Real_vi <0x13, FLAT_LOAD_SSHORT>;
1412 def FLAT_LOAD_DWORD_vi : FLAT_Real_vi <0x14, FLAT_LOAD_DWORD>;
1413 def FLAT_LOAD_DWORDX2_vi : FLAT_Real_vi <0x15, FLAT_LOAD_DWORDX2>;
1414 def FLAT_LOAD_DWORDX4_vi : FLAT_Real_vi <0x17, FLAT_LOAD_DWORDX4>;
1415 def FLAT_LOAD_DWORDX3_vi : FLAT_Real_vi <0x16, FLAT_LOAD_DWORDX3>;
1417 def FLAT_STORE_BYTE_vi : FLAT_Real_vi <0x18, FLAT_STORE_BYTE>;
1418 def FLAT_STORE_BYTE_D16_HI_vi : FLAT_Real_vi <0x19, FLAT_STORE_BYTE_D16_HI>;
1419 def FLAT_STORE_SHORT_vi : FLAT_Real_vi <0x1a, FLAT_STORE_SHORT>;
1420 def FLAT_STORE_SHORT_D16_HI_vi : FLAT_Real_vi <0x1b, FLAT_STORE_SHORT_D16_HI>;
1421 def FLAT_STORE_DWORD_vi : FLAT_Real_vi <0x1c, FLAT_STORE_DWORD>;
1422 def FLAT_STORE_DWORDX2_vi : FLAT_Real_vi <0x1d, FLAT_STORE_DWORDX2>;
1423 def FLAT_STORE_DWORDX4_vi : FLAT_Real_vi <0x1f, FLAT_STORE_DWORDX4>;
1424 def FLAT_STORE_DWORDX3_vi : FLAT_Real_vi <0x1e, FLAT_STORE_DWORDX3>;
// D16 half-register loads.
1426 def FLAT_LOAD_UBYTE_D16_vi : FLAT_Real_vi <0x20, FLAT_LOAD_UBYTE_D16>;
1427 def FLAT_LOAD_UBYTE_D16_HI_vi : FLAT_Real_vi <0x21, FLAT_LOAD_UBYTE_D16_HI>;
1428 def FLAT_LOAD_SBYTE_D16_vi : FLAT_Real_vi <0x22, FLAT_LOAD_SBYTE_D16>;
1429 def FLAT_LOAD_SBYTE_D16_HI_vi : FLAT_Real_vi <0x23, FLAT_LOAD_SBYTE_D16_HI>;
1430 def FLAT_LOAD_SHORT_D16_vi : FLAT_Real_vi <0x24, FLAT_LOAD_SHORT_D16>;
1431 def FLAT_LOAD_SHORT_D16_HI_vi : FLAT_Real_vi <0x25, FLAT_LOAD_SHORT_D16_HI>;
// Emit the non-returning and _RTN real forms of a flat atomic.
1433 multiclass FLAT_Real_Atomics_vi <bits<7> op, FLAT_Pseudo ps,
1434 bit has_sccb = !cast<FLAT_Pseudo>(NAME).has_sccb> {
1435 def _vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(ps.PseudoInstr), has_sccb>;
1436 def _RTN_vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(ps.PseudoInstr # "_RTN"), has_sccb>;
// Global atomics additionally get _SADDR and _SADDR_RTN real forms on top
// of the AllAddr (plain + _SADDR) pair.
1439 multiclass FLAT_Global_Real_Atomics_vi<bits<7> op,
1440 bit has_sccb = !cast<FLAT_Pseudo>(NAME).has_sccb> :
1441 FLAT_Real_AllAddr_vi<op, has_sccb> {
1442 def _RTN_vi : FLAT_Real_vi <op, !cast<FLAT_Pseudo>(NAME#"_RTN"), has_sccb>;
1443 def _SADDR_RTN_vi : FLAT_Real_vi <op, !cast<FLAT_Pseudo>(NAME#"_SADDR_RTN"), has_sccb>;
// GFX8/GFX9 real encodings for the FLAT atomics; 32-bit ops start at 0x40
// and the 64-bit (X2) ops at 0x60.
1447 defm FLAT_ATOMIC_SWAP : FLAT_Real_Atomics_vi <0x40, FLAT_ATOMIC_SWAP>;
1448 defm FLAT_ATOMIC_CMPSWAP : FLAT_Real_Atomics_vi <0x41, FLAT_ATOMIC_CMPSWAP>;
1449 defm FLAT_ATOMIC_ADD : FLAT_Real_Atomics_vi <0x42, FLAT_ATOMIC_ADD>;
1450 defm FLAT_ATOMIC_SUB : FLAT_Real_Atomics_vi <0x43, FLAT_ATOMIC_SUB>;
1451 defm FLAT_ATOMIC_SMIN : FLAT_Real_Atomics_vi <0x44, FLAT_ATOMIC_SMIN>;
1452 defm FLAT_ATOMIC_UMIN : FLAT_Real_Atomics_vi <0x45, FLAT_ATOMIC_UMIN>;
1453 defm FLAT_ATOMIC_SMAX : FLAT_Real_Atomics_vi <0x46, FLAT_ATOMIC_SMAX>;
1454 defm FLAT_ATOMIC_UMAX : FLAT_Real_Atomics_vi <0x47, FLAT_ATOMIC_UMAX>;
1455 defm FLAT_ATOMIC_AND : FLAT_Real_Atomics_vi <0x48, FLAT_ATOMIC_AND>;
1456 defm FLAT_ATOMIC_OR : FLAT_Real_Atomics_vi <0x49, FLAT_ATOMIC_OR>;
1457 defm FLAT_ATOMIC_XOR : FLAT_Real_Atomics_vi <0x4a, FLAT_ATOMIC_XOR>;
1458 defm FLAT_ATOMIC_INC : FLAT_Real_Atomics_vi <0x4b, FLAT_ATOMIC_INC>;
1459 defm FLAT_ATOMIC_DEC : FLAT_Real_Atomics_vi <0x4c, FLAT_ATOMIC_DEC>;
1460 defm FLAT_ATOMIC_SWAP_X2 : FLAT_Real_Atomics_vi <0x60, FLAT_ATOMIC_SWAP_X2>;
1461 defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Real_Atomics_vi <0x61, FLAT_ATOMIC_CMPSWAP_X2>;
1462 defm FLAT_ATOMIC_ADD_X2 : FLAT_Real_Atomics_vi <0x62, FLAT_ATOMIC_ADD_X2>;
1463 defm FLAT_ATOMIC_SUB_X2 : FLAT_Real_Atomics_vi <0x63, FLAT_ATOMIC_SUB_X2>;
1464 defm FLAT_ATOMIC_SMIN_X2 : FLAT_Real_Atomics_vi <0x64, FLAT_ATOMIC_SMIN_X2>;
1465 defm FLAT_ATOMIC_UMIN_X2 : FLAT_Real_Atomics_vi <0x65, FLAT_ATOMIC_UMIN_X2>;
1466 defm FLAT_ATOMIC_SMAX_X2 : FLAT_Real_Atomics_vi <0x66, FLAT_ATOMIC_SMAX_X2>;
1467 defm FLAT_ATOMIC_UMAX_X2 : FLAT_Real_Atomics_vi <0x67, FLAT_ATOMIC_UMAX_X2>;
1468 defm FLAT_ATOMIC_AND_X2 : FLAT_Real_Atomics_vi <0x68, FLAT_ATOMIC_AND_X2>;
1469 defm FLAT_ATOMIC_OR_X2 : FLAT_Real_Atomics_vi <0x69, FLAT_ATOMIC_OR_X2>;
1470 defm FLAT_ATOMIC_XOR_X2 : FLAT_Real_Atomics_vi <0x6a, FLAT_ATOMIC_XOR_X2>;
1471 defm FLAT_ATOMIC_INC_X2 : FLAT_Real_Atomics_vi <0x6b, FLAT_ATOMIC_INC_X2>;
1472 defm FLAT_ATOMIC_DEC_X2 : FLAT_Real_Atomics_vi <0x6c, FLAT_ATOMIC_DEC_X2>;
// VI/GFX9 real encodings for GLOBAL loads/stores. The opcode values
// match the corresponding FLAT instructions; the segment is selected
// by separate encoding bits set in the real-instruction class.
defm GLOBAL_LOAD_UBYTE : FLAT_Real_AllAddr_vi <0x10>;
defm GLOBAL_LOAD_SBYTE : FLAT_Real_AllAddr_vi <0x11>;
defm GLOBAL_LOAD_USHORT : FLAT_Real_AllAddr_vi <0x12>;
defm GLOBAL_LOAD_SSHORT : FLAT_Real_AllAddr_vi <0x13>;
defm GLOBAL_LOAD_DWORD : FLAT_Real_AllAddr_vi <0x14>;
defm GLOBAL_LOAD_DWORDX2 : FLAT_Real_AllAddr_vi <0x15>;
defm GLOBAL_LOAD_DWORDX3 : FLAT_Real_AllAddr_vi <0x16>;
defm GLOBAL_LOAD_DWORDX4 : FLAT_Real_AllAddr_vi <0x17>;

defm GLOBAL_LOAD_UBYTE_D16 : FLAT_Real_AllAddr_vi <0x20>;
defm GLOBAL_LOAD_UBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x21>;
defm GLOBAL_LOAD_SBYTE_D16 : FLAT_Real_AllAddr_vi <0x22>;
defm GLOBAL_LOAD_SBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x23>;
defm GLOBAL_LOAD_SHORT_D16 : FLAT_Real_AllAddr_vi <0x24>;
defm GLOBAL_LOAD_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x25>;

defm GLOBAL_STORE_BYTE : FLAT_Real_AllAddr_vi <0x18>;
defm GLOBAL_STORE_BYTE_D16_HI : FLAT_Real_AllAddr_vi <0x19>;
defm GLOBAL_STORE_SHORT : FLAT_Real_AllAddr_vi <0x1a>;
defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x1b>;
defm GLOBAL_STORE_DWORD : FLAT_Real_AllAddr_vi <0x1c>;
defm GLOBAL_STORE_DWORDX2 : FLAT_Real_AllAddr_vi <0x1d>;
defm GLOBAL_STORE_DWORDX3 : FLAT_Real_AllAddr_vi <0x1e>;
defm GLOBAL_STORE_DWORDX4 : FLAT_Real_AllAddr_vi <0x1f>;
// VI/GFX9 real encodings for GLOBAL atomics; opcodes mirror the FLAT
// atomic range (0x40-0x4c and 0x60-0x6c for the _X2 forms).
defm GLOBAL_ATOMIC_SWAP : FLAT_Global_Real_Atomics_vi <0x40>;
defm GLOBAL_ATOMIC_CMPSWAP : FLAT_Global_Real_Atomics_vi <0x41>;
defm GLOBAL_ATOMIC_ADD : FLAT_Global_Real_Atomics_vi <0x42>;
defm GLOBAL_ATOMIC_SUB : FLAT_Global_Real_Atomics_vi <0x43>;
defm GLOBAL_ATOMIC_SMIN : FLAT_Global_Real_Atomics_vi <0x44>;
defm GLOBAL_ATOMIC_UMIN : FLAT_Global_Real_Atomics_vi <0x45>;
defm GLOBAL_ATOMIC_SMAX : FLAT_Global_Real_Atomics_vi <0x46>;
defm GLOBAL_ATOMIC_UMAX : FLAT_Global_Real_Atomics_vi <0x47>;
defm GLOBAL_ATOMIC_AND : FLAT_Global_Real_Atomics_vi <0x48>;
defm GLOBAL_ATOMIC_OR : FLAT_Global_Real_Atomics_vi <0x49>;
defm GLOBAL_ATOMIC_XOR : FLAT_Global_Real_Atomics_vi <0x4a>;
defm GLOBAL_ATOMIC_INC : FLAT_Global_Real_Atomics_vi <0x4b>;
defm GLOBAL_ATOMIC_DEC : FLAT_Global_Real_Atomics_vi <0x4c>;
defm GLOBAL_ATOMIC_SWAP_X2 : FLAT_Global_Real_Atomics_vi <0x60>;
defm GLOBAL_ATOMIC_CMPSWAP_X2 : FLAT_Global_Real_Atomics_vi <0x61>;
defm GLOBAL_ATOMIC_ADD_X2 : FLAT_Global_Real_Atomics_vi <0x62>;
defm GLOBAL_ATOMIC_SUB_X2 : FLAT_Global_Real_Atomics_vi <0x63>;
defm GLOBAL_ATOMIC_SMIN_X2 : FLAT_Global_Real_Atomics_vi <0x64>;
defm GLOBAL_ATOMIC_UMIN_X2 : FLAT_Global_Real_Atomics_vi <0x65>;
defm GLOBAL_ATOMIC_SMAX_X2 : FLAT_Global_Real_Atomics_vi <0x66>;
defm GLOBAL_ATOMIC_UMAX_X2 : FLAT_Global_Real_Atomics_vi <0x67>;
defm GLOBAL_ATOMIC_AND_X2 : FLAT_Global_Real_Atomics_vi <0x68>;
defm GLOBAL_ATOMIC_OR_X2 : FLAT_Global_Real_Atomics_vi <0x69>;
defm GLOBAL_ATOMIC_XOR_X2 : FLAT_Global_Real_Atomics_vi <0x6a>;
defm GLOBAL_ATOMIC_INC_X2 : FLAT_Global_Real_Atomics_vi <0x6b>;
defm GLOBAL_ATOMIC_DEC_X2 : FLAT_Global_Real_Atomics_vi <0x6c>;
// VI/GFX9 real encodings for SCRATCH (private segment) loads/stores;
// opcodes mirror the FLAT/GLOBAL load/store range.
defm SCRATCH_LOAD_UBYTE : FLAT_Real_AllAddr_vi <0x10>;
defm SCRATCH_LOAD_SBYTE : FLAT_Real_AllAddr_vi <0x11>;
defm SCRATCH_LOAD_USHORT : FLAT_Real_AllAddr_vi <0x12>;
defm SCRATCH_LOAD_SSHORT : FLAT_Real_AllAddr_vi <0x13>;
defm SCRATCH_LOAD_DWORD : FLAT_Real_AllAddr_vi <0x14>;
defm SCRATCH_LOAD_DWORDX2 : FLAT_Real_AllAddr_vi <0x15>;
defm SCRATCH_LOAD_DWORDX3 : FLAT_Real_AllAddr_vi <0x16>;
defm SCRATCH_LOAD_DWORDX4 : FLAT_Real_AllAddr_vi <0x17>;
defm SCRATCH_STORE_BYTE : FLAT_Real_AllAddr_vi <0x18>;
defm SCRATCH_STORE_BYTE_D16_HI : FLAT_Real_AllAddr_vi <0x19>;
defm SCRATCH_LOAD_UBYTE_D16 : FLAT_Real_AllAddr_vi <0x20>;
defm SCRATCH_LOAD_UBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x21>;
defm SCRATCH_LOAD_SBYTE_D16 : FLAT_Real_AllAddr_vi <0x22>;
defm SCRATCH_LOAD_SBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x23>;
defm SCRATCH_LOAD_SHORT_D16 : FLAT_Real_AllAddr_vi <0x24>;
defm SCRATCH_LOAD_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x25>;
defm SCRATCH_STORE_SHORT : FLAT_Real_AllAddr_vi <0x1a>;
defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x1b>;
defm SCRATCH_STORE_DWORD : FLAT_Real_AllAddr_vi <0x1c>;
defm SCRATCH_STORE_DWORDX2 : FLAT_Real_AllAddr_vi <0x1d>;
defm SCRATCH_STORE_DWORDX3 : FLAT_Real_AllAddr_vi <0x1e>;
defm SCRATCH_STORE_DWORDX4 : FLAT_Real_AllAddr_vi <0x1f>;
// Buffer/global FP atomic adds; sccb forced to 0 (third multiclass
// argument) as these do not use the SCC-bypass bit.
let SubtargetPredicate = HasAtomicFaddInsts in {
defm GLOBAL_ATOMIC_ADD_F32 : FLAT_Global_Real_Atomics_vi <0x04d, 0>;
defm GLOBAL_ATOMIC_PK_ADD_F16 : FLAT_Global_Real_Atomics_vi <0x04e, 0>;
} // End SubtargetPredicate = HasAtomicFaddInsts
// gfx90a-only f64 atomics (FLAT and GLOBAL); sccb forced to 0.
let SubtargetPredicate = isGFX90AOnly in {
  defm FLAT_ATOMIC_ADD_F64 : FLAT_Real_Atomics_vi<0x4f, FLAT_ATOMIC_ADD_F64, 0>;
  defm FLAT_ATOMIC_MIN_F64 : FLAT_Real_Atomics_vi<0x50, FLAT_ATOMIC_MIN_F64, 0>;
  defm FLAT_ATOMIC_MAX_F64 : FLAT_Real_Atomics_vi<0x51, FLAT_ATOMIC_MAX_F64, 0>;
  defm GLOBAL_ATOMIC_ADD_F64 : FLAT_Global_Real_Atomics_vi<0x4f, 0>;
  defm GLOBAL_ATOMIC_MIN_F64 : FLAT_Global_Real_Atomics_vi<0x50, 0>;
  defm GLOBAL_ATOMIC_MAX_F64 : FLAT_Global_Real_Atomics_vi<0x51, 0>;
} // End SubtargetPredicate = isGFX90AOnly
//===----------------------------------------------------------------------===//
// GFX10.
//===----------------------------------------------------------------------===//
// Base class for GFX10 real FLAT/GLOBAL/SCRATCH encodings. Fills in
// the gfx10-specific fields: 12-bit offset, DLC cache-policy bit, and
// the saddr field (0x7d = NULL when saddr is absent/disabled).
class FLAT_Real_gfx10<bits<7> op, FLAT_Pseudo ps> :
  FLAT_Real<op, ps>, SIMCInstr<ps.PseudoInstr, SIEncodingFamily.GFX10> {
  let AssemblerPredicate = isGFX10Plus;
  let DecoderNamespace = "GFX10";

  let Inst{11-0} = offset{11-0};
  let Inst{12} = !if(ps.has_dlc, cpol{CPolBit.DLC}, ps.dlcValue);
  let Inst{54-48} = !if(ps.has_saddr, !if(ps.enabled_saddr, saddr, 0x7d), 0x7d);
  // NOTE(review): bit 55 reconstructed as 0 (reserved on gfx10) —
  // confirm against the upstream file.
  let Inst{55} = 0;
}
// GFX10 real encoding of the plain (vaddr-addressed) form.
multiclass FLAT_Real_Base_gfx10<bits<7> op> {
  def _gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME)>;
}
// GFX10 real encoding of the returning (glc=1, _RTN) form.
multiclass FLAT_Real_RTN_gfx10<bits<7> op> {
  def _RTN_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_RTN")>;
}
// GFX10 real encoding of the scalar-base-address (_SADDR) form.
multiclass FLAT_Real_SADDR_gfx10<bits<7> op> {
  def _SADDR_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_SADDR")>;
}
// GFX10 real encoding of the returning scalar-base-address form.
multiclass FLAT_Real_SADDR_RTN_gfx10<bits<7> op> {
  def _SADDR_RTN_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_SADDR_RTN")>;
}
// GFX10 real encoding of the scratch ST mode (no vaddr, no saddr)
// form. The saddr field is set to EXEC_HI's encoding to signal ST
// mode; only valid when the subtarget supports it.
multiclass FLAT_Real_ST_gfx10<bits<7> op> {
  def _ST_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_ST")> {
      let Inst{54-48} = !cast<int>(EXEC_HI.HWEncoding);
      let OtherPredicates = [HasFlatScratchSTMode];
    }
}
// Convenience bundles combining the addressing-mode multiclasses
// above for the common instruction categories.

// Loads/stores: vaddr + saddr forms.
multiclass FLAT_Real_AllAddr_gfx10<bits<7> op> :
  FLAT_Real_Base_gfx10<op>,
  FLAT_Real_SADDR_gfx10<op>;

// FLAT atomics: vaddr form plus its returning variant.
multiclass FLAT_Real_Atomics_gfx10<bits<7> op> :
  FLAT_Real_Base_gfx10<op>,
  FLAT_Real_RTN_gfx10<op>;

// GLOBAL atomics: all addressing forms plus both returning variants.
multiclass FLAT_Real_GlblAtomics_gfx10<bits<7> op> :
  FLAT_Real_AllAddr_gfx10<op>,
  FLAT_Real_RTN_gfx10<op>,
  FLAT_Real_SADDR_RTN_gfx10<op>;

// Return-only GLOBAL atomics (e.g. CSUB has no no-return form).
multiclass FLAT_Real_GlblAtomics_RTN_gfx10<bits<7> op> :
  FLAT_Real_RTN_gfx10<op>,
  FLAT_Real_SADDR_RTN_gfx10<op>;

// SCRATCH accesses: vaddr, saddr, and ST-mode forms.
multiclass FLAT_Real_ScratchAllAddr_gfx10<bits<7> op> :
  FLAT_Real_Base_gfx10<op>,
  FLAT_Real_SADDR_gfx10<op>,
  FLAT_Real_ST_gfx10<op>;
// ENC_FLAT.
defm FLAT_LOAD_UBYTE : FLAT_Real_Base_gfx10<0x008>;
defm FLAT_LOAD_SBYTE : FLAT_Real_Base_gfx10<0x009>;
defm FLAT_LOAD_USHORT : FLAT_Real_Base_gfx10<0x00a>;
defm FLAT_LOAD_SSHORT : FLAT_Real_Base_gfx10<0x00b>;
defm FLAT_LOAD_DWORD : FLAT_Real_Base_gfx10<0x00c>;
defm FLAT_LOAD_DWORDX2 : FLAT_Real_Base_gfx10<0x00d>;
// Note: on gfx10 DWORDX4 (0x00e) precedes DWORDX3 (0x00f), unlike VI.
defm FLAT_LOAD_DWORDX4 : FLAT_Real_Base_gfx10<0x00e>;
defm FLAT_LOAD_DWORDX3 : FLAT_Real_Base_gfx10<0x00f>;
defm FLAT_STORE_BYTE : FLAT_Real_Base_gfx10<0x018>;
defm FLAT_STORE_BYTE_D16_HI : FLAT_Real_Base_gfx10<0x019>;
defm FLAT_STORE_SHORT : FLAT_Real_Base_gfx10<0x01a>;
defm FLAT_STORE_SHORT_D16_HI : FLAT_Real_Base_gfx10<0x01b>;
defm FLAT_STORE_DWORD : FLAT_Real_Base_gfx10<0x01c>;
defm FLAT_STORE_DWORDX2 : FLAT_Real_Base_gfx10<0x01d>;
defm FLAT_STORE_DWORDX4 : FLAT_Real_Base_gfx10<0x01e>;
defm FLAT_STORE_DWORDX3 : FLAT_Real_Base_gfx10<0x01f>;
defm FLAT_LOAD_UBYTE_D16 : FLAT_Real_Base_gfx10<0x020>;
defm FLAT_LOAD_UBYTE_D16_HI : FLAT_Real_Base_gfx10<0x021>;
defm FLAT_LOAD_SBYTE_D16 : FLAT_Real_Base_gfx10<0x022>;
defm FLAT_LOAD_SBYTE_D16_HI : FLAT_Real_Base_gfx10<0x023>;
defm FLAT_LOAD_SHORT_D16 : FLAT_Real_Base_gfx10<0x024>;
defm FLAT_LOAD_SHORT_D16_HI : FLAT_Real_Base_gfx10<0x025>;
// gfx10 FLAT atomics (0x030-0x040 32-bit, 0x050-0x060 64-bit),
// including the FP compare-swap/min/max atomics.
defm FLAT_ATOMIC_SWAP : FLAT_Real_Atomics_gfx10<0x030>;
defm FLAT_ATOMIC_CMPSWAP : FLAT_Real_Atomics_gfx10<0x031>;
defm FLAT_ATOMIC_ADD : FLAT_Real_Atomics_gfx10<0x032>;
defm FLAT_ATOMIC_SUB : FLAT_Real_Atomics_gfx10<0x033>;
defm FLAT_ATOMIC_SMIN : FLAT_Real_Atomics_gfx10<0x035>;
defm FLAT_ATOMIC_UMIN : FLAT_Real_Atomics_gfx10<0x036>;
defm FLAT_ATOMIC_SMAX : FLAT_Real_Atomics_gfx10<0x037>;
defm FLAT_ATOMIC_UMAX : FLAT_Real_Atomics_gfx10<0x038>;
defm FLAT_ATOMIC_AND : FLAT_Real_Atomics_gfx10<0x039>;
defm FLAT_ATOMIC_OR : FLAT_Real_Atomics_gfx10<0x03a>;
defm FLAT_ATOMIC_XOR : FLAT_Real_Atomics_gfx10<0x03b>;
defm FLAT_ATOMIC_INC : FLAT_Real_Atomics_gfx10<0x03c>;
defm FLAT_ATOMIC_DEC : FLAT_Real_Atomics_gfx10<0x03d>;
defm FLAT_ATOMIC_FCMPSWAP : FLAT_Real_Atomics_gfx10<0x03e>;
defm FLAT_ATOMIC_FMIN : FLAT_Real_Atomics_gfx10<0x03f>;
defm FLAT_ATOMIC_FMAX : FLAT_Real_Atomics_gfx10<0x040>;
defm FLAT_ATOMIC_SWAP_X2 : FLAT_Real_Atomics_gfx10<0x050>;
defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Real_Atomics_gfx10<0x051>;
defm FLAT_ATOMIC_ADD_X2 : FLAT_Real_Atomics_gfx10<0x052>;
defm FLAT_ATOMIC_SUB_X2 : FLAT_Real_Atomics_gfx10<0x053>;
defm FLAT_ATOMIC_SMIN_X2 : FLAT_Real_Atomics_gfx10<0x055>;
defm FLAT_ATOMIC_UMIN_X2 : FLAT_Real_Atomics_gfx10<0x056>;
defm FLAT_ATOMIC_SMAX_X2 : FLAT_Real_Atomics_gfx10<0x057>;
defm FLAT_ATOMIC_UMAX_X2 : FLAT_Real_Atomics_gfx10<0x058>;
defm FLAT_ATOMIC_AND_X2 : FLAT_Real_Atomics_gfx10<0x059>;
defm FLAT_ATOMIC_OR_X2 : FLAT_Real_Atomics_gfx10<0x05a>;
defm FLAT_ATOMIC_XOR_X2 : FLAT_Real_Atomics_gfx10<0x05b>;
defm FLAT_ATOMIC_INC_X2 : FLAT_Real_Atomics_gfx10<0x05c>;
defm FLAT_ATOMIC_DEC_X2 : FLAT_Real_Atomics_gfx10<0x05d>;
defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Real_Atomics_gfx10<0x05e>;
defm FLAT_ATOMIC_FMIN_X2 : FLAT_Real_Atomics_gfx10<0x05f>;
defm FLAT_ATOMIC_FMAX_X2 : FLAT_Real_Atomics_gfx10<0x060>;
// ENC_FLAT_GLBL.
defm GLOBAL_LOAD_UBYTE : FLAT_Real_AllAddr_gfx10<0x008>;
defm GLOBAL_LOAD_SBYTE : FLAT_Real_AllAddr_gfx10<0x009>;
defm GLOBAL_LOAD_USHORT : FLAT_Real_AllAddr_gfx10<0x00a>;
defm GLOBAL_LOAD_SSHORT : FLAT_Real_AllAddr_gfx10<0x00b>;
defm GLOBAL_LOAD_DWORD : FLAT_Real_AllAddr_gfx10<0x00c>;
defm GLOBAL_LOAD_DWORDX2 : FLAT_Real_AllAddr_gfx10<0x00d>;
defm GLOBAL_LOAD_DWORDX4 : FLAT_Real_AllAddr_gfx10<0x00e>;
defm GLOBAL_LOAD_DWORDX3 : FLAT_Real_AllAddr_gfx10<0x00f>;
defm GLOBAL_STORE_BYTE : FLAT_Real_AllAddr_gfx10<0x018>;
defm GLOBAL_STORE_BYTE_D16_HI : FLAT_Real_AllAddr_gfx10<0x019>;
defm GLOBAL_STORE_SHORT : FLAT_Real_AllAddr_gfx10<0x01a>;
defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_gfx10<0x01b>;
defm GLOBAL_STORE_DWORD : FLAT_Real_AllAddr_gfx10<0x01c>;
defm GLOBAL_STORE_DWORDX2 : FLAT_Real_AllAddr_gfx10<0x01d>;
defm GLOBAL_STORE_DWORDX4 : FLAT_Real_AllAddr_gfx10<0x01e>;
defm GLOBAL_STORE_DWORDX3 : FLAT_Real_AllAddr_gfx10<0x01f>;
defm GLOBAL_LOAD_UBYTE_D16 : FLAT_Real_AllAddr_gfx10<0x020>;
defm GLOBAL_LOAD_UBYTE_D16_HI : FLAT_Real_AllAddr_gfx10<0x021>;
defm GLOBAL_LOAD_SBYTE_D16 : FLAT_Real_AllAddr_gfx10<0x022>;
defm GLOBAL_LOAD_SBYTE_D16_HI : FLAT_Real_AllAddr_gfx10<0x023>;
defm GLOBAL_LOAD_SHORT_D16 : FLAT_Real_AllAddr_gfx10<0x024>;
defm GLOBAL_LOAD_SHORT_D16_HI : FLAT_Real_AllAddr_gfx10<0x025>;
defm GLOBAL_ATOMIC_SWAP : FLAT_Real_GlblAtomics_gfx10<0x030>;
defm GLOBAL_ATOMIC_CMPSWAP : FLAT_Real_GlblAtomics_gfx10<0x031>;
defm GLOBAL_ATOMIC_ADD : FLAT_Real_GlblAtomics_gfx10<0x032>;
defm GLOBAL_ATOMIC_SUB : FLAT_Real_GlblAtomics_gfx10<0x033>;
// CSUB only exists in the returning form.
defm GLOBAL_ATOMIC_CSUB : FLAT_Real_GlblAtomics_RTN_gfx10<0x034>;
defm GLOBAL_ATOMIC_SMIN : FLAT_Real_GlblAtomics_gfx10<0x035>;
defm GLOBAL_ATOMIC_UMIN : FLAT_Real_GlblAtomics_gfx10<0x036>;
defm GLOBAL_ATOMIC_SMAX : FLAT_Real_GlblAtomics_gfx10<0x037>;
defm GLOBAL_ATOMIC_UMAX : FLAT_Real_GlblAtomics_gfx10<0x038>;
defm GLOBAL_ATOMIC_AND : FLAT_Real_GlblAtomics_gfx10<0x039>;
defm GLOBAL_ATOMIC_OR : FLAT_Real_GlblAtomics_gfx10<0x03a>;
defm GLOBAL_ATOMIC_XOR : FLAT_Real_GlblAtomics_gfx10<0x03b>;
defm GLOBAL_ATOMIC_INC : FLAT_Real_GlblAtomics_gfx10<0x03c>;
defm GLOBAL_ATOMIC_DEC : FLAT_Real_GlblAtomics_gfx10<0x03d>;
defm GLOBAL_ATOMIC_FCMPSWAP : FLAT_Real_GlblAtomics_gfx10<0x03e>;
defm GLOBAL_ATOMIC_FMIN : FLAT_Real_GlblAtomics_gfx10<0x03f>;
defm GLOBAL_ATOMIC_FMAX : FLAT_Real_GlblAtomics_gfx10<0x040>;
defm GLOBAL_ATOMIC_SWAP_X2 : FLAT_Real_GlblAtomics_gfx10<0x050>;
defm GLOBAL_ATOMIC_CMPSWAP_X2 : FLAT_Real_GlblAtomics_gfx10<0x051>;
defm GLOBAL_ATOMIC_ADD_X2 : FLAT_Real_GlblAtomics_gfx10<0x052>;
defm GLOBAL_ATOMIC_SUB_X2 : FLAT_Real_GlblAtomics_gfx10<0x053>;
defm GLOBAL_ATOMIC_SMIN_X2 : FLAT_Real_GlblAtomics_gfx10<0x055>;
defm GLOBAL_ATOMIC_UMIN_X2 : FLAT_Real_GlblAtomics_gfx10<0x056>;
defm GLOBAL_ATOMIC_SMAX_X2 : FLAT_Real_GlblAtomics_gfx10<0x057>;
defm GLOBAL_ATOMIC_UMAX_X2 : FLAT_Real_GlblAtomics_gfx10<0x058>;
defm GLOBAL_ATOMIC_AND_X2 : FLAT_Real_GlblAtomics_gfx10<0x059>;
defm GLOBAL_ATOMIC_OR_X2 : FLAT_Real_GlblAtomics_gfx10<0x05a>;
defm GLOBAL_ATOMIC_XOR_X2 : FLAT_Real_GlblAtomics_gfx10<0x05b>;
defm GLOBAL_ATOMIC_INC_X2 : FLAT_Real_GlblAtomics_gfx10<0x05c>;
defm GLOBAL_ATOMIC_DEC_X2 : FLAT_Real_GlblAtomics_gfx10<0x05d>;
defm GLOBAL_ATOMIC_FCMPSWAP_X2 : FLAT_Real_GlblAtomics_gfx10<0x05e>;
defm GLOBAL_ATOMIC_FMIN_X2 : FLAT_Real_GlblAtomics_gfx10<0x05f>;
defm GLOBAL_ATOMIC_FMAX_X2 : FLAT_Real_GlblAtomics_gfx10<0x060>;
defm GLOBAL_LOAD_DWORD_ADDTID : FLAT_Real_AllAddr_gfx10<0x016>;
defm GLOBAL_STORE_DWORD_ADDTID : FLAT_Real_AllAddr_gfx10<0x017>;
// ENC_FLAT_SCRATCH.
defm SCRATCH_LOAD_UBYTE : FLAT_Real_ScratchAllAddr_gfx10<0x008>;
defm SCRATCH_LOAD_SBYTE : FLAT_Real_ScratchAllAddr_gfx10<0x009>;
defm SCRATCH_LOAD_USHORT : FLAT_Real_ScratchAllAddr_gfx10<0x00a>;
defm SCRATCH_LOAD_SSHORT : FLAT_Real_ScratchAllAddr_gfx10<0x00b>;
defm SCRATCH_LOAD_DWORD : FLAT_Real_ScratchAllAddr_gfx10<0x00c>;
defm SCRATCH_LOAD_DWORDX2 : FLAT_Real_ScratchAllAddr_gfx10<0x00d>;
defm SCRATCH_LOAD_DWORDX4 : FLAT_Real_ScratchAllAddr_gfx10<0x00e>;
defm SCRATCH_LOAD_DWORDX3 : FLAT_Real_ScratchAllAddr_gfx10<0x00f>;
defm SCRATCH_STORE_BYTE : FLAT_Real_ScratchAllAddr_gfx10<0x018>;
defm SCRATCH_STORE_BYTE_D16_HI : FLAT_Real_ScratchAllAddr_gfx10<0x019>;
defm SCRATCH_STORE_SHORT : FLAT_Real_ScratchAllAddr_gfx10<0x01a>;
defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Real_ScratchAllAddr_gfx10<0x01b>;
defm SCRATCH_STORE_DWORD : FLAT_Real_ScratchAllAddr_gfx10<0x01c>;
defm SCRATCH_STORE_DWORDX2 : FLAT_Real_ScratchAllAddr_gfx10<0x01d>;
defm SCRATCH_STORE_DWORDX4 : FLAT_Real_ScratchAllAddr_gfx10<0x01e>;
defm SCRATCH_STORE_DWORDX3 : FLAT_Real_ScratchAllAddr_gfx10<0x01f>;
defm SCRATCH_LOAD_UBYTE_D16 : FLAT_Real_ScratchAllAddr_gfx10<0x020>;
defm SCRATCH_LOAD_UBYTE_D16_HI : FLAT_Real_ScratchAllAddr_gfx10<0x021>;
defm SCRATCH_LOAD_SBYTE_D16 : FLAT_Real_ScratchAllAddr_gfx10<0x022>;
defm SCRATCH_LOAD_SBYTE_D16_HI : FLAT_Real_ScratchAllAddr_gfx10<0x023>;
defm SCRATCH_LOAD_SHORT_D16 : FLAT_Real_ScratchAllAddr_gfx10<0x024>;
defm SCRATCH_LOAD_SHORT_D16_HI : FLAT_Real_ScratchAllAddr_gfx10<0x025>;