  // ADC
  { X86::ADC16mi8, X86::ADC16mi },
  { X86::ADC16ri8, X86::ADC16ri },
  { X86::ADC32mi8, X86::ADC32mi },
  { X86::ADC32ri8, X86::ADC32ri },
  { X86::ADC64mi8, X86::ADC64mi32 },
  { X86::ADC64ri8, X86::ADC64ri32 },
  // ADD
  { X86::ADD16mi8, X86::ADD16mi },
  { X86::ADD16ri8, X86::ADD16ri },
  { X86::ADD32mi8, X86::ADD32mi },
  { X86::ADD32ri8, X86::ADD32ri },
  { X86::ADD64mi8, X86::ADD64mi32 },
  { X86::ADD64ri8, X86::ADD64ri32 },
  // AND
  { X86::AND16mi8, X86::AND16mi },
  { X86::AND16ri8, X86::AND16ri },
  { X86::AND32mi8, X86::AND32mi },
  { X86::AND32ri8, X86::AND32ri },
  { X86::AND64mi8, X86::AND64mi32 },
  { X86::AND64ri8, X86::AND64ri32 },
  // CMP
  { X86::CMP16mi8, X86::CMP16mi },
  { X86::CMP16ri8, X86::CMP16ri },
  { X86::CMP32mi8, X86::CMP32mi },
  { X86::CMP32ri8, X86::CMP32ri },
  { X86::CMP64mi8, X86::CMP64mi32 },
  { X86::CMP64ri8, X86::CMP64ri32 },
  // IMUL
  { X86::IMUL16rmi8, X86::IMUL16rmi },
  { X86::IMUL16rri8, X86::IMUL16rri },
  { X86::IMUL32rmi8, X86::IMUL32rmi },
  { X86::IMUL32rri8, X86::IMUL32rri },
  { X86::IMUL64rmi8, X86::IMUL64rmi32 },
  { X86::IMUL64rri8, X86::IMUL64rri32 },
  // OR
  { X86::OR16mi8, X86::OR16mi },
  { X86::OR16ri8, X86::OR16ri },
  { X86::OR32mi8, X86::OR32mi },
  { X86::OR32ri8, X86::OR32ri },
  { X86::OR64mi8, X86::OR64mi32 },
  { X86::OR64ri8, X86::OR64ri32 },
  // PUSH
  { X86::PUSH16i8, X86::PUSHi16 },
  { X86::PUSH32i8, X86::PUSHi32 },
  { X86::PUSH64i8, X86::PUSH64i32 },
  // SBB
  { X86::SBB16mi8, X86::SBB16mi },
  { X86::SBB16ri8, X86::SBB16ri },
  { X86::SBB32mi8, X86::SBB32mi },
  { X86::SBB32ri8, X86::SBB32ri },
  { X86::SBB64mi8, X86::SBB64mi32 },
  { X86::SBB64ri8, X86::SBB64ri32 },
  // SUB
  { X86::SUB16mi8, X86::SUB16mi },
  { X86::SUB16ri8, X86::SUB16ri },
  { X86::SUB32mi8, X86::SUB32mi },
  { X86::SUB32ri8, X86::SUB32ri },
  { X86::SUB64mi8, X86::SUB64mi32 },
  { X86::SUB64ri8, X86::SUB64ri32 },
  // XOR
  { X86::XOR16mi8, X86::XOR16mi },
  { X86::XOR16ri8, X86::XOR16ri },
  { X86::XOR32mi8, X86::XOR32mi },
  { X86::XOR32ri8, X86::XOR32ri },
  { X86::XOR64mi8, X86::XOR64mi32 },
  { X86::XOR64ri8, X86::XOR64ri32 },
};
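// Example (illustrative sketch, not part of the original file): the encoding
// property these pairs capture. Each left-hand opcode carries a sign-extended
// 8-bit immediate; its right-hand twin carries a full 16/32-bit immediate
// (the 64-bit forms still take a sign-extended 32-bit immediate, since x86-64
// arithmetic instructions have no 64-bit-immediate encodings). "fitsInImm8"
// is a hypothetical helper name.
#include <cstdint>

static bool fitsInImm8(int64_t Imm) {
  return Imm >= INT8_MIN && Imm <= INT8_MAX; // Representable as imm8.
}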
static const X86InstrRelaxTableEntry *
lookupRelaxTableImpl(ArrayRef<X86InstrRelaxTableEntry> Table,
                     unsigned ShortOp) {
#ifndef NDEBUG
  // Make sure the table is sorted and contains no duplicates; do the check
  // only once, guarded by a cheap relaxed atomic flag.
  static std::atomic<bool> RelaxTableChecked(false);
  if (!RelaxTableChecked.load(std::memory_order_relaxed)) {
    assert(llvm::is_sorted(InstrRelaxTable) &&
           std::adjacent_find(std::begin(InstrRelaxTable),
                              std::end(InstrRelaxTable)) ==
               std::end(InstrRelaxTable) &&
           "InstrRelaxTable is not sorted and unique!");
    RelaxTableChecked.store(true, std::memory_order_relaxed);
  }
#endif

  const X86InstrRelaxTableEntry *Data = llvm::lower_bound(Table, ShortOp);
  if (Data != Table.end() && Data->KeyOp == ShortOp)
    return Data;
  return nullptr;
}

/// Look up the relaxed form table entry for a given \p ShortOp.
const X86InstrRelaxTableEntry *llvm::lookupRelaxTable(unsigned ShortOp) {
  return lookupRelaxTableImpl(InstrRelaxTable, ShortOp);
}
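// Example (illustrative sketch, not part of the original file): how a client,
// such as code relaxing an instruction whose immediate no longer fits in
// 8 bits, might use lookupRelaxTable. "relaxOrKeep" is a hypothetical name.
static unsigned relaxOrKeep(unsigned Opcode) {
  if (const X86InstrRelaxTableEntry *Entry = llvm::lookupRelaxTable(Opcode))
    return Entry->DstOp; // E.g. X86::ADD32ri8 -> X86::ADD32ri.
  return Opcode;         // No table entry: nothing to relax.
}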
namespace {

// This class stores the short form table. It is instantiated as a function
// scope static variable to lazily init the short form table.
struct X86ShortFormTable {
  // Stores relax table entries with the two opcodes swapped, sorted by
  // relaxed form opcode.
  SmallVector<X86InstrRelaxTableEntry, 141> Table;

  X86ShortFormTable() {
    for (const X86InstrRelaxTableEntry &Entry : InstrRelaxTable)
      Table.push_back({Entry.DstOp, Entry.KeyOp});

    llvm::sort(Table);

    // Now that it's sorted, ensure it's unique.
    assert(std::adjacent_find(Table.begin(), Table.end()) == Table.end() &&
           "Short form table is not unique!");
  }
};
} // namespace
/// Look up the short form table entry for a given \p RelaxOp.
const X86InstrRelaxTableEntry *llvm::lookupShortTable(unsigned RelaxOp) {
  static X86ShortFormTable ShortTable;
  auto &Table = ShortTable.Table;
  auto I = llvm::lower_bound(Table, RelaxOp);
  if (I != Table.end() && I->KeyOp == RelaxOp)
    return &*I;
  return nullptr;
}
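// Example (illustrative sketch, not part of the original file): the reverse
// query, as an instruction-shortening pass might issue it once an immediate
// is known to fit in 8 bits. "shortenOrKeep" is a hypothetical name.
static unsigned shortenOrKeep(unsigned Opcode) {
  if (const X86InstrRelaxTableEntry *Entry = llvm::lookupShortTable(Opcode))
    return Entry->DstOp; // E.g. X86::ADD32ri -> X86::ADD32ri8.
  return Opcode;         // Opcode has no shorter encoding.
}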
namespace llvm {

/// Get the short instruction opcode for a given relaxed opcode.
unsigned getShortOpcodeArith(unsigned RelaxOp) {
  if (const X86InstrRelaxTableEntry *Entry = lookupShortTable(RelaxOp))
    return Entry->DstOp;
  return RelaxOp;
}

/// Get the relaxed instruction opcode for a given short opcode.
unsigned getRelaxedOpcodeArith(unsigned ShortOp) {
  if (const X86InstrRelaxTableEntry *Entry = lookupRelaxTable(ShortOp))
    return Entry->DstOp;
  return ShortOp;
}

} // namespace llvm
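// Example (illustrative sketch, not part of the original file): the two maps
// invert each other for opcodes that appear in InstrRelaxTable, since the
// short form table stores each pair swapped and the uniqueness asserts above
// make both directions unambiguous. "roundTripsToSelf" is a hypothetical name.
static bool roundTripsToSelf(unsigned ShortOp) {
  unsigned Relaxed = llvm::getRelaxedOpcodeArith(ShortOp);
  return llvm::getShortOpcodeArith(Relaxed) == ShortOp;
}
// E.g. roundTripsToSelf(X86::AND32ri8) holds: {AND32ri8, AND32ri} is a table
// entry, so relaxing yields AND32ri and shortening maps it back.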