/*
 * Copyright (C) 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gtest/gtest.h"

#include <cstdint>
#include <initializer_list>
#include <tuple>

#include "berberis/base/bit_util.h"
#include "berberis/guest_state/guest_addr.h"
#include "berberis/guest_state/guest_state.h"
#include "berberis/interpreter/riscv64/interpreter.h"
#include "berberis/runtime_primitives/memory_region_reservation.h"

namespace berberis {

namespace {

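// Test fixture that feeds hand-encoded RISC-V instructions to InterpretInsn() and checks the
// resulting guest CPU state (registers, PC, memory, and reservation state).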
class Riscv64ToArm64InterpreterTest : public ::testing::Test {
 public:
  template <uint8_t kInsnSize = 4>
  bool RunOneInstruction(ThreadState* state, GuestAddr stop_pc) {
    InterpretInsn(state);
    return state->cpu.insn_addr == stop_pc;
  }

  template <uint8_t kInsnSize = 4>
  void RunInstruction(const uint32_t& insn_bytes) {
    state_.cpu.insn_addr = ToGuestAddr(&insn_bytes);
    EXPECT_TRUE(RunOneInstruction<kInsnSize>(&state_, state_.cpu.insn_addr + kInsnSize));
  }

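  // Runs a register-register instruction with rs1 = x2 and rs2 = x3, then checks rd = x1 against
  // the expected result for each (arg1, arg2, expected_result) tuple.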
  void TestOp(uint32_t insn_bytes,
              std::initializer_list<std::tuple<uint64_t, uint64_t, uint64_t>> args) {
    for (auto [arg1, arg2, expected_result] : args) {
      SetXReg<2>(state_.cpu, arg1);
      SetXReg<3>(state_.cpu, arg2);
      RunInstruction(insn_bytes);
      EXPECT_EQ(GetXReg<1>(state_.cpu), expected_result);
    }
  }

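  // Runs an immediate-operand instruction with rs1 = x2, splicing the (at most 6-bit) immediate
  // into bits 25:20 of the encoding (the low bits of the I-type immediate field), and checks
  // rd = x1 against the expected result.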
  void TestOpImm(uint32_t insn_bytes,
                 std::initializer_list<std::tuple<uint64_t, uint16_t, uint64_t>> args) {
    for (auto [arg1, imm, expected_result] : args) {
      CHECK_LE(imm, 63);
      uint32_t insn_bytes_with_immediate = insn_bytes | imm << 20;
      SetXReg<2>(state_.cpu, arg1);
      RunInstruction(insn_bytes_with_immediate);
      EXPECT_EQ(GetXReg<1>(state_.cpu), expected_result);
    }
  }

  void TestAuipc(uint32_t insn_bytes, uint64_t expected_offset) {
    RunInstruction(insn_bytes);
    EXPECT_EQ(GetXReg<1>(state_.cpu), expected_offset + ToGuestAddr(&insn_bytes));
  }

  void TestLui(uint32_t insn_bytes, uint64_t expected_result) {
    RunInstruction(insn_bytes);
    EXPECT_EQ(GetXReg<1>(state_.cpu), expected_result);
  }

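  // Runs a branch instruction with rs1 = x1 and rs2 = x2, then checks that the PC advanced by the
  // expected offset (an offset of 4 means the branch was not taken).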
  void TestBranch(uint32_t insn_bytes,
                  std::initializer_list<std::tuple<uint64_t, uint64_t, int8_t>> args) {
    auto code_start = ToGuestAddr(&insn_bytes);
    for (auto [arg1, arg2, expected_offset] : args) {
      state_.cpu.insn_addr = code_start;
      SetXReg<1>(state_.cpu, arg1);
      SetXReg<2>(state_.cpu, arg2);
      InterpretInsn(&state_);
      EXPECT_EQ(state_.cpu.insn_addr, code_start + expected_offset);
    }
  }

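  // Runs a JAL instruction and checks both the new PC and the return address written to x1.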
  void TestJumpAndLink(uint32_t insn_bytes, int8_t expected_offset) {
    auto code_start = ToGuestAddr(&insn_bytes);
    state_.cpu.insn_addr = code_start;
    InterpretInsn(&state_);
    EXPECT_EQ(state_.cpu.insn_addr, code_start + expected_offset);
    EXPECT_EQ(GetXReg<1>(state_.cpu), code_start + 4);
  }

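  // Runs a load instruction with rd = x1 and base register rs1 = x2 pointing 8 bytes below
  // kDataToLoad, so the fixed offset of 8 in the encoding addresses the test data.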
  void TestLoad(uint32_t insn_bytes, uint64_t expected_result) {
    // Offset is always 8.
    SetXReg<2>(state_.cpu, ToGuestAddr(bit_cast<uint8_t*>(&kDataToLoad) - 8));
    RunInstruction(insn_bytes);
    EXPECT_EQ(GetXReg<1>(state_.cpu), expected_result);
  }

  // kLinkRegisterOffsetIfUsed is the instruction size if the instruction writes the link
  // register, or 0 if it does not.
  template <uint8_t kLinkRegisterOffsetIfUsed>
  void TestJumpAndLinkRegister(uint32_t insn_bytes, uint64_t base_disp, int64_t expected_offset) {
    auto code_start = ToGuestAddr(&insn_bytes);
    state_.cpu.insn_addr = code_start;
    SetXReg<1>(state_.cpu, 0);
    SetXReg<2>(state_.cpu, code_start + base_disp);
    InterpretInsn(&state_);
    EXPECT_EQ(state_.cpu.insn_addr, code_start + expected_offset);
    if constexpr (kLinkRegisterOffsetIfUsed == 0) {
      EXPECT_EQ(GetXReg<1>(state_.cpu), 0UL);
    } else {
      EXPECT_EQ(GetXReg<1>(state_.cpu), code_start + kLinkRegisterOffsetIfUsed);
    }
  }

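  // Runs a store instruction with base register rs1 = x1 pointing 8 bytes below store_area_ and
  // the data to store in rs2 = x2, then checks the stored memory contents.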
  void TestStore(uint32_t insn_bytes, uint64_t expected_result) {
    // Offset is always 8.
    SetXReg<1>(state_.cpu, ToGuestAddr(bit_cast<uint8_t*>(&store_area_) - 8));
    SetXReg<2>(state_.cpu, kDataToStore);
    store_area_ = 0;
    RunInstruction(insn_bytes);
    EXPECT_EQ(store_area_, expected_result);
  }

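  // Runs a load-reserved (LR) instruction with the address in rs1 = x1 and rd = x2, then checks
  // the loaded value and the reservation recorded in the guest CPU state.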
  void TestAtomicLoad(uint32_t insn_bytes,
                      const uint64_t* const data_to_load,
                      uint64_t expected_result) {
    state_.cpu.insn_addr = ToGuestAddr(&insn_bytes);
    SetXReg<1>(state_.cpu, ToGuestAddr(data_to_load));
    EXPECT_TRUE(RunOneInstruction(&state_, state_.cpu.insn_addr + 4));
    EXPECT_EQ(GetXReg<2>(state_.cpu), expected_result);
    EXPECT_EQ(state_.cpu.reservation_address, ToGuestAddr(data_to_load));
    // The reservation always covers the full 64-bit value at the reservation address.
    EXPECT_EQ(state_.cpu.reservation_value, *data_to_load);
  }

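  // Runs a store-conditional (SC) instruction after establishing a matching reservation on
  // store_area_, so the store is expected to succeed and the status register x3 to read 0.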
  template <typename T>
  void TestAtomicStore(uint32_t insn_bytes, T expected_result) {
    store_area_ = ~uint64_t{0};
    state_.cpu.insn_addr = ToGuestAddr(&insn_bytes);
    SetXReg<1>(state_.cpu, ToGuestAddr(&store_area_));
    SetXReg<2>(state_.cpu, kDataToStore);
    SetXReg<3>(state_.cpu, 0xdeadbeef);
    state_.cpu.reservation_address = ToGuestAddr(&store_area_);
    state_.cpu.reservation_value = store_area_;
    MemoryRegionReservation::SetOwner(ToGuestAddr(&store_area_), &state_.cpu);
    EXPECT_TRUE(RunOneInstruction(&state_, state_.cpu.insn_addr + 4));
    EXPECT_EQ(static_cast<T>(store_area_), expected_result);
    EXPECT_EQ(GetXReg<3>(state_.cpu), 0u);
  }

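  // Runs an SC instruction without any prior LR: no reservation is set up, so the store must
  // fail, leave memory untouched, and report failure (1) in x3.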
  void TestAtomicStoreNoLoadFailure(uint32_t insn_bytes) {
    state_.cpu.insn_addr = ToGuestAddr(&insn_bytes);
    SetXReg<1>(state_.cpu, ToGuestAddr(&store_area_));
    SetXReg<2>(state_.cpu, kDataToStore);
    SetXReg<3>(state_.cpu, 0xdeadbeef);
    store_area_ = 0;
    EXPECT_TRUE(RunOneInstruction(&state_, state_.cpu.insn_addr + 4));
    EXPECT_EQ(store_area_, 0u);
    EXPECT_EQ(GetXReg<3>(state_.cpu), 1u);
  }

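  // Runs an SC instruction whose reservation was taken on a different address, so the store must
  // fail and leave memory untouched.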
  void TestAtomicStoreDifferentLoadFailure(uint32_t insn_bytes) {
    state_.cpu.insn_addr = ToGuestAddr(&insn_bytes);
    SetXReg<1>(state_.cpu, ToGuestAddr(&store_area_));
    SetXReg<2>(state_.cpu, kDataToStore);
    SetXReg<3>(state_.cpu, 0xdeadbeef);
    state_.cpu.reservation_address = ToGuestAddr(&kDataToStore);
    state_.cpu.reservation_value = 0;
    MemoryRegionReservation::SetOwner(ToGuestAddr(&kDataToStore), &state_.cpu);
    store_area_ = 0;
    EXPECT_TRUE(RunOneInstruction(&state_, state_.cpu.insn_addr + 4));
    EXPECT_EQ(store_area_, 0u);
    EXPECT_EQ(GetXReg<3>(state_.cpu), 1u);
  }

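  // Runs an AMO instruction with the memory address in rs1 = x2 and the operand in rs2 = x3, then
  // checks the old memory value returned in rd = x1 and the updated memory contents.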
  void TestAmo(uint32_t insn_bytes,
               uint64_t arg1,
               uint64_t arg2,
               uint64_t expected_result,
               uint64_t expected_memory) {
    // Copy arg1 into store_area_.
    store_area_ = arg1;
    SetXReg<2>(state_.cpu, ToGuestAddr(bit_cast<uint8_t*>(&store_area_)));
    SetXReg<3>(state_.cpu, arg2);
    RunInstruction(insn_bytes);
    EXPECT_EQ(GetXReg<1>(state_.cpu), expected_result);
    EXPECT_EQ(store_area_, expected_memory);
  }

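  // Convenience overload that exercises both the 32-bit and the 64-bit variant of an AMO with the
  // same operands: the 32-bit variant must sign-extend the returned old value and leave the upper
  // half of memory untouched, while the 64-bit variant operates on the full word.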
  void TestAmo(uint32_t insn_bytes32, uint32_t insn_bytes64, uint64_t expected_memory) {
    TestAmo(insn_bytes32,
            0xffff'eeee'dddd'ccccULL,
            0xaaaa'bbbb'cccc'ddddULL,
            0xffff'ffff'dddd'ccccULL,
            0xffff'eeee'0000'0000 | uint32_t(expected_memory));
    TestAmo(insn_bytes64,
            0xffff'eeee'dddd'ccccULL,
            0xaaaa'bbbb'cccc'ddddULL,
            0xffff'eeee'dddd'ccccULL,
            expected_memory);
  }

 protected:
  static constexpr uint64_t kDataToLoad{0xffffeeeeddddccccULL};
  static constexpr uint64_t kDataToStore = kDataToLoad;
  uint64_t store_area_;
  ThreadState state_;
};

TEST_F(Riscv64ToArm64InterpreterTest, OpInstructions) {
  // Add
  TestOp(0x003100b3, {{19, 23, 42}});
  // Sub
  TestOp(0x403100b3, {{42, 23, 19}});
  // And
  TestOp(0x003170b3, {{0b0101, 0b0011, 0b0001}});
  // Or
  TestOp(0x003160b3, {{0b0101, 0b0011, 0b0111}});
  // Xor
  TestOp(0x003140b3, {{0b0101, 0b0011, 0b0110}});
  // Sll
  TestOp(0x003110b3, {{0b1010, 3, 0b1010'000}});
  // Srl
  TestOp(0x003150b3, {{0xf000'0000'0000'0000ULL, 12, 0x000f'0000'0000'0000ULL}});
  // Sra
  TestOp(0x403150b3, {{0xf000'0000'0000'0000ULL, 12, 0xffff'0000'0000'0000ULL}});
  // Slt
  TestOp(0x003120b3,
         {
             {19, 23, 1},
             {23, 19, 0},
             {~0ULL, 0, 1},
         });
  // Sltu
  TestOp(0x003130b3,
         {
             {19, 23, 1},
             {23, 19, 0},
             {~0ULL, 0, 0},
         });
  // Div
  TestOp(0x23140b3, {{0x9999'9999'9999'9999, 0x3333, 0xfffd'fffd'fffd'fffe}});
  TestOp(0x23140b3, {{42, 2, 21}});
  TestOp(0x23140b3, {{42, 0, -1}});
  TestOp(0x23140b3, {{-2147483648, -1, 2147483648}});
  TestOp(0x23140b3, {{0x8000'0000'0000'0000, -1, 0x8000'0000'0000'0000}});
  // Divu
  TestOp(0x23150b3, {{0x9999'9999'9999'9999, 0x3333, 0x0003'0003'0003'0003}});
  TestOp(0x23150b3, {{42, 2, 21}});
  TestOp(0x23150b3, {{42, 0, 0xffff'ffff'ffff'ffffULL}});
  // Rem
  TestOp(0x23160b3, {{0x9999'9999'9999'9999, 0x3333, 0xffff'ffff'ffff'ffff}});
  TestOp(0x23160b3, {{0x9999'9999'9999'9999, 0, 0x9999'9999'9999'9999}});
  // Remu
  TestOp(0x23170b3, {{0x9999'9999'9999'9999, 0x3333, 0}});
  TestOp(0x23170b3, {{0x9999'9999'9999'9999, 0, 0x9999'9999'9999'9999}});
  // Andn
  TestOp(0x403170b3, {{0b0101, 0b0011, 0b0100}});
  // Orn
  TestOp(0x403160b3, {{0b0101, 0b0011, 0xffff'ffff'ffff'fffd}});
  // Xnor
  TestOp(0x403140b3, {{0b0101, 0b0011, 0xffff'ffff'ffff'fff9}});
  // Max
  TestOp(0x0a3160b3, {{bit_cast<uint64_t>(int64_t{-5}), 4, 4}});
  TestOp(0x0a3160b3,
         {{bit_cast<uint64_t>(int64_t{-5}),
           bit_cast<uint64_t>(int64_t{-10}),
           bit_cast<uint64_t>(int64_t{-5})}});
  // Maxu
  TestOp(0x0a3170b3, {{50, 1, 50}});
  // Min
  TestOp(0x0a3140b3, {{bit_cast<uint64_t>(int64_t{-5}), 4, bit_cast<uint64_t>(int64_t{-5})}});
  TestOp(0x0a3140b3,
         {{bit_cast<uint64_t>(int64_t{-5}),
           bit_cast<uint64_t>(int64_t{-10}),
           bit_cast<uint64_t>(int64_t{-10})}});
  // Minu
  TestOp(0x0a3150b3, {{50, 1, 1}});
  // Ror
  TestOp(0x603150b3, {{0xf000'0000'0000'000fULL, 4, 0xff00'0000'0000'0000ULL}});
  TestOp(0x603150b3, {{0xf000'0000'0000'000fULL, 8, 0x0ff0'0000'0000'0000ULL}});
  // Rol
  TestOp(0x603110b3, {{0xff00'0000'0000'0000ULL, 4, 0xf000'0000'0000'000fULL}});
  TestOp(0x603110b3, {{0x000f'ff00'0000'000fULL, 8, 0x0fff'0000'0000'0f00ULL}});
  // Sh1add
  TestOp(0x203120b3, {{0x0008'0000'0000'0001, 0x1001'0001'0000'0000ULL, 0x1011'0001'0000'0002ULL}});
  // Sh2add
  TestOp(0x203140b3, {{0x0008'0000'0000'0001, 0x0001'0001'0000'0000ULL, 0x0021'0001'0000'0004ULL}});
  // Sh3add
  TestOp(0x203160b3, {{0x0008'0000'0000'0001, 0x1001'0011'0000'0000ULL, 0x1041'0011'0000'0008ULL}});
  // Bclr
  TestOp(0x483110b3, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0000ULL}});
  TestOp(0x483110b3, {{0b1000'0001'0000'0001ULL, 8, 0b1000'0000'0000'0001ULL}});
  // Bext
  TestOp(0x483150b3, {{0b1000'0001'0000'0001ULL, 0, 0b0000'0000'0000'0001ULL}});
  TestOp(0x483150b3, {{0b1000'0001'0000'0001ULL, 8, 0b0000'0000'0000'0001ULL}});
  TestOp(0x483150b3, {{0b1000'0001'0000'0001ULL, 7, 0b0000'0000'0000'0000ULL}});
  // Binv
  TestOp(0x683110b3, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0000ULL}});
  TestOp(0x683110b3, {{0b1000'0001'0000'0001ULL, 1, 0b1000'0001'0000'0011ULL}});
  // Bset
  TestOp(0x283110b3, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0001ULL}});
  TestOp(0x283110b3, {{0b1000'0001'0000'0001ULL, 1, 0b1000'0001'0000'0011ULL}});
}

TEST_F(Riscv64ToArm64InterpreterTest, OpImmInstructions) {
  // Addi
  TestOpImm(0x00010093, {{19, 23, 42}});
  // Slti
  TestOpImm(0x00012093,
            {
                {19, 23, 1},
                {23, 19, 0},
                {~0ULL, 0, 1},
            });
  // Sltiu
  TestOpImm(0x00013093,
            {
                {19, 23, 1},
                {23, 19, 0},
                {~0ULL, 0, 0},
            });
  // Xori
  TestOpImm(0x00014093, {{0b0101, 0b0011, 0b0110}});
  // Ori
  TestOpImm(0x00016093, {{0b0101, 0b0011, 0b0111}});
  // Andi
  TestOpImm(0x00017093, {{0b0101, 0b0011, 0b0001}});
  // Slli
  TestOpImm(0x00011093, {{0b1010, 3, 0b1010'000}});
  // Srli
  TestOpImm(0x00015093, {{0xf000'0000'0000'0000ULL, 12, 0x000f'0000'0000'0000ULL}});
  // Srai
  TestOpImm(0x40015093, {{0xf000'0000'0000'0000ULL, 12, 0xffff'0000'0000'0000ULL}});
  // Rori
  TestOpImm(0x60015093, {{0xf000'0000'0000'000fULL, 4, 0xff00'0000'0000'0000ULL}});
  // Rev8
  TestOpImm(0x6b815093, {{0x0000'0000'0000'000fULL, 0, 0x0f00'0000'0000'0000ULL}});
  TestOpImm(0x6b815093, {{0xf000'0000'0000'0000ULL, 0, 0x0000'0000'0000'00f0ULL}});
  TestOpImm(0x6b815093, {{0x00f0'0000'0000'0000ULL, 0, 0x0000'0000'0000'f000ULL}});
  TestOpImm(0x6b815093, {{0x0000'000f'0000'0000ULL, 0, 0x0000'0000'0f00'0000ULL}});

  // Sext.b
  TestOpImm(0x60411093, {{0b1111'1110, 0, 0xffff'ffff'ffff'fffe}});  // -2
  // Sext.h
  TestOpImm(0x60511093, {{0b1111'1110, 0, 0xfe}});
  TestOpImm(0x60511093, {{0b1111'1111'1111'1110, 0, 0xffff'ffff'ffff'fffe}});
  // Bclri
  TestOpImm(0x48011093, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0000ULL}});
  TestOpImm(0x48011093, {{0b1000'0001'0000'0001ULL, 8, 0b1000'0000'0000'0001ULL}});
  // Bexti
  TestOpImm(0x48015093, {{0b1000'0001'0000'0001ULL, 0, 0b0000'0000'0000'0001ULL}});
  TestOpImm(0x48015093, {{0b1000'0001'0000'0001ULL, 8, 0b0000'0000'0000'0001ULL}});
  TestOpImm(0x48015093, {{0b1000'0001'0000'0001ULL, 7, 0b0000'0000'0000'0000ULL}});
  // Binvi
  TestOpImm(0x68011093, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0000ULL}});
  TestOpImm(0x68011093, {{0b1000'0001'0000'0001ULL, 1, 0b1000'0001'0000'0011ULL}});
  // Bseti
  TestOpImm(0x28011093, {{0b1000'0001'0000'0001ULL, 0, 0b1000'0001'0000'0001ULL}});
  TestOpImm(0x28011093, {{0b1000'0001'0000'0001ULL, 1, 0b1000'0001'0000'0011ULL}});
}

TEST_F(Riscv64ToArm64InterpreterTest, UpperImmInstructions) {
  // Auipc
  TestAuipc(0xfedcb097, 0xffff'ffff'fedc'b000);
  // Lui
  TestLui(0xfedcb0b7, 0xffff'ffff'fedc'b000);
}

TEST_F(Riscv64ToArm64InterpreterTest, TestBranchInstructions) {
  // Beq
  TestBranch(0x00208463,
             {
                 {42, 42, 8},
                 {41, 42, 4},
                 {42, 41, 4},
             });
  // Bne
  TestBranch(0x00209463,
             {
                 {42, 42, 4},
                 {41, 42, 8},
                 {42, 41, 8},
             });
  // Bltu
  TestBranch(0x0020e463,
             {
                 {41, 42, 8},
                 {42, 42, 4},
                 {42, 41, 4},
                 {0xf000'0000'0000'0000ULL, 42, 4},
                 {42, 0xf000'0000'0000'0000ULL, 8},
             });
  // Bgeu
  TestBranch(0x0020f463,
             {
                 {42, 41, 8},
                 {42, 42, 8},
                 {41, 42, 4},
                 {0xf000'0000'0000'0000ULL, 42, 8},
                 {42, 0xf000'0000'0000'0000ULL, 4},
             });
  // Blt
  TestBranch(0x0020c463,
             {
                 {41, 42, 8},
                 {42, 42, 4},
                 {42, 41, 4},
                 {0xf000'0000'0000'0000ULL, 42, 8},
                 {42, 0xf000'0000'0000'0000ULL, 4},
             });
  // Bge
  TestBranch(0x0020d463,
             {
                 {42, 41, 8},
                 {42, 42, 8},
                 {41, 42, 4},
                 {0xf000'0000'0000'0000ULL, 42, 4},
                 {42, 0xf000'0000'0000'0000ULL, 8},
             });
  // Beq with negative offset.
  TestBranch(0xfe208ee3,
             {
                 {42, 42, -4},
             });
}

TEST_F(Riscv64ToArm64InterpreterTest, JumpAndLinkInstructions) {
  // Jal
  TestJumpAndLink(0x008000ef, 8);
  // Jal with negative offset.
  TestJumpAndLink(0xffdff0ef, -4);
}

TEST_F(Riscv64ToArm64InterpreterTest, JumpAndLinkRegisterInstructions) {
  // Jalr offset=4.
  TestJumpAndLinkRegister<4>(0x004100e7, 38, 42);
  // Jalr offset=-4.
  TestJumpAndLinkRegister<4>(0xffc100e7, 42, 38);
  // Jalr offset=5 - must properly align the target to even.
  TestJumpAndLinkRegister<4>(0x005100e7, 38, 42);
  // Jr offset=4.
  TestJumpAndLinkRegister<0>(0x00410067, 38, 42);
  // Jr offset=-4.
  TestJumpAndLinkRegister<0>(0xffc10067, 42, 38);
  // Jr offset=5 - must properly align the target to even.
  TestJumpAndLinkRegister<0>(0x00510067, 38, 42);
}

TEST_F(Riscv64ToArm64InterpreterTest, LoadInstructions) {
  // Offset is always 8.
  // Lbu
  TestLoad(0x00814083, kDataToLoad & 0xffULL);
  // Lhu
  TestLoad(0x00815083, kDataToLoad & 0xffffULL);
  // Lwu
  TestLoad(0x00816083, kDataToLoad & 0xffff'ffffULL);
  // Ld
  TestLoad(0x00813083, kDataToLoad);
  // Lb
  TestLoad(0x00810083, int64_t{int8_t(kDataToLoad)});
  // Lh
  TestLoad(0x00811083, int64_t{int16_t(kDataToLoad)});
  // Lw
  TestLoad(0x00812083, int64_t{int32_t(kDataToLoad)});
}

TEST_F(Riscv64ToArm64InterpreterTest, StoreInstructions) {
  // Offset is always 8.
  // Sb
  TestStore(0x00208423, kDataToStore & 0xffULL);
  // Sh
  TestStore(0x00209423, kDataToStore & 0xffffULL);
  // Sw
  TestStore(0x0020a423, kDataToStore & 0xffff'ffffULL);
  // Sd
  TestStore(0x0020b423, kDataToStore);
}

TEST_F(Riscv64ToArm64InterpreterTest, AtomicLoadInstructions) {
  // Validate sign-extension of returned value.
  const uint64_t kNegative32BitValue = 0x0000'0000'8000'0000ULL;
  const uint64_t kSignExtendedNegative = 0xffff'ffff'8000'0000ULL;
  const uint64_t kPositive32BitValue = 0xffff'ffff'0000'0000ULL;
  const uint64_t kSignExtendedPositive = 0ULL;
  static_assert(static_cast<int32_t>(kSignExtendedPositive) >= 0);
  static_assert(static_cast<int32_t>(kSignExtendedNegative) < 0);

  // Lrw - sign extends from 32 to 64.
  TestAtomicLoad(0x1000a12f, &kPositive32BitValue, kSignExtendedPositive);
  TestAtomicLoad(0x1000a12f, &kNegative32BitValue, kSignExtendedNegative);

  // Lrd
  TestAtomicLoad(0x1000b12f, &kDataToLoad, kDataToLoad);
}

TEST_F(Riscv64ToArm64InterpreterTest, AtomicStoreInstructions) {
  // Scw
  TestAtomicStore(0x1820a1af, static_cast<uint32_t>(kDataToStore));

  // Scd
  TestAtomicStore(0x1820b1af, kDataToStore);
}

TEST_F(Riscv64ToArm64InterpreterTest, AtomicStoreInstructionNoLoadFailure) {
  // Scw
  TestAtomicStoreNoLoadFailure(0x1820a1af);

  // Scd
  TestAtomicStoreNoLoadFailure(0x1820b1af);
}

TEST_F(Riscv64ToArm64InterpreterTest, AtomicStoreInstructionDifferentLoadFailure) {
  // Scw
  TestAtomicStoreDifferentLoadFailure(0x1820a1af);

  // Scd
  TestAtomicStoreDifferentLoadFailure(0x1820b1af);
}

TEST_F(Riscv64ToArm64InterpreterTest, AmoInstructions) {
  // Verify that all aq/rl combinations work for Amoswap; for brevity, only the relaxed variant is
  // tested for the other instructions.

  // AmoswapW/AmoswapD
  TestAmo(0x083120af, 0x083130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmoswapWAq/AmoswapDAq
  TestAmo(0x0c3120af, 0x0c3130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmoswapWRl/AmoswapDRl
  TestAmo(0x0a3120af, 0x0a3130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmoswapWAqrl/AmoswapDAqrl
  TestAmo(0x0e3120af, 0x0e3130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmoaddW/AmoaddD
  TestAmo(0x003120af, 0x003130af, 0xaaaa'aaaa'aaaa'aaa9);

  // AmoxorW/AmoxorD
  TestAmo(0x203120af, 0x203130af, 0x5555'5555'1111'1111);

  // AmoandW/AmoandD
  TestAmo(0x603120af, 0x603130af, 0xaaaa'aaaa'cccc'cccc);

  // AmoorW/AmoorD
  TestAmo(0x403120af, 0x403130af, 0xffff'ffff'dddd'dddd);

  // AmominW/AmominD
  TestAmo(0x803120af, 0x803130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmomaxW/AmomaxD
  TestAmo(0xa03120af, 0xa03130af, 0xffff'eeee'dddd'ccccULL);

  // AmominuW/AmominuD
  TestAmo(0xc03120af, 0xc03130af, 0xaaaa'bbbb'cccc'ddddULL);

  // AmomaxuW/AmomaxuD
  TestAmo(0xe03120af, 0xe03130af, 0xffff'eeee'dddd'ccccULL);
}

// The following tests correspond to those in interpreter_test.cc.

TEST_F(Riscv64ToArm64InterpreterTest, FenceInstructions) {
  // Fence
  RunInstruction(0x0ff0000f);
  // FenceTso
  RunInstruction(0x8330000f);

  // FenceI is explicitly not supported.
}

}  // namespace

}  // namespace berberis