xref: /aosp_15_r20/art/runtime/interpreter/mterp/arm64ng/object.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
%def op_check_cast():
%  slow_path = add_slow_path(op_check_cast_slow_path)
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x1", miss_label="2f")
1:
   lsr     w2, wINST, #8               // w2<- A
   GET_VREG w0, w2                     // w0<- vA (object)
   cbz     w0, .L${opcode}_resume      // null reference always passes check-cast
   ldr     w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]   // w2<- object's class
   UNPOISON_HEAP_REF w2
   // Fast path: do a comparison without read barrier.
   cmp     w1, w2
   bne     ${slow_path}                // not an exact match: full type check in slow path
.L${opcode}_resume:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Thread-local cache miss: resolve the class through the runtime.
   EXPORT_PC
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   bl      nterp_get_class
   mov     x1, x0                      // x1<- resolved class
   b       1b
26
%def op_check_cast_slow_path():
   // Slow path for check-cast: the object's class is not identical to the
   // target class. On entry (from op_check_cast): w0 = object (non-null),
   // w1 = target class, w2 = object's class.
   //
   // We don't do read barriers for simplicity. However, this means that x1
   // (and all other fetched objects) may be a from-space reference. That's OK as
   // we only fetch constant information from the references.
   // This also means that some of the comparisons below may lead to false negatives,
   // but it will eventually be handled in the runtime.
   ldr     w3, [x1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tbnz    w3, #MIRROR_CLASS_IS_INTERFACE_FLAG_BIT, 2f   // interfaces: let the runtime decide
   ldr     w3, [x1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF w3
   cbnz    w3, 5f                      // target class is an array class: array checks below
1:
   // Walk the object's superclass chain looking for the target class.
   ldr     w2, [x2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF w2
   cmp     w1, w2
   beq     .L${opcode}_resume
   cbnz    w2, 1b
2:
   // Not provable here: call the runtime, which performs the full check and
   // throws on failure.
   TEST_IF_MARKING 4f
3:
   EXPORT_PC
   bl      art_quick_check_instance_of
   b       .L${opcode}_resume
4:
   // GC is marking: make sure x1 is the to-space class before the runtime call.
   bl      art_quick_read_barrier_mark_reg01
   b       3b
5:
   // Class in w1 is an array, w3 is the component type.
   ldr     w2, [x2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF w2
   // Check if object is an array.
   cbz     w2, 2b
   ldr     w4, [x3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF w4
   // If the super class of the component type is not null, go slow path.
   cbnz    w4, 2b
   ldrh    w3, [x3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // If the component type is primitive, go slow path.
   cbnz    w3, 2b
   // Check if the object is a primitive array.
   ldrh    w2, [x2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   cbz     w2, .L${opcode}_resume      // non-primitive array into Object[]-like target: OK
   // Go slow path for throwing the exception.
   b 2b
71
%def op_instance_of():
%  slow_path = add_slow_path(op_instance_of_slow_path)
   /* instance-of vA, vB, class@CCCC */
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x1", miss_label="2f")
1:
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w0, w2                     // w0<- vB (object)
   cbz     w0, .L${opcode}_resume      // null object: w0 is already 0, the result
   ldr     w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]   // w2<- object's class
   UNPOISON_HEAP_REF w2
   // Fast path: do a comparison without read barrier.
   cmp     w1, w2
   bne     ${slow_path}
.L${opcode}_set_one:
   mov     w0, #1                      // exact class match: result is 1
.L${opcode}_resume:
   ubfx    w1, wINST, #8, #4           // w1<- A
   SET_VREG w0, w1                     // vA<- result (0 or 1)
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Thread-local cache miss: resolve the class through the runtime.
   EXPORT_PC
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   bl      nterp_get_class
   mov     x1, x0                      // x1<- resolved class
   b       1b
102
%def op_instance_of_slow_path():
   // Slow path for instance-of: the object's class is not identical to the
   // target class. On entry (from op_instance_of): w0 = object (non-null),
   // w1 = target class, w2 = object's class. The result (0/1) is written to vA
   // at .L${opcode}_resume.
   //
   // Go slow path if we are marking. Checking now allows
   // not going to slow path if the super class hierarchy check fails.
   TEST_IF_MARKING 4f
   ldr     w3, [x1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tbnz    w3, #MIRROR_CLASS_IS_INTERFACE_FLAG_BIT, 5f   // interfaces: let the runtime decide
   ldr     w3, [x1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF w3
   cbnz    w3, 3f                      // target class is an array class: array checks below
1:
   // Walk the object's superclass chain looking for the target class.
   ldr     w2, [x2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF w2
   cmp     w1, w2
   beq     .L${opcode}_set_one
   cbnz    w2, 1b
2:
   mov     w0, #0                      // not an instance
   b       .L${opcode}_resume
3:
   // Class in x1 is an array, x3 is the component type of x1, and x2 is the class of the object.
   ldr     w2, [x2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF w2
   // Check if object is an array.
   cbz     w2, 2b
   // Check if x1 is Object[]
   ldr     w4, [x3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF w4
   // If the super class is not Object, go to slow path.
   cbnz    w4, 5f
   // Super class is null, this could either be a primitive array or Object[].
   ldrh    w3, [x3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // If x1 is a primitive array class, we know the check is false.
   cbnz    w3, 2b
   // Check if x2 is a primitive array class.
   ldrh    w2, [x2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   cmp     w2, #0
   cset    w0, eq                      // w0<- 1 iff the object is a non-primitive array
   b       .L${opcode}_resume
4:
   // GC is marking: mark the class in x1, then fall through to the runtime check.
   bl      art_quick_read_barrier_mark_reg01
5:
   EXPORT_PC
   bl      artInstanceOfFromCode       // full runtime check; result in w0
   b       .L${opcode}_resume
147
%def op_iget_boolean():
   // 8-bit unsigned field: ldrb/ldarb both zero-extend, no fixup needed.
%  op_iget(load="ldrb", volatile_load="ldarb", maybe_extend="", wide="0", is_object="0")

%def op_iget_byte():
   // 8-bit signed field: ldarb has no sign-extending form, so the volatile
   // (slow) path sign-extends explicitly via maybe_extend.
%  op_iget(load="ldrsb", volatile_load="ldarb", maybe_extend="sxtb w0, w0", wide="0", is_object="0")

%def op_iget_char():
   // 16-bit unsigned field: ldrh/ldarh both zero-extend, no fixup needed.
%  op_iget(load="ldrh", volatile_load="ldarh", maybe_extend="", wide="0", is_object="0")

%def op_iget_short():
   // 16-bit signed field: ldarh has no sign-extending form, so the volatile
   // (slow) path sign-extends explicitly via maybe_extend.
%  op_iget(load="ldrsh", volatile_load="ldarh", maybe_extend="sxth w0, w0", wide="0", is_object="0")
159
%def op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="0"):
   // Generic instance-field getter (iget and its typed variants): vA <- vB.field.
   //   load          - plain load instruction used on the fast path
   //   volatile_load - acquire load used by the slow path for volatile fields
   //   maybe_extend  - optional sign-extension after the acquire load (slow path only)
   //   wide          - "1" for 64-bit fields, is_object - "1" for references
%  slow_path = add_slow_path(op_iget_slow_path, volatile_load, maybe_extend, wide, is_object)
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   // x0 holds the cached field offset here; a plain load suffices on this path
   // (volatile fields are presumably handled only via the slow path — see the
   // bit-31 marker check there).
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w3, w2                     // w3<- object we're operating on
   ubfx    w2, wINST, #8, #4           // w2<- A
   cbz     w3, common_errNullObject    // object was null
   .if $wide
   $load   x0, [x3, x0]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $load   w0, [x3, x0]
   UNPOISON_HEAP_REF w0
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $load   w0, [x3, x0]
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   // GC is marking: pass the loaded reference (w0) through the mark barrier.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
   .endif
190
%def op_iget_slow_path(volatile_load, maybe_extend, wide, is_object):
   // Cache-miss path: ask the runtime for the field offset.
   // The returned offset has bit 31 set when the field is volatile.
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   mov     x3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   // Zero extension (nterp_get_instance_field_offset returns uint32_t) of the return value is
   // needed as the value is used below via wider X0 register - AARCH64 AAPCS specifies that
   // "... any unused bits in the register have unspecified value" (see 6.8.2, 6.9).
   mov     w0, w0
   tbz     w0, #31, .L${opcode}_resume // non-volatile field: use the fast-path plain load
   // Volatile field: strip the marker and use an acquire load.
   CLEAR_INSTANCE_VOLATILE_MARKER w0
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w3, w2                     // w3<- object we're operating on
   ubfx    w2, wINST, #8, #4           // w2<- A
   cbz     w3, common_errNullObject    // object was null
   add     x3, x3, x0                  // x3<- field address
   .if $wide
   $volatile_load x0, [x3]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $volatile_load w0, [x3]
   UNPOISON_HEAP_REF w0
   TEST_IF_MARKING .L${opcode}_read_barrier
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $volatile_load w0, [x3]
   $maybe_extend
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
225
%def op_iget_wide():
   // 64-bit field (long/double): stored to a vreg pair via SET_VREG_WIDE.
%  op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="1", is_object="0")

%def op_iget_object():
   // Reference field: unpoisoned and passed through a read barrier when marking.
%  op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="1")
231
%def op_iput_boolean():
   // 8-bit store; strb/stlrb write the low byte, so boolean and byte are identical.
%  op_iput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_iput_byte():
%  op_iput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_iput_char():
   // 16-bit store; strh/stlrh write the low halfword, so char and short are identical.
%  op_iput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_iput_short():
%  op_iput(store="strh", volatile_store="stlrh", wide="0", is_object="0")
243
%def op_iput(store="str", volatile_store="stlr", wide="0", is_object="0"):
   // Generic instance-field setter (iput and its typed variants): vB.field <- vA.
   //   store          - plain store used on the fast path
   //   volatile_store - release store used by the slow path for volatile fields
   //   wide           - "1" for 64-bit fields, is_object - "1" for references
%  slow_path = add_slow_path(op_iput_slow_path, volatile_store, wide, is_object)
   // Load the value to store up front into x26, which must survive the
   // slow-path runtime call; the slow path reloads it only for objects
   // (which may have been moved by GC during the call).
   ubfx    w1, wINST, #8, #4           // w1<- A
   .if $wide
   GET_VREG_WIDE x26, w1               // x26<- fp[A]/fp[A+1]
   .else
   GET_VREG w26, w1                    // w26 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   // x0 holds the cached field offset here.
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w2, w2                     // vB (object we're operating on)
   cbz w2, common_errNullObject
   .if $wide
   $store  x26, [x2, x0]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, w26
   $store  w26, [x2, x0]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w2, .L${opcode}_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
268
%def op_iput_slow_path(volatile_store, wide, is_object):
   // Cache-miss path: ask the runtime for the field offset. For object puts,
   // the value (x26) is passed along so the runtime can see it.
   // The returned offset has bit 31 set when the field is volatile.
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   .if $is_object
   mov     x3, x26
   .else
   mov     x3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   // Zero extension (nterp_get_instance_field_offset returns uint32_t) of the return value is
   // needed as the value is used below via wider X0 register - AARCH64 AAPCS specifies that
   // "... any unused bits in the register have unspecified value" (see 6.8.2, 6.9).
   mov     w0, w0
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    w1, wINST, #8, #4           // w1<- A
   GET_VREG w26, w1                    // w26 <- v[A]
   .endif
   tbz     w0, #31, .L${opcode}_resume // non-volatile field: use the fast-path plain store
   // Volatile field: strip the marker and use a release store.
   CLEAR_INSTANCE_VOLATILE_MARKER w0
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w2, w2                     // vB (object we're operating on)
   cbz     w2, common_errNullObject
   add     x3, x2, x0                  // x3<- field address
   .if $wide
   $volatile_store x26, [x3]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, w26
   $volatile_store w26, [x3]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w2, .L${opcode}_slow_path_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
305
%def op_iput_wide():
   // 64-bit field (long/double): value read from a vreg pair via GET_VREG_WIDE.
%  op_iput(store="str", volatile_store="stlr", wide="1", is_object="0")

%def op_iput_object():
   // Reference field: poisoned before the store and followed by a write barrier.
%  op_iput(store="str", volatile_store="stlr", wide="0", is_object="1")
311
%def op_sget_boolean():
   // 8-bit unsigned static field: ldrb/ldarb both zero-extend, no fixup needed.
%  op_sget(load="ldrb", volatile_load="ldarb", maybe_extend="", wide="0", is_object="0")

%def op_sget_byte():
   // 8-bit signed static field: ldarb has no sign-extending form, so the
   // volatile (slow) path sign-extends explicitly via maybe_extend.
%  op_sget(load="ldrsb", volatile_load="ldarb", maybe_extend="sxtb w0, w0", wide="0", is_object="0")

%def op_sget_char():
   // 16-bit unsigned static field: ldrh/ldarh both zero-extend, no fixup needed.
%  op_sget(load="ldrh", volatile_load="ldarh", maybe_extend="", wide="0", is_object="0")

%def op_sget_short():
   // 16-bit signed static field: ldarh has no sign-extending form, so the
   // volatile (slow) path sign-extends explicitly via maybe_extend.
%  op_sget(load="ldrsh", volatile_load="ldarh", maybe_extend="sxth w0, w0", wide="0", is_object="0")
323
%def op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="0"):
   // Generic static-field getter (sget and its typed variants): vA <- field.
   //   load          - plain load instruction used on the fast path
   //   volatile_load - acquire load used by the slow path for volatile fields
   //   maybe_extend  - optional sign-extension after the acquire load (slow path only)
   //   wide          - "1" for 64-bit fields, is_object - "1" for references
%  slow_path = add_slow_path(op_sget_slow_path, volatile_load, maybe_extend, wide, is_object)
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   // x0 is the cached field (ArtField*): fetch its offset and declaring class.
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]
   lsr     w2, wINST, #8               // w2 <- A
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   ldr     x0, [x0, x1]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $load   w0, [x0, x1]
   UNPOISON_HEAP_REF w0
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $load   w0, [x0, x1]
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // GC is marking: first mark the declaring class (x0).
   bl      art_quick_read_barrier_mark_reg00
   .if $is_object
   // For reference fields, the loaded reference needs marking too.
   $load   w0, [x0, x1]
   UNPOISON_HEAP_REF w0
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   // (Also branched to from op_sget_slow_path after its volatile load.)
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif
362
%def op_sget_slow_path(volatile_load, maybe_extend, wide, is_object):
   // Cache-miss path: ask the runtime for the ArtField*. The returned pointer
   // has bit 0 set when the field is volatile.
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   mov     x3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tbz     x0, #0, .L${opcode}_resume  // non-volatile field: use the fast path
   // Volatile field: strip the marker and use acquire loads below.
   CLEAR_STATIC_VOLATILE_MARKER x0
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]
   lsr     w2, wINST, #8               // w2 <- A
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   add     x0, x0, x1                  // x0<- field address
   .if $wide
   ldar    x0, [x0]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $volatile_load w0, [x0]
   UNPOISON_HEAP_REF w0
   // Jump into the fast path's marking sequence for the loaded reference.
   TEST_IF_MARKING .L${opcode}_mark_after_load
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $volatile_load w0, [x0]
   $maybe_extend
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // GC is marking: mark the declaring class (x0) before accessing the field.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier
397
%def op_sget_wide():
   // 64-bit static field (long/double): stored to a vreg pair via SET_VREG_WIDE.
%  op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="1", is_object="0")

%def op_sget_object():
   // Reference static field: unpoisoned and passed through a read barrier when marking.
%  op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="1")
403
%def op_sput_boolean():
   // 8-bit store; strb/stlrb write the low byte, so boolean and byte are identical.
%  op_sput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_sput_byte():
%  op_sput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_sput_char():
   // 16-bit store; strh/stlrh write the low halfword, so char and short are identical.
%  op_sput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_sput_short():
%  op_sput(store="strh", volatile_store="stlrh", wide="0", is_object="0")
415
%def op_sput(store="str", volatile_store="stlr", wide="0", is_object="0"):
   // Generic static-field setter (sput and its typed variants): field <- vA.
   //   store          - plain store used on the fast path
   //   volatile_store - release store used by the slow path for volatile fields
   //   wide           - "1" for 64-bit fields, is_object - "1" for references
%  slow_path = add_slow_path(op_sput_slow_path, volatile_store, wide, is_object)
   // Load the value to store up front into x26, which must survive the
   // slow-path runtime call; the slow path reloads it only for objects
   // (which may have been moved by GC during the call).
   lsr     w2, wINST, #8               // w2 <- A
   .if $wide
   GET_VREG_WIDE x26, w2               // x26 <- v[A]
   .else
   GET_VREG w26, w2                    // w26 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   // x0 is the cached field (ArtField*): fetch its offset and declaring class.
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   $store  x26, [x0, x1]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, w26
   $store  w26, [x0, x1]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w0, .L${opcode}_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // GC is marking: mark the declaring class (x0) before storing into it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
444
%def op_sput_slow_path(volatile_store, wide, is_object):
   // Cache-miss path: ask the runtime for the ArtField*. For object puts, the
   // value (x26) is passed along so the runtime can see it.
   // The returned pointer has bit 0 set when the field is volatile.
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   .if $is_object
   mov     x3, x26
   .else
   mov     x3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     w2, wINST, #8               // w2 <- A
   GET_VREG w26, w2                    // w26 <- v[A]
   .endif
   tbz     x0, #0, .L${opcode}_resume  // non-volatile field: use the fast-path plain store
   // Volatile field: strip the marker and use a release store.
   CLEAR_STATIC_VOLATILE_MARKER x0
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   add     x1, x0, x1                  // x1<- field address (x0 keeps the class for the write barrier)
   .if $wide
   $volatile_store    x26, [x1]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, w26
   $volatile_store    w26, [x1]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w0, .L${opcode}_slow_path_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // GC is marking: mark the declaring class (x0) before storing into it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier
481
%def op_sput_wide():
   // 64-bit static field (long/double): value read from a vreg pair via GET_VREG_WIDE.
%  op_sput(store="str", volatile_store="stlr", wide="1", is_object="0")

%def op_sput_object():
   // Reference static field: poisoned before the store and followed by a write barrier.
%  op_sput(store="str", volatile_store="stlr", wide="0", is_object="1")
487
%def op_new_instance():
   /* new-instance vA, class@CCCC */
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label="2f")
   TEST_IF_MARKING 3f                  // marking: mark the class in x0 before allocating
4:
   // Call the thread's object-allocation entrypoint; new object returned in x0.
   ldr     lr, [xSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blr     lr
   dmb     ishst                       // need fence for making object's class visible
1:
   lsr     w1, wINST, #8               // w1 <- A
   SET_VREG_OBJECT w0, w1              // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class and allocate through the runtime.
   mov     x0, xSELF
   ldr     x1, [sp]                    // x1<- value at frame bottom (presumably the caller's ArtMethod* — confirm against nterp frame layout)
   mov     x2, xPC
   bl      nterp_allocate_object
   b       1b
3:
   bl      art_quick_read_barrier_mark_reg00
   b       4b
512