xref: /aosp_15_r20/art/runtime/interpreter/mterp/armng/object.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
%def op_check_cast():
%  slow_path = add_slow_path(op_check_cast_slow_path)
   // check-cast vAA, type@BBBB
   // The resolved class comes from a thread-local cache; on a miss we
   // call into the runtime (label 2) and retry at label 1.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r1", miss_label="2f")
1:
   lsr     r2, rINST, #8               // r2<- A
   GET_VREG r0, r2                     // r0<- vA (object)
   cmp     r0, #0
   beq     .L${opcode}_resume          // a null reference always passes check-cast
   ldr     r2, [r0, #MIRROR_OBJECT_CLASS_OFFSET]
   UNPOISON_HEAP_REF r2
   // Fast path: do a comparison without read barrier.
   cmp     r1, r2
   bne     ${slow_path}                // classes differ: full type check in slow path
.L${opcode}_resume:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry.
   EXPORT_PC
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   bl      nterp_get_class
   mov     r1, r0                      // r1<- resolved class
   b       1b
27
%def op_check_cast_slow_path():
   // Slow path for check-cast. On entry (from the fast path):
   //   r0 = object (non-null), r1 = target class,
   //   r2 = object's class (loaded without read barrier).
   ldr     r3, [r1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tst     r3, #MIRROR_CLASS_IS_INTERFACE_FLAG
   bne     2f                          // interface target: defer to the runtime helper
   ldr     r3, [r1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF r3
   cmp     r3, #0
   bne     5f                          // target is an array class
1:
   // Walk the superclass chain of the object's class looking for r1.
   ldr     r2, [r2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF r2
   cmp     r1, r2
   beq     .L${opcode}_resume          // found a matching superclass: cast succeeds
   cmp     r2, #0
   bne     1b
2:
   // Chain exhausted (or complex case): let the runtime do the full
   // check; it throws ClassCastException on failure.
   TEST_IF_MARKING 4f
3:
   EXPORT_PC
   bl      art_quick_check_instance_of
   b       .L${opcode}_resume
4:
   // GC is marking: mark the class reference in r1 first.
   bl      art_quick_read_barrier_mark_reg01
   b       3b
5:
   // Class in r1 is an array, r3 is the component type.
   ldr     r2, [r2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF r2
   // Check if object is an array.
   cmp     r2, #0
   beq     2b
   ldr     r4, [r3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF r4
   cmp     r4, #0
   // If the super class of the component type is not null, go slow path.
   bne     2b
   ldrh    r3, [r3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // Check if the object is a primitive array.
   ldrh    r2, [r2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   orrs    r2, r3                      // zero only if both primitive-type fields are zero
   beq     .L${opcode}_resume          // both non-primitive object arrays: cast succeeds
   // Go slow path for throwing the exception.
   b 2b
71
%def op_instance_of():
%  slow_path = add_slow_path(op_instance_of_slow_path)
   /* instance-of vA, vB, class@CCCC */
   // Stores 1 in vA if vB is an instance of the class, 0 otherwise.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r1", miss_label="2f")
1:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r0, r2                     // r0<- vB (object)
   cmp     r0, #0
   beq     .L${opcode}_resume          // null object: r0 is already 0, the result
   ldr     r2, [r0, #MIRROR_OBJECT_CLASS_OFFSET]
   UNPOISON_HEAP_REF r2
   // Fast path: do a comparison without read barrier.
   cmp     r1, r2
   bne     ${slow_path}
.L${opcode}_set_one:
   mov     r0, #1
.L${opcode}_resume:
   ubfx    r1, rINST, #8, #4           // r1<- A
   SET_VREG r0, r1                     // vA<- result (0 or 1) in r0
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry.
   EXPORT_PC
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   bl      nterp_get_class
   mov     r1, r0                      // r1<- resolved class
   b       1b
103
%def op_instance_of_slow_path():
   // Slow path for instance-of. On entry (from the fast path):
   //   r0 = object (non-null), r1 = tested class,
   //   r2 = object's class (loaded without read barrier).
   // Go slow path if we are marking. Checking now allows
   // not going to slow path if the super class hierarchy check fails.
   TEST_IF_MARKING 4f
   ldr     r3, [r1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tst     r3, #MIRROR_CLASS_IS_INTERFACE_FLAG
   bne     5f                          // interface target: use the runtime helper
   ldr     r3, [r1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF r3
   cmp     r3, #0
   bne     3f                          // tested class is an array class
1:
   // Walk the superclass chain of the object's class looking for r1.
   ldr     r2, [r2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF r2
   cmp     r1, r2
   beq     .L${opcode}_set_one
   cmp     r2, #0
   bne     1b
2:
   mov     r0, #0                      // not an instance
   b       .L${opcode}_resume
3:
   // Class in r1 is an array, r3 is the component type.
   ldr     r2, [r2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   UNPOISON_HEAP_REF r2
   // Check if object is an array.
   cmp     r2, #0
   beq     2b
   ldr     r4, [r3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   UNPOISON_HEAP_REF r4
   cmp     r4, #0
   bne     5f                          // component type has a superclass: runtime helper
   ldrh    r3, [r3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // Check if the object is a primitive array.
   ldrh    r2, [r2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   orr     r0, r2, r3
   clz     r0, r0                      // clz(0) == 32; clz(non-zero) < 32
   lsrs    r0, r0, #5                  // r0<- 1 iff both primitive types were 0
   b       .L${opcode}_resume
4:
   // GC is marking: mark the class in r1, then fall through to the helper.
   bl      art_quick_read_barrier_mark_reg01
5:
   EXPORT_PC
   bl      artInstanceOfFromCode       // result consumed from r0 by the resume path
   b       .L${opcode}_resume
149
%def op_iget_boolean():
   // iget-boolean: 8-bit zero-extending load.
%  op_iget(load="ldrb", wide="0", is_object="0")
152
%def op_iget_byte():
   // iget-byte: 8-bit sign-extending load.
%  op_iget(load="ldrsb", wide="0", is_object="0")
155
%def op_iget_char():
   // iget-char: 16-bit zero-extending load.
%  op_iget(load="ldrh", wide="0", is_object="0")
158
%def op_iget_short():
   // iget-short: 16-bit sign-extending load.
%  op_iget(load="ldrsh", wide="0", is_object="0")
161
%def op_iget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iget_slow_path, load, wide, is_object)
   // iget vA, vB, field@CCCC: load an instance field of vB into vA.
   // The cache yields the field byte offset in r0; a miss (or a volatile
   // field, handled there) goes to the slow path.
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     r3, r3, r0                  // r3<- address of the 64-bit field
   ldrd    r0, r1, [r3]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r3, r0]
   UNPOISON_HEAP_REF r0
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   // GC is marking: mark the loaded reference (in r0) before storing it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
   .endif
196
%def op_iget_slow_path(load, wide, is_object):
   // Cache-miss path for iget: resolve the field through the runtime.
   // A non-negative result in r0 is a plain field offset (resume on the
   // fast path); a negative result marks a volatile field, handled here
   // with acquire ordering.
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   cmp     r0, #0
   bge     .L${opcode}_resume          // non-negative: not volatile, use fast path
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject            // object was null
   .if $wide
   add     ip, r3, r0
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // acquire ordering for the volatile read
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   dmb     ish                         // acquire ordering for the volatile read
   .if $is_object
   UNPOISON_HEAP_REF r0
   TEST_IF_MARKING .L${opcode}_read_barrier
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
233
%def op_iget_wide():
   // iget-wide: 64-bit load (ldrd on the fast path).
%  op_iget(load="ldr", wide="1", is_object="0")
236
%def op_iget_object():
   // iget-object: reference load with unpoison + read barrier handling.
%  op_iget(load="ldr", wide="0", is_object="1")
239
%def op_iput_boolean():
   // iput-boolean: 8-bit store.
%  op_iput(store="strb", wide="0", is_object="0")
242
%def op_iput_byte():
   // iput-byte: 8-bit store (same as boolean; stores truncate).
%  op_iput(store="strb", wide="0", is_object="0")
245
%def op_iput_char():
   // iput-char: 16-bit store.
%  op_iput(store="strh", wide="0", is_object="0")
248
%def op_iput_short():
   // iput-short: 16-bit store (same as char; stores truncate).
%  op_iput(store="strh", wide="0", is_object="0")
251
%def op_iput(store="str", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iput_slow_path, store, wide, is_object)
   // iput vA, vB, field@CCCC: store vA into an instance field of vB.
   // For non-wide stores the value is fetched into r4 up front so the
   // slow path can pass it to the runtime (and reload it afterwards).
   .if !$wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   .if $wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   VREG_INDEX_TO_ADDR r4, r4
   GET_VREG_WIDE_BY_ADDR r2, r3, r4      // r2/r3 <- fp[A]
   add     r1, r1, r0                  // r1<- address of the 64-bit field
   strd    r2, r3, [r1]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, r4
   $store  r4, [r1, r0]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
279
%def op_iput_slow_path(store, wide, is_object):
   // Cache-miss path for iput: resolve the field through the runtime.
   // A non-negative result in r0 is a plain offset (resume on the fast
   // path); a negative result marks a volatile field, stored here with
   // release ordering.
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // pass the stored reference — presumably so GC can track it
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   cmp     r0, #0
   bge     .L${opcode}_resume          // non-negative: not volatile, use fast path
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   .if $wide
   lsr     r4, rINST, #12              // r4<- B
   ubfx    r1, rINST, #8, #4           // r1<- A
   GET_VREG r4, r4                     // vB (object we're operating on)
   cmp     r4, #0
   beq     common_errNullObject
   VREG_INDEX_TO_ADDR r1, r1
   GET_VREG_WIDE_BY_ADDR r2, r3, r1    // r2/r3 <- fp[A]
   add     ip, r4, r0
   dmb     ish                         // release ordering for the volatile write
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish
   .else
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   dmb     ish                         // release ordering for the volatile write
   POISON_HEAP_REF_IF_OBJECT $is_object, r4
   $store  r4, [r1, r0]
   dmb     ish
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_slow_path_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
325
%def op_iput_wide():
   // iput-wide: 64-bit store (strd on the fast path).
%  op_iput(store="str", wide="1", is_object="0")
328
%def op_iput_object():
   // iput-object: reference store with poisoning + write barrier.
%  op_iput(store="str", wide="0", is_object="1")
331
%def op_sget_boolean():
   // sget-boolean: 8-bit zero-extending load.
%  op_sget(load="ldrb", wide="0", is_object="0")
334
%def op_sget_byte():
   // sget-byte: 8-bit sign-extending load.
%  op_sget(load="ldrsb", wide="0", is_object="0")
337
%def op_sget_char():
   // sget-char: 16-bit zero-extending load.
%  op_sget(load="ldrh", wide="0", is_object="0")
340
%def op_sget_short():
   // sget-short: 16-bit sign-extending load.
%  op_sget(load="ldrsh", wide="0", is_object="0")
343
%def op_sget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sget_slow_path, load, wide, is_object)
   // sget vAA, field@BBBB: load a static field into vAA.
   // The cache yields the ArtField* in r0; from it we read the field
   // offset and the declaring class (which holds the static storage).
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]          // r1<- field byte offset
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET] // r0<- declaring class
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   add     r0, r0, r1
   ldrd    r0, r1, [r0]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r0, r1]
   UNPOISON_HEAP_REF r0
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // GC is marking: mark the declaring class in r0 first.
   bl      art_quick_read_barrier_mark_reg00
   .if $is_object
   ldr     r0, [r0, r1]
   UNPOISON_HEAP_REF r0
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif
385
%def op_sget_slow_path(load="ldr", wide="0", is_object="0"):
   // Cache-miss path for sget: resolve the static field through the
   // runtime. Bit 0 of the returned pointer is set for a volatile
   // field, which is loaded here with acquire ordering.
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tst     r0, #1
   beq     .L${opcode}_resume          // bit 0 clear: not volatile, use fast path
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   add     ip, r0, r1
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // acquire ordering for the volatile read
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   dmb     ish                         // acquire ordering for the volatile read
   .if $is_object
   UNPOISON_HEAP_REF r0
   TEST_IF_MARKING .L${opcode}_mark_after_load
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // GC is marking: mark the declaring class in r0 before the load.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier
425
%def op_sget_wide():
   // sget-wide: 64-bit load (ldrd on the fast path).
%  op_sget(load="ldr", wide="1", is_object="0")
428
%def op_sget_object():
   // sget-object: reference load with unpoison + read barrier handling.
%  op_sget(load="ldr", wide="0", is_object="1")
431
%def op_sput_boolean():
   // sput-boolean: 8-bit store.
%  op_sput(store="strb", wide="0", is_object="0")
434
%def op_sput_byte():
   // sput-byte: 8-bit store (same as boolean; stores truncate).
%  op_sput(store="strb", wide="0", is_object="0")
437
%def op_sput_char():
   // sput-char: 16-bit store.
%  op_sput(store="strh", wide="0", is_object="0")
440
%def op_sput_short():
   // sput-short: 16-bit store (same as char; stores truncate).
%  op_sput(store="strh", wide="0", is_object="0")
443
%def op_sput(store="str", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sput_slow_path, store, wide, is_object)
   // sput vAA, field@BBBB: store vAA into a static field.
   // For non-wide stores the value is fetched into r4 up front so the
   // slow path can pass it to the runtime (and reload it afterwards).
   .if !$wide
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]          // r1<- field byte offset
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET] // r0<- declaring class
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2/r3 <- fp[A]
   add     r0, r0, r1
   strd    r2, r3, [r0]
   .else
   POISON_HEAP_REF_IF_OBJECT $is_object, r4
   $store  r4, [r0, r1]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // GC is marking: mark the declaring class in r0 first.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
474
%def op_sput_slow_path(store, wide, is_object):
   // Cache-miss path for sput: resolve the static field through the
   // runtime. Bit 0 of the returned pointer is set for a volatile
   // field, which is stored here with release ordering.
   mov     r0, rSELF
   ldr     r1, [sp]                    // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // pass the stored reference — presumably so GC can track it
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   tst     r0, #1
   beq     .L${opcode}_resume          // bit 0 clear: not volatile, use fast path
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2/r3 <- fp[A]
   add     ip, r0, r1
   dmb     ish                         // release ordering for the volatile write
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish
   .else
   dmb     ish                         // release ordering for the volatile write
   // Fixed: comma between macro arguments, consistent with every other
   // POISON_HEAP_REF_IF_OBJECT call site in this file.
   POISON_HEAP_REF_IF_OBJECT $is_object, r4
   $store  r4, [r0, r1]
   dmb     ish
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_slow_path_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // GC is marking: mark the declaring class in r0 before the store.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier
519
520%def op_sput_wide():
521%  op_sput(store="str", wide="1", is_object="0")
522
523%def op_sput_object():
524%  op_sput(store="str", wide="0", is_object="1")
525
%def op_new_instance():
   // new-instance vAA, type@BBBB: allocate an object of the resolved
   // class (in r0) via the thread's allocation entrypoint and store the
   // result in vAA.
   // NOTE(review): the comment below looks stale — the routine is inline
   // here, there is no jump out. Confirm against other ports.
   // The routine is too big to fit in a handler, so jump to it.
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label="2f")
   TEST_IF_MARKING 3f
4:
   ldr     lr, [rSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blx     lr                           // allocate; result consumed from r0 below
   dmb     ishst                        // need fence for making object's class visible
1:
   lsr     r1, rINST, #8                // r1 <- A
   SET_VREG_OBJECT r0, r1               // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class and allocate through the runtime.
   mov     r0, rSELF
   ldr     r1, [sp]                     // presumably the current ArtMethod* — confirm nterp frame layout
   mov     r2, rPC
   bl      nterp_allocate_object
   b       1b
3:
   // GC is marking: mark the class in r0 before calling the allocator.
   bl      art_quick_read_barrier_mark_reg00
   b       4b
551