xref: /aosp_15_r20/art/runtime/interpreter/interpreter_common.cc (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "interpreter_common.h"
18 
19 #include <cmath>
20 
21 #include "base/casts.h"
22 #include "base/pointer_size.h"
23 #include "class_linker.h"
24 #include "class_root-inl.h"
25 #include "debugger.h"
26 #include "dex/dex_file_types.h"
27 #include "entrypoints/runtime_asm_entrypoints.h"
28 #include "handle.h"
29 #include "intrinsics_enum.h"
30 #include "intrinsics_list.h"
31 #include "jit/jit.h"
32 #include "jvalue-inl.h"
33 #include "method_handles-inl.h"
34 #include "method_handles.h"
35 #include "mirror/array-alloc-inl.h"
36 #include "mirror/array-inl.h"
37 #include "mirror/call_site-inl.h"
38 #include "mirror/class.h"
39 #include "mirror/emulated_stack_frame.h"
40 #include "mirror/method_handle_impl-inl.h"
41 #include "mirror/method_type-inl.h"
42 #include "mirror/object_array-alloc-inl.h"
43 #include "mirror/object_array-inl.h"
44 #include "mirror/var_handle.h"
45 #include "reflection-inl.h"
46 #include "reflection.h"
47 #include "shadow_frame-inl.h"
48 #include "stack.h"
49 #include "thread-inl.h"
50 #include "var_handles.h"
51 #include "well_known_classes-inl.h"
52 
53 namespace art HIDDEN {
54 namespace interpreter {
55 
56 void ThrowNullPointerExceptionFromInterpreter() {
57   ThrowNullPointerExceptionFromDexPC();
58 }
59 
60 bool CheckStackOverflow(Thread* self, size_t frame_size)
61     REQUIRES_SHARED(Locks::mutator_lock_) {
62   bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
63   uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
64   if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
65     ThrowStackOverflowError<kNativeStackType>(self);
66     return false;
67   }
68   return true;
69 }
70 
71 bool ShouldStayInSwitchInterpreter(ArtMethod* method)
72     REQUIRES_SHARED(Locks::mutator_lock_) {
73   if (!Runtime::Current()->IsStarted()) {
74     // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where
75     // we are doing cross-compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use
76     // the image pointer size here and this may cause an overflow if it is called from the
77     // compiler. b/62402160
78     return true;
79   }
80 
81   if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
82     return false;
83   }
84 
85   if (Thread::Current()->IsForceInterpreter()) {
86     // Force the use of the interpreter when it is required by the debugger.
87     return true;
88   }
89 
90   if (Thread::Current()->IsAsyncExceptionPending()) {
91     // Force use of the interpreter to handle async exceptions.
92     return true;
93   }
94 
95   const void* code = method->GetEntryPointFromQuickCompiledCode();
96   return Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(code);
97 }
98 
99 template <typename T>
100 bool SendMethodExitEvents(Thread* self,
101                           const instrumentation::Instrumentation* instrumentation,
102                           ShadowFrame& frame,
103                           ArtMethod* method,
104                           T& result) {
105   bool had_event = false;
106   // We can get additional ForcePopFrame requests during handling of these events. We should
107   // respect these and send additional instrumentation events.
108   do {
109     frame.SetForcePopFrame(false);
110     if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
111       had_event = true;
112       instrumentation->MethodExitEvent(self, method, instrumentation::OptionalFrame{frame}, result);
113     }
114     // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
115     if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
116       had_event = true;
117       instrumentation->WatchedFramePopped(self, frame);
118     }
119   } while (UNLIKELY(frame.GetForcePopFrame()));
120   if (UNLIKELY(had_event)) {
121     return !self->IsExceptionPending();
122   } else {
123     return true;
124   }
125 }
126 
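// Explicit instantiations below cover the two result representations used at method exit: a
// MutableHandle<mirror::Object> (presumably so a reference result stays visible to the GC while
// the events run) and a plain JValue.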
127 template
128 bool SendMethodExitEvents(Thread* self,
129                           const instrumentation::Instrumentation* instrumentation,
130                           ShadowFrame& frame,
131                           ArtMethod* method,
132                           MutableHandle<mirror::Object>& result);
133 
134 template
135 bool SendMethodExitEvents(Thread* self,
136                           const instrumentation::Instrumentation* instrumentation,
137                           ShadowFrame& frame,
138                           ArtMethod* method,
139                           JValue& result);
140 
141 // We execute any instrumentation events that are triggered by this exception and change the
142 // shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
143 // Return true if we should continue executing in the current method and false if we need to go up
144 // the stack to find an exception handler.
145 // The skip_listeners and skip_throw_listener flags mean we must not report the corresponding
146 // events to the instrumentation. TODO: We should have a better way to skip instrumentation
147 // reporting or possibly rethink that behavior.
148 bool MoveToExceptionHandler(Thread* self,
149                             ShadowFrame& shadow_frame,
150                             bool skip_listeners,
151                             bool skip_throw_listener) {
152   self->VerifyStack();
153   StackHandleScope<2> hs(self);
154   Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
155   const instrumentation::Instrumentation* instrumentation =
156       Runtime::Current()->GetInstrumentation();
157   if (!skip_throw_listener &&
158       instrumentation->HasExceptionThrownListeners() &&
159       self->IsExceptionThrownByCurrentMethod(exception.Get())) {
160     // See b/65049545 for why we don't need to check to see if the exception has changed.
161     instrumentation->ExceptionThrownEvent(self, exception.Get());
162     if (shadow_frame.GetForcePopFrame()) {
163       // We will check in the caller for GetForcePopFrame again. We need to bail out early to
164       // prevent an ExceptionHandledEvent from also being sent before popping.
165       return true;
166     }
167   }
168   bool clear_exception = false;
169   uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
170       hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
171   if (found_dex_pc == dex::kDexNoIndex) {
172     if (!skip_listeners) {
173       if (shadow_frame.NeedsNotifyPop()) {
174         instrumentation->WatchedFramePopped(self, shadow_frame);
175         if (shadow_frame.GetForcePopFrame()) {
176           // We will check in the caller for GetForcePopFrame again. We need to bail out early to
177           // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
178           // handle other types of non-standard-exits.
179           return true;
180         }
181       }
182       // Exception is not caught by the current method. We will unwind to the
183       // caller. Notify any instrumentation listener.
184       instrumentation->MethodUnwindEvent(self,
185                                          shadow_frame.GetMethod(),
186                                          shadow_frame.GetDexPC());
187     }
188     return shadow_frame.GetForcePopFrame();
189   } else {
190     shadow_frame.SetDexPC(found_dex_pc);
191     if (!skip_listeners && instrumentation->HasExceptionHandledListeners()) {
192       shadow_frame.SetNotifyExceptionHandledEvent(/*enable=*/ true);
193     } else if (clear_exception) {
194       self->ClearException();
195     }
196     return true;
197   }
198 }
199 
200 void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
201   LOG(FATAL) << "Unexpected instruction: "
202              << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
203   UNREACHABLE();
204 }
205 
206 // START DECLARATIONS:
207 //
208 // These additional declarations are required because clang complains
209 // about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
210 //
211 
212 template <bool is_range>
213 NO_STACK_PROTECTOR
214 static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
215                                        Thread* self,
216                                        ShadowFrame& shadow_frame,
217                                        JValue* result,
218                                        uint16_t number_of_inputs,
219                                        uint32_t (&arg)[Instruction::kMaxVarArgRegs],
220                                        uint32_t vregC,
221                                        bool string_init) REQUIRES_SHARED(Locks::mutator_lock_);
222 
223 template <bool is_range>
224 ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
225                                  ShadowFrame* callee_frame,
226                                  const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
227                                  const size_t first_src_reg,
228                                  const size_t first_dest_reg,
229                                  const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);
230 
231 // END DECLARATIONS.
232 
233 NO_STACK_PROTECTOR
234 void ArtInterpreterToCompiledCodeBridge(Thread* self,
235                                         ArtMethod* caller,
236                                         ShadowFrame* shadow_frame,
237                                         uint16_t arg_offset,
238                                         JValue* result)
239     REQUIRES_SHARED(Locks::mutator_lock_) {
240   ArtMethod* method = shadow_frame->GetMethod();
241   // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
242   // check that the arg_offset isn't greater than the number of registers. A stronger check is
243   // difficult since the frame may contain space for all the registers in the method, or only enough
244   // space for the arguments.
245   if (kIsDebugBuild) {
246     if (method->GetCodeItem() == nullptr) {
247       DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
248     } else {
249       DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
250     }
251   }
252   jit::Jit* jit = Runtime::Current()->GetJit();
253   if (jit != nullptr && caller != nullptr) {
254     jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
255   }
256   method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
257                  (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
258                  result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
259 }
260 
261 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
262                                     uint16_t this_obj_vreg,
263                                     JValue result)
264     REQUIRES_SHARED(Locks::mutator_lock_) {
265   ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
266   if (existing == nullptr) {
267     // If it's null, we come from compiled code that was deoptimized. There are no aliases to
268     // update, as the compiler verified there was no alias.
269     // Just set the new string result of the StringFactory.
270     shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
271     return;
272   }
273   // Set the string init result into all aliases.
274   for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
275     if (shadow_frame->GetVRegReference(i) == existing) {
276       DCHECK_EQ(shadow_frame->GetVRegReference(i),
277                 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
278       shadow_frame->SetVRegReference(i, result.GetL());
279       DCHECK_EQ(shadow_frame->GetVRegReference(i),
280                 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
281     }
282   }
283 }
284 
285 template<bool is_range>
286 static bool DoMethodHandleInvokeCommon(Thread* self,
287                                        ShadowFrame& shadow_frame,
288                                        bool invoke_exact,
289                                        const Instruction* inst,
290                                        uint16_t inst_data,
291                                        JValue* result)
292     REQUIRES_SHARED(Locks::mutator_lock_) {
293   // Make sure to check for async exceptions
294   if (UNLIKELY(self->ObserveAsyncException())) {
295     return false;
296   }
297   // Invoke-polymorphic instructions always take a receiver, i.e., they are never static.
298   const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
299   const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();
300 
301   // Initialize |result| to 0 as this is the default return value for
302   // polymorphic invocations of method handle types with void return
303   // and provides a sensible return result in error cases.
304   result->SetJ(0);
305 
306   // The invoke_method_idx here refers to the signature polymorphic method that
307   // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
308   // and not the method that we'll dispatch to in the end.
309   StackHandleScope<2> hs(self);
310   Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
311       ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
312   if (UNLIKELY(method_handle == nullptr)) {
313     // Note that the invoke type is kVirtual here because a call to a signature
314     // polymorphic method is shaped like a virtual call at the bytecode level.
315     ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
316     return false;
317   }
318 
319   // The vRegH value gives the index of the proto_id associated with this
320   // signature polymorphic call site.
321   const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
322   const dex::ProtoIndex callsite_proto_id(vRegH);
323 
324   // Call through to the class linker and ask it to resolve the static type associated
325   // with the callsite. This information is stored in the dex cache so it's
326   // guaranteed to be fast after the first resolution.
327   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
328   Handle<mirror::MethodType> callsite_type(hs.NewHandle(
329       class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));
330 
331   // This implies we couldn't resolve one or more types referenced by this method type.
332   if (UNLIKELY(callsite_type == nullptr)) {
333     CHECK(self->IsExceptionPending());
334     return false;
335   }
336 
337   // There is a common dispatch method for method handles that takes
338   // arguments either from a range or an array of arguments depending
339   // on whether the DEX instruction is invoke-polymorphic/range or
340   // invoke-polymorphic. The array here is for the latter.
341   if (UNLIKELY(is_range)) {
342     // VRegC is the register holding the method handle. Arguments passed
343     // to the method handle's target do not include the method handle.
344     RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
345     if (invoke_exact) {
346       return MethodHandleInvokeExact(self,
347                                      shadow_frame,
348                                      method_handle,
349                                      callsite_type,
350                                      &operands,
351                                      result);
352     } else {
353       return MethodHandleInvoke(self,
354                                 shadow_frame,
355                                 method_handle,
356                                 callsite_type,
357                                 &operands,
358                                 result);
359     }
360   } else {
361     // Get the register arguments for the invoke.
362     uint32_t args[Instruction::kMaxVarArgRegs] = {};
363     inst->GetVarArgs(args, inst_data);
364     // Drop the first register which is the method handle performing the invoke.
365     memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
366     args[Instruction::kMaxVarArgRegs - 1] = 0;
367     VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
368     if (invoke_exact) {
369       return MethodHandleInvokeExact(self,
370                                      shadow_frame,
371                                      method_handle,
372                                      callsite_type,
373                                      &operands,
374                                      result);
375     } else {
376       return MethodHandleInvoke(self,
377                                 shadow_frame,
378                                 method_handle,
379                                 callsite_type,
380                                 &operands,
381                                 result);
382     }
383   }
384 }
385 
386 bool DoMethodHandleInvokeExact(Thread* self,
387                                ShadowFrame& shadow_frame,
388                                const Instruction* inst,
389                                uint16_t inst_data,
390                                JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
391   if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
392     static const bool kIsRange = false;
393     return DoMethodHandleInvokeCommon<kIsRange>(
394         self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
395   } else {
396     DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
397     static const bool kIsRange = true;
398     return DoMethodHandleInvokeCommon<kIsRange>(
399         self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
400   }
401 }
402 
403 bool DoMethodHandleInvoke(Thread* self,
404                           ShadowFrame& shadow_frame,
405                           const Instruction* inst,
406                           uint16_t inst_data,
407                           JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
408   if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
409     static const bool kIsRange = false;
410     return DoMethodHandleInvokeCommon<kIsRange>(
411         self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
412   } else {
413     DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
414     static const bool kIsRange = true;
415     return DoMethodHandleInvokeCommon<kIsRange>(
416         self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
417   }
418 }
419 
420 static bool DoVarHandleInvokeCommon(Thread* self,
421                                     ShadowFrame& shadow_frame,
422                                     const Instruction* inst,
423                                     uint16_t inst_data,
424                                     JValue* result,
425                                     mirror::VarHandle::AccessMode access_mode)
426     REQUIRES_SHARED(Locks::mutator_lock_) {
427   // Make sure to check for async exceptions
428   if (UNLIKELY(self->ObserveAsyncException())) {
429     return false;
430   }
431 
432   bool is_var_args = inst->HasVarArgs();
433   const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
434   const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
435   StackHandleScope<1> hs(self);
436   Handle<mirror::VarHandle> var_handle = hs.NewHandle(
437       ObjPtr<mirror::VarHandle>::DownCast(shadow_frame.GetVRegReference(vRegC)));
438   ArtMethod* method = shadow_frame.GetMethod();
439   uint32_t var_args[Instruction::kMaxVarArgRegs];
440   std::optional<VarArgsInstructionOperands> var_args_operands(std::nullopt);
441   std::optional<RangeInstructionOperands> range_operands(std::nullopt);
442   InstructionOperands* all_operands;
443   if (is_var_args) {
444     inst->GetVarArgs(var_args, inst_data);
445     var_args_operands.emplace(var_args, inst->VRegA_45cc());
446     all_operands = &var_args_operands.value();
447   } else {
448     range_operands.emplace(inst->VRegC_4rcc(), inst->VRegA_4rcc());
449     all_operands = &range_operands.value();
450   }
451   NoReceiverInstructionOperands operands(all_operands);
452 
453   return VarHandleInvokeAccessor(self,
454                                  shadow_frame,
455                                  var_handle,
456                                  method,
457                                  dex::ProtoIndex(vRegH),
458                                  access_mode,
459                                  &operands,
460                                  result);
461 }
462 
463 #define DO_VAR_HANDLE_ACCESSOR(_access_mode)                                                \
464 bool DoVarHandle ## _access_mode(Thread* self,                                              \
465                                  ShadowFrame& shadow_frame,                                 \
466                                  const Instruction* inst,                                   \
467                                  uint16_t inst_data,                                        \
468                                  JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {    \
469   const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode;                \
470   return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
471 }
472 
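// As an illustration (not part of the original source), DO_VAR_HANDLE_ACCESSOR(Get) expands to:
//
//   bool DoVarHandleGet(Thread* self,
//                       ShadowFrame& shadow_frame,
//                       const Instruction* inst,
//                       uint16_t inst_data,
//                       JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
//     const auto access_mode = mirror::VarHandle::AccessMode::kGet;
//     return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode);
//   }
//
// Each accessor generated below therefore differs only in the AccessMode it forwards.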
473 DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
474 DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
475 DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
476 DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
477 DO_VAR_HANDLE_ACCESSOR(Get)
478 DO_VAR_HANDLE_ACCESSOR(GetAcquire)
479 DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
480 DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
481 DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
482 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
483 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
484 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
485 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
486 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
487 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
488 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
489 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
490 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
491 DO_VAR_HANDLE_ACCESSOR(GetAndSet)
492 DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
493 DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
494 DO_VAR_HANDLE_ACCESSOR(GetOpaque)
495 DO_VAR_HANDLE_ACCESSOR(GetVolatile)
496 DO_VAR_HANDLE_ACCESSOR(Set)
497 DO_VAR_HANDLE_ACCESSOR(SetOpaque)
498 DO_VAR_HANDLE_ACCESSOR(SetRelease)
499 DO_VAR_HANDLE_ACCESSOR(SetVolatile)
500 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
501 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
502 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
503 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)
504 
505 #undef DO_VAR_HANDLE_ACCESSOR
506 
507 template<bool is_range>
508 bool DoInvokePolymorphic(Thread* self,
509                          ShadowFrame& shadow_frame,
510                          const Instruction* inst,
511                          uint16_t inst_data,
512                          JValue* result) {
513   const int invoke_method_idx = inst->VRegB();
514   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
515   ArtMethod* invoke_method =
516       class_linker->ResolveMethodWithChecks(
517           invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);
518 
519   // Ensure intrinsic identifiers are initialized.
520   DCHECK(invoke_method->IsIntrinsic());
521 
522   // Dispatch based on intrinsic identifier associated with method.
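  // For illustration only (the exact entries live in ART_SIGNATURE_POLYMORPHIC_INTRINSICS_LIST),
  // an entry such as MethodHandleInvokeExact would make the macro below expand to:
  //   case Intrinsics::kMethodHandleInvokeExact:
  //     return DoMethodHandleInvokeExact(self, shadow_frame, inst, inst_data, result);
  // i.e. each signature-polymorphic intrinsic dispatches to the matching Do<Name> helper above.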
523   switch (invoke_method->GetIntrinsic()) {
524 #define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
525     case Intrinsics::k##Name:                           \
526       return Do ## Name(self, shadow_frame, inst, inst_data, result);
527     ART_SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
528 #undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
529     default:
530       LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
531       UNREACHABLE();
532       return false;
533   }
534 }
535 
536 static JValue ConvertScalarBootstrapArgument(jvalue value) {
537   // value either contains a primitive scalar value if it corresponds
538   // to a primitive type, or it contains an integer value if it
539   // corresponds to an object instance reference id (e.g. a string id).
540   return JValue::FromPrimitive(value.j);
541 }
542 
543 static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
544     REQUIRES_SHARED(Locks::mutator_lock_) {
545   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
546   ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
547   switch (type) {
548     case EncodedArrayValueIterator::ValueType::kBoolean:
549     case EncodedArrayValueIterator::ValueType::kByte:
550     case EncodedArrayValueIterator::ValueType::kChar:
551     case EncodedArrayValueIterator::ValueType::kShort:
552       // These types are disallowed by JVMS. Treat as integers. This
553       // will result in CCEs being raised if the BSM has one of these
554       // types.
555     case EncodedArrayValueIterator::ValueType::kInt:
556       return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
557     case EncodedArrayValueIterator::ValueType::kLong:
558       return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
559     case EncodedArrayValueIterator::ValueType::kFloat:
560       return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
561     case EncodedArrayValueIterator::ValueType::kDouble:
562       return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
563     case EncodedArrayValueIterator::ValueType::kMethodType:
564       return GetClassRoot<mirror::MethodType>(class_roots);
565     case EncodedArrayValueIterator::ValueType::kMethodHandle:
566       return GetClassRoot<mirror::MethodHandle>(class_roots);
567     case EncodedArrayValueIterator::ValueType::kString:
568       return GetClassRoot<mirror::String>();
569     case EncodedArrayValueIterator::ValueType::kType:
570       return GetClassRoot<mirror::Class>();
571     case EncodedArrayValueIterator::ValueType::kField:
572     case EncodedArrayValueIterator::ValueType::kMethod:
573     case EncodedArrayValueIterator::ValueType::kEnum:
574     case EncodedArrayValueIterator::ValueType::kArray:
575     case EncodedArrayValueIterator::ValueType::kAnnotation:
576     case EncodedArrayValueIterator::ValueType::kNull:
577       return nullptr;
578     case EncodedArrayValueIterator::ValueType::kEndOfInput:
579       LOG(FATAL) << "Unreachable";
580       UNREACHABLE();
581   }
582 }
583 
584 static bool GetArgumentForBootstrapMethod(Thread* self,
585                                           ArtMethod* referrer,
586                                           EncodedArrayValueIterator::ValueType type,
587                                           const JValue* encoded_value,
588                                           JValue* decoded_value)
589     REQUIRES_SHARED(Locks::mutator_lock_) {
590   // The encoded_value contains either a scalar value (IJDF) or a
591   // scalar DEX file index to a reference type to be materialized.
592   switch (type) {
593     case EncodedArrayValueIterator::ValueType::kInt:
594     case EncodedArrayValueIterator::ValueType::kFloat:
595       decoded_value->SetI(encoded_value->GetI());
596       return true;
597     case EncodedArrayValueIterator::ValueType::kLong:
598     case EncodedArrayValueIterator::ValueType::kDouble:
599       decoded_value->SetJ(encoded_value->GetJ());
600       return true;
601     case EncodedArrayValueIterator::ValueType::kMethodType: {
602       StackHandleScope<2> hs(self);
603       Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
604       Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
605       dex::ProtoIndex proto_idx(encoded_value->GetC());
606       ClassLinker* cl = Runtime::Current()->GetClassLinker();
607       ObjPtr<mirror::MethodType> o =
608           cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
609       if (UNLIKELY(o.IsNull())) {
610         DCHECK(self->IsExceptionPending());
611         return false;
612       }
613       decoded_value->SetL(o);
614       return true;
615     }
616     case EncodedArrayValueIterator::ValueType::kMethodHandle: {
617       uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
618       ClassLinker* cl = Runtime::Current()->GetClassLinker();
619       ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
620       if (UNLIKELY(o.IsNull())) {
621         DCHECK(self->IsExceptionPending());
622         return false;
623       }
624       decoded_value->SetL(o);
625       return true;
626     }
627     case EncodedArrayValueIterator::ValueType::kString: {
628       dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
629       ClassLinker* cl = Runtime::Current()->GetClassLinker();
630       ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
631       if (UNLIKELY(o.IsNull())) {
632         DCHECK(self->IsExceptionPending());
633         return false;
634       }
635       decoded_value->SetL(o);
636       return true;
637     }
638     case EncodedArrayValueIterator::ValueType::kType: {
639       dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
640       ClassLinker* cl = Runtime::Current()->GetClassLinker();
641       ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
642       if (UNLIKELY(o.IsNull())) {
643         DCHECK(self->IsExceptionPending());
644         return false;
645       }
646       decoded_value->SetL(o);
647       return true;
648     }
649     case EncodedArrayValueIterator::ValueType::kBoolean:
650     case EncodedArrayValueIterator::ValueType::kByte:
651     case EncodedArrayValueIterator::ValueType::kChar:
652     case EncodedArrayValueIterator::ValueType::kShort:
653     case EncodedArrayValueIterator::ValueType::kField:
654     case EncodedArrayValueIterator::ValueType::kMethod:
655     case EncodedArrayValueIterator::ValueType::kEnum:
656     case EncodedArrayValueIterator::ValueType::kArray:
657     case EncodedArrayValueIterator::ValueType::kAnnotation:
658     case EncodedArrayValueIterator::ValueType::kNull:
659       // Unreachable - unsupported types that have been checked when
660       // determining the effective call site type based on the bootstrap
661       // argument types.
662     case EncodedArrayValueIterator::ValueType::kEndOfInput:
663       LOG(FATAL) << "Unreachable";
664       UNREACHABLE();
665   }
666 }
667 
668 static bool PackArgumentForBootstrapMethod(Thread* self,
669                                            ArtMethod* referrer,
670                                            CallSiteArrayValueIterator* it,
671                                            ShadowFrameSetter* setter)
672     REQUIRES_SHARED(Locks::mutator_lock_) {
673   auto type = it->GetValueType();
674   const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
675   JValue decoded_value;
676   if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
677     return false;
678   }
679   switch (it->GetValueType()) {
680     case EncodedArrayValueIterator::ValueType::kInt:
681     case EncodedArrayValueIterator::ValueType::kFloat:
682       setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
683       return true;
684     case EncodedArrayValueIterator::ValueType::kLong:
685     case EncodedArrayValueIterator::ValueType::kDouble:
686       setter->SetLong(decoded_value.GetJ());
687       return true;
688     case EncodedArrayValueIterator::ValueType::kMethodType:
689     case EncodedArrayValueIterator::ValueType::kMethodHandle:
690     case EncodedArrayValueIterator::ValueType::kString:
691     case EncodedArrayValueIterator::ValueType::kType:
692       setter->SetReference(decoded_value.GetL());
693       return true;
694     case EncodedArrayValueIterator::ValueType::kBoolean:
695     case EncodedArrayValueIterator::ValueType::kByte:
696     case EncodedArrayValueIterator::ValueType::kChar:
697     case EncodedArrayValueIterator::ValueType::kShort:
698     case EncodedArrayValueIterator::ValueType::kField:
699     case EncodedArrayValueIterator::ValueType::kMethod:
700     case EncodedArrayValueIterator::ValueType::kEnum:
701     case EncodedArrayValueIterator::ValueType::kArray:
702     case EncodedArrayValueIterator::ValueType::kAnnotation:
703     case EncodedArrayValueIterator::ValueType::kNull:
704       // Unreachable - unsupported types that have been checked when
705       // determining the effective call site type based on the bootstrap
706       // argument types.
707     case EncodedArrayValueIterator::ValueType::kEndOfInput:
708       LOG(FATAL) << "Unreachable";
709       UNREACHABLE();
710   }
711 }
712 
713 static bool PackCollectorArrayForBootstrapMethod(Thread* self,
714                                                  ArtMethod* referrer,
715                                                  ObjPtr<mirror::Class> array_type,
716                                                  int32_t array_length,
717                                                  CallSiteArrayValueIterator* it,
718                                                  ShadowFrameSetter* setter)
719     REQUIRES_SHARED(Locks::mutator_lock_) {
720   StackHandleScope<1> hs(self);
721   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
722   JValue decoded_value;
723 
724 #define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type)                       \
725   Handle<mirror::Type ## Array> array =                                 \
726       hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length));   \
727   if (array.IsNull()) {                                                 \
728     return false;                                                       \
729   }                                                                     \
730   for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                 \
731     auto type = it->GetValueType();                                     \
732     DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);   \
733     const JValue encoded_value =                                        \
734         ConvertScalarBootstrapArgument(it->GetJavaValue());             \
735     GetArgumentForBootstrapMethod(self,                                 \
736                                   referrer,                             \
737                                   type,                                 \
738                                   &encoded_value,                       \
739                                   &decoded_value);                      \
740     array->Set(i, decoded_value.Get ## Descriptor());                   \
741   }                                                                     \
742   setter->SetReference(array.Get());                                    \
743   return true;
744 
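// For instance, COLLECT_PRIMITIVE_ARRAY(I, Int) above allocates a mirror::IntArray of
// array_length, decodes each remaining encoded kInt bootstrap value through
// GetArgumentForBootstrapMethod(), stores it with array->Set(i, decoded_value.GetI()), and
// finally publishes the array via setter->SetReference(array.Get()).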
745 #define COLLECT_REFERENCE_ARRAY(T, Type)                                \
746   Handle<mirror::ObjectArray<T>> array =                   /* NOLINT */ \
747       hs.NewHandle(mirror::ObjectArray<T>::Alloc(self,                  \
748                                                  array_type,            \
749                                                  array_length));        \
750   if (array.IsNull()) {                                                 \
751     return false;                                                       \
752   }                                                                     \
753   for (int32_t i = 0; it->HasNext(); it->Next(), ++i) {                 \
754     auto type = it->GetValueType();                                     \
755     DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type);   \
756     const JValue encoded_value =                                        \
757         ConvertScalarBootstrapArgument(it->GetJavaValue());             \
758     if (!GetArgumentForBootstrapMethod(self,                            \
759                                        referrer,                        \
760                                        type,                            \
761                                        &encoded_value,                  \
762                                        &decoded_value)) {               \
763       return false;                                                     \
764     }                                                                   \
765     ObjPtr<mirror::Object> o = decoded_value.GetL();                    \
766     if (Runtime::Current()->IsActiveTransaction()) {                    \
767       array->Set<true>(i, ObjPtr<T>::DownCast(o));                      \
768     } else {                                                            \
769       array->Set<false>(i, ObjPtr<T>::DownCast(o));                     \
770     }                                                                   \
771   }                                                                     \
772   setter->SetReference(array.Get());                                    \
773   return true;
774 
775   ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
776   ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
777   if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
778     COLLECT_PRIMITIVE_ARRAY(I, Int);
779   } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
780     COLLECT_PRIMITIVE_ARRAY(J, Long);
781   } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
782     COLLECT_PRIMITIVE_ARRAY(F, Float);
783   } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
784     COLLECT_PRIMITIVE_ARRAY(D, Double);
785   } else if (component_type == GetClassRoot<mirror::MethodType>()) {
786     COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
787   } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
788     COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
789   } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
790     COLLECT_REFERENCE_ARRAY(mirror::String, String);
791   } else if (component_type == GetClassRoot<mirror::Class>()) {
792     COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
793   } else {
794     component_type->DumpClass(LOG_STREAM(FATAL_WITHOUT_ABORT), mirror::Class::kDumpClassFullDetail);
795     LOG(FATAL) << "unexpected class: " << component_type->PrettyTypeOf();
796     UNREACHABLE();
797   }
798   #undef COLLECT_PRIMITIVE_ARRAY
799   #undef COLLECT_REFERENCE_ARRAY
800 }
801 
802 static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
803                                                                   const DexFile* dex_file,
804                                                                   uint32_t call_site_idx)
805     REQUIRES_SHARED(Locks::mutator_lock_) {
806   const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
807   CallSiteArrayValueIterator it(*dex_file, csi);
808   DCHECK_GE(it.Size(), 1u);
809 
810   StackHandleScope<2> hs(self);
811   // Create array for parameter types.
812   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
813   ObjPtr<mirror::Class> class_array_type =
814       GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
815   Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
816       mirror::ObjectArray<mirror::Class>::Alloc(self,
817                                                 class_array_type,
818                                                 static_cast<int>(it.Size())));
819   if (ptypes.IsNull()) {
820     DCHECK(self->IsExceptionPending());
821     return nullptr;
822   }
823 
824   // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
825   // that the runtime will construct.
826   ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
827   it.Next();
828 
829   // The remaining parameter types are derived from the types of
830   // arguments present in the DEX file.
831   int index = 1;
832   while (it.HasNext()) {
833     ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
834     if (ptype.IsNull()) {
835       ThrowClassCastException("Unsupported bootstrap argument type");
836       return nullptr;
837     }
838     ptypes->Set(index, ptype);
839     index++;
840     it.Next();
841   }
842   DCHECK_EQ(static_cast<size_t>(index), it.Size());
843 
844   // By definition, the return type is always a j.l.i.CallSite.
845   Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
846   return mirror::MethodType::Create(self, rtype, ptypes);
847 }
848 
849 static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
850                                                       ShadowFrame& shadow_frame,
851                                                       uint32_t call_site_idx)
852     REQUIRES_SHARED(Locks::mutator_lock_) {
853   StackHandleScope<5> hs(self);
854   // There are three mandatory arguments expected from the call site
855   // value array in the DEX file: the bootstrap method handle, the
856   // method name to pass to the bootstrap method, and the method type
857   // to pass to the bootstrap method.
858   static constexpr size_t kMandatoryArgumentsCount = 3;
859   ArtMethod* referrer = shadow_frame.GetMethod();
860   const DexFile* dex_file = referrer->GetDexFile();
861   const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
862   CallSiteArrayValueIterator it(*dex_file, csi);
863   if (it.Size() < kMandatoryArgumentsCount) {
864     ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
865                               it.Size(), kMandatoryArgumentsCount);
866     return nullptr;
867   }
868 
869   if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
870     ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
871     return nullptr;
872   }
873 
874   uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
875   it.Next();
876 
877   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
878   Handle<mirror::MethodHandle> bsm =
879       hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
880   if (bsm.IsNull()) {
881     DCHECK(self->IsExceptionPending());
882     return nullptr;
883   }
884 
885   if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
886     // The JLS suggests also accepting constructors. This is currently
887     // hard as constructor invocations happen via transformers in ART
888     // today. The constructor would need to be on a class derived from java.lang.invoke.CallSite.
889     ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
890     return nullptr;
891   }
892 
893   // Construct the local call site type information based on the 3
894   // mandatory arguments provided by the runtime and the static arguments
895   // in the DEX file. We will use these arguments to build a shadow frame.
896   MutableHandle<mirror::MethodType> call_site_type =
897       hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
898   if (call_site_type.IsNull()) {
899     DCHECK(self->IsExceptionPending());
900     return nullptr;
901   }
902 
903   // Check if this BSM is targeting a variable arity method. If so,
904   // we'll need to collect the trailing arguments into an array.
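  // Hypothetical example: a varargs BSM typed (Lookup, String, MethodType, String[])CallSite
  // used with a call site whose built type is (Lookup, String, MethodType, String, String)CallSite
  // collects the two trailing Strings into a String[2] that becomes the BSM's final parameter.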
905   Handle<mirror::Array> collector_arguments;
906   int32_t collector_arguments_length;
907   if (bsm->GetTargetMethod()->IsVarargs()) {
908     int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
909     if (number_of_bsm_parameters == 0) {
910       ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
911       return nullptr;
912     }
913     Handle<mirror::Class> collector_array_class =
914         hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
915     if (!collector_array_class->IsArrayClass()) {
916       ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
917       return nullptr;
918     }
919     // The call site may include no arguments to be collected. In this
920     // case the number of arguments must be at least the number of BSM
921     // parameters less the collector array.
922     if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
923       ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
924       return nullptr;
925     }
926     // Check all the arguments to be collected match the collector array component type.
927     for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
928       if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
929         ThrowClassCastException(collector_array_class->GetComponentType(),
930                                 call_site_type->GetPTypes()->Get(i));
931         return nullptr;
932       }
933     }
934     // Update the call site method type so it now includes the collector array.
935     int32_t collector_arguments_start = number_of_bsm_parameters - 1;
936     collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
937     call_site_type.Assign(
938         mirror::MethodType::CollectTrailingArguments(self,
939                                                      call_site_type.Get(),
940                                                      collector_array_class.Get(),
941                                                      collector_arguments_start));
942     if (call_site_type.IsNull()) {
943       DCHECK(self->IsExceptionPending());
944       return nullptr;
945     }
946   } else {
947     collector_arguments_length = 0;
948   }
949 
950   if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
951     ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
952     return nullptr;
953   }
954 
955   // BSM invocation has a different set of exceptions than
956   // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
957   // "opportunities". Unfortunately we cannot just leave this to the
958   // method handle invocation as this might generate a WMTE.
959   for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
960     ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
961     ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
962     if (!IsParameterTypeConvertible(from, to)) {
963       ThrowClassCastException(from, to);
964       return nullptr;
965     }
966   }
967   if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
968     ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
969     return nullptr;
970   }
971 
972   // Set-up a shadow frame for invoking the bootstrap method handle.
973   ShadowFrameAllocaUniquePtr bootstrap_frame =
974       CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
975                           referrer,
976                           shadow_frame.GetDexPC());
977   ScopedStackedShadowFramePusher pusher(self, bootstrap_frame.get());
978   ShadowFrameSetter setter(bootstrap_frame.get(), 0u);
979 
980   // The first parameter is a MethodHandles lookup instance.
981   Handle<mirror::Class> lookup_class =
982       hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
983   ObjPtr<mirror::MethodHandlesLookup> lookup =
984       mirror::MethodHandlesLookup::Create(self, lookup_class);
985   if (lookup.IsNull()) {
986     DCHECK(self->IsExceptionPending());
987     return nullptr;
988   }
989   setter.SetReference(lookup);
990 
991   // Pack the remaining arguments into the frame.
992   int number_of_arguments = call_site_type->GetNumberOfPTypes();
993   int argument_index;
994   for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
995     if (argument_index == number_of_arguments - 1 &&
996         call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
997       ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
998       if (!PackCollectorArrayForBootstrapMethod(self,
999                                                 referrer,
1000                                                 array_type,
1001                                                 collector_arguments_length,
1002                                                 &it,
1003                                                 &setter)) {
1004         DCHECK(self->IsExceptionPending());
1005         return nullptr;
1006       }
1007     } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
1008       DCHECK(self->IsExceptionPending());
1009       return nullptr;
1010     }
1011     it.Next();
1012   }
1013   DCHECK(!it.HasNext());
1014   DCHECK(setter.Done());
1015 
1016   // Invoke the bootstrap method handle.
1017   JValue result;
1018   RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
1019   bool invoke_success = MethodHandleInvoke(self,
1020                                            *bootstrap_frame,
1021                                            bsm,
1022                                            call_site_type,
1023                                            &operands,
1024                                            &result);
1025   if (!invoke_success) {
1026     DCHECK(self->IsExceptionPending());
1027     return nullptr;
1028   }
1029 
1030   Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
1031   if (UNLIKELY(object.IsNull())) {
1032     // This will typically be for LambdaMetafactory which is not supported.
1033     ThrowClassCastException("Bootstrap method returned null");
1034     return nullptr;
1035   }
1036 
1037   // Check the result type is a subclass of j.l.i.CallSite.
1038   ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
1039   if (UNLIKELY(!object->InstanceOf(call_site_class))) {
1040     ThrowClassCastException(object->GetClass(), call_site_class);
1041     return nullptr;
1042   }
1043 
1044   // Check the call site target is not null as we're going to invoke it.
1045   ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
1046   ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
1047   if (UNLIKELY(target == nullptr)) {
1048     ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
1049     return nullptr;
1050   }
1051   return call_site;
1052 }
1053 
1054 namespace {
1055 
1056 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1057                                            ShadowFrame& shadow_frame,
1058                                            uint32_t call_site_idx)
1059     REQUIRES_SHARED(Locks::mutator_lock_) {
1060   StackHandleScope<1> hs(self);
1061   Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1062 
1063   // Get the call site from the DexCache if present.
1064   ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1065   if (LIKELY(call_site != nullptr)) {
1066     return call_site;
1067   }
1068 
1069   // Invoke the bootstrap method to get a candidate call site.
1070   call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1071   if (UNLIKELY(call_site == nullptr)) {
1072     if (!self->GetException()->IsError()) {
1073       // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1074       ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1075                                        call_site_idx);
1076     }
1077     return nullptr;
1078   }
1079 
1080   // Attempt to place the candidate call site into the DexCache, return the winning call site.
1081   return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1082 }
1083 
1084 }  // namespace
1085 
1086 bool DoInvokeCustom(Thread* self,
1087                     ShadowFrame& shadow_frame,
1088                     uint32_t call_site_idx,
1089                     const InstructionOperands* operands,
1090                     JValue* result) {
1091   // Make sure to check for async exceptions
1092   if (UNLIKELY(self->ObserveAsyncException())) {
1093     return false;
1094   }
1095 
1096   // invoke-custom is not supported in transactions: transactions
1097   // support only a limited set of types, whereas invoke-custom allows
1098   // running arbitrary code and instantiating arbitrary types.
1099   CHECK(!Runtime::Current()->IsActiveTransaction());
1100 
1101   ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1102   if (call_site.IsNull()) {
1103     DCHECK(self->IsExceptionPending());
1104     return false;
1105   }
1106 
1107   StackHandleScope<2> hs(self);
1108   Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1109   Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1110   DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1111       << " call_site_idx " << call_site_idx;
1112   return MethodHandleInvokeExact(self,
1113                                  shadow_frame,
1114                                  target,
1115                                  target_method_type,
1116                                  operands,
1117                                  result);
1118 }
1119 
1120 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
1121 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1122                                   size_t dest_reg, size_t src_reg)
1123     REQUIRES_SHARED(Locks::mutator_lock_) {
1124   // Use an unsigned type so that sign extension does not make this wrong on 64-bit systems.
1125   uint32_t src_value = shadow_frame.GetVReg(src_reg);
1126   ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1127 
1128   // If both register locations contain the same value, the register probably holds a reference.
1129   // Note: As an optimization, non-moving collectors leave a stale reference value
1130   // in the references array even after the original vreg was overwritten to a non-reference.
1131   if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1132     new_shadow_frame->SetVRegReference(dest_reg, o);
1133   } else {
1134     new_shadow_frame->SetVReg(dest_reg, src_value);
1135   }
1136 }
1137 
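// Copies the invoke's argument registers from the caller's frame into the callee's frame:
// for range invokes this is the contiguous vreg window starting at `first_src_reg`,
// otherwise the explicit registers listed in `arg`.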
1138 template <bool is_range>
1139 inline void CopyRegisters(ShadowFrame& caller_frame,
1140                           ShadowFrame* callee_frame,
1141                           const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1142                           const size_t first_src_reg,
1143                           const size_t first_dest_reg,
1144                           const size_t num_regs) {
1145   if (is_range) {
1146     const size_t dest_reg_bound = first_dest_reg + num_regs;
1147     for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1148         ++dest_reg, ++src_reg) {
1149       AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1150     }
1151   } else {
1152     DCHECK_LE(num_regs, arraysize(arg));
1153 
1154     for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1155       AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1156     }
1157   }
1158 }
1159 
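// Builds the callee's shadow frame, copies the arguments into it (performing type checks
// on reference arguments when the caller has not passed access-check-free verification),
// performs the call via PerformCall and, for String.<init>, propagates the result to all
// aliases of `this`.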
1160 template <bool is_range>
1161 static inline bool DoCallCommon(ArtMethod* called_method,
1162                                 Thread* self,
1163                                 ShadowFrame& shadow_frame,
1164                                 JValue* result,
1165                                 uint16_t number_of_inputs,
1166                                 uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1167                                 uint32_t vregC,
1168                                 bool string_init) {
1169   // Compute method information.
1170   CodeItemDataAccessor accessor(called_method->DexInstructionData());
1171   // Number of registers for the callee's call frame.
1172   uint16_t num_regs;
1173   // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
1174   // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
1175   // entrypoint to use once we start building the shadow frame.
1176 
1177   const bool use_interpreter_entrypoint = ShouldStayInSwitchInterpreter(called_method);
1178   if (LIKELY(accessor.HasCodeItem())) {
1179     // When transitioning to compiled code, space only needs to be reserved for the input registers.
1180     // The rest of the frame gets discarded. This also prevents accessing the called method's code
1181     // item, saving memory by keeping code items of compiled code untouched.
1182     if (!use_interpreter_entrypoint) {
1183       DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
1184       num_regs = number_of_inputs;
1185     } else {
1186       num_regs = accessor.RegistersSize();
1187       DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
1188     }
1189   } else {
1190     DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1191     num_regs = number_of_inputs;
1192   }
1193 
1194   // Hack for String init:
1195   //
1196   // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
1197   //         invoke-x StringFactory(a, b, c, ...)
1198   // by effectively dropping the first virtual register from the invoke.
1199   //
1200   // (at this point the ArtMethod has already been replaced,
1201   // so we just need to fix-up the arguments)
1202   //
1203   // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
1204   // to handle the compiler optimization of replacing `this` with null without
1205   // throwing NullPointerException.
1206   uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
1207   if (UNLIKELY(string_init)) {
1208     DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.
1209 
1210     // The new StringFactory call is static and has one fewer argument.
1211     if (!accessor.HasCodeItem()) {
1212       DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1213       num_regs--;
1214     }  // else: no need to change num_regs since it comes from string_init's code item
1215     number_of_inputs--;
1216 
1217     // Rewrite the var-args, dropping the 0th argument ("this")
1218     for (uint32_t i = 1; i < arraysize(arg); ++i) {
1219       arg[i - 1] = arg[i];
1220     }
1221     arg[arraysize(arg) - 1] = 0;
1222 
1223     // Rewrite the non-var-arg case
1224     vregC++;  // Skips the 0th vreg in the range ("this").
1225   }
1226 
1227   // Parameter registers go at the end of the shadow frame.
1228   DCHECK_GE(num_regs, number_of_inputs);
1229   size_t first_dest_reg = num_regs - number_of_inputs;
1230   DCHECK_NE(first_dest_reg, static_cast<size_t>(-1));
1231 
1232   // Allocate shadow frame on the stack.
1233   const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
1234   ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
1235       CREATE_SHADOW_FRAME(num_regs, called_method, /* dex pc */ 0);
1236   ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
1237 
1238   // Initialize the new shadow frame by copying the caller's registers into it.
1239   if (!shadow_frame.GetMethod()->SkipAccessChecks()) {
1240     // Slow path.
1241     // We might need to do class loading, which incurs a thread state change to kNative. So
1242     // register the shadow frame as under construction and allow suspension again.
1243     ScopedStackedShadowFramePusher pusher(self, new_shadow_frame);
1244     self->EndAssertNoThreadSuspension(old_cause);
1245 
1246     // ArtMethod here is needed to check type information of the call site against the callee.
1247     // Type information is retrieved from a DexFile/DexCache for that respective declared method.
1248     //
1249     // As a special case for proxy methods, which are not dex-backed,
1250     // we have to retrieve type information from the proxy's method
1251     // interface method instead (which is dex backed since proxies are never interfaces).
1252     ArtMethod* method =
1253         new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);
1254 
1255     // We need to do runtime check on reference assignment. We need to load the shorty
1256     // to get the exact type of each reference argument.
1257     const dex::TypeList* params = method->GetParameterTypeList();
1258     uint32_t shorty_len = 0;
1259     const char* shorty = method->GetShorty(&shorty_len);
1260 
1261     // Handle the receiver separately since it's not part of the shorty.
1262     size_t dest_reg = first_dest_reg;
1263     size_t arg_offset = 0;
1264 
1265     if (!method->IsStatic()) {
1266       size_t receiver_reg = is_range ? vregC : arg[0];
1267       new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
1268       ++dest_reg;
1269       ++arg_offset;
1270       DCHECK(!string_init);  // All StringFactory methods are static.
1271     }
1272 
1273     // Copy the caller's invoke-* arguments into the callee's parameter registers.
1274     for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
1275       // Skip the 0th 'shorty' type since it represents the return type.
1276       DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
1277       const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
1278       switch (shorty[shorty_pos + 1]) {
1279         // Handle Object references. 1 virtual register slot.
1280         case 'L': {
1281           ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
1282           if (o != nullptr) {
1283             const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
1284             ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
1285             if (arg_type == nullptr) {
1286               StackHandleScope<1> hs(self);
1287               // Preserve o since it is used below and ResolveClassFromTypeIndex may
1288               // cause thread suspension.
1289               HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
1290               arg_type = method->ResolveClassFromTypeIndex(type_idx);
1291               if (arg_type == nullptr) {
1292                 CHECK(self->IsExceptionPending());
1293                 return false;
1294               }
1295             }
1296             if (!o->VerifierInstanceOf(arg_type)) {
1297               // This should never happen.
1298               std::string temp1, temp2;
1299               self->ThrowNewExceptionF("Ljava/lang/InternalError;",
1300                                        "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
1301                                        new_shadow_frame->GetMethod()->GetName(), shorty_pos,
1302                                        o->GetClass()->GetDescriptor(&temp1),
1303                                        arg_type->GetDescriptor(&temp2));
1304               return false;
1305             }
1306           }
1307           new_shadow_frame->SetVRegReference(dest_reg, o);
1308           break;
1309         }
1310         // Handle doubles and longs. 2 consecutive virtual register slots.
1311         case 'J': case 'D': {
1312           uint64_t wide_value =
1313               (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
1314                static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
1315           new_shadow_frame->SetVRegLong(dest_reg, wide_value);
1316           // Skip the next virtual register slot since we already used it.
1317           ++dest_reg;
1318           ++arg_offset;
1319           break;
1320         }
1321         // Handle all other primitives that are always 1 virtual register slot.
1322         default:
1323           new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
1324           break;
1325       }
1326     }
1327   } else {
1328     if (is_range) {
1329       DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
1330     }
1331 
1332     CopyRegisters<is_range>(shadow_frame,
1333                             new_shadow_frame,
1334                             arg,
1335                             vregC,
1336                             first_dest_reg,
1337                             number_of_inputs);
1338     self->EndAssertNoThreadSuspension(old_cause);
1339   }
1340 
1341   PerformCall(self,
1342               accessor,
1343               shadow_frame.GetMethod(),
1344               first_dest_reg,
1345               new_shadow_frame,
1346               result,
1347               use_interpreter_entrypoint);
1348 
1349   if (string_init && !self->IsExceptionPending()) {
1350     SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
1351   }
1352 
1353   return !self->IsExceptionPending();
1354 }
1355 
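// Decodes the invoke's operands (var-arg form for invoke-kind, contiguous window for
// invoke-kind/range) and forwards them to DoCallCommon. Illustrative only (smali-style
// syntax, not code from this file), the two encodings look like:
//   invoke-virtual {v1, v2, v3}, LFoo;->bar(II)V        // var-arg form: arg[] = {v1, v2, v3}
//   invoke-virtual/range {v10 .. v12}, LFoo;->bar(II)V  // range form: vregC = 10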
1356 template<bool is_range>
1357 NO_STACK_PROTECTOR
1358 bool DoCall(ArtMethod* called_method,
1359             Thread* self,
1360             ShadowFrame& shadow_frame,
1361             const Instruction* inst,
1362             uint16_t inst_data,
1363             bool is_string_init,
1364             JValue* result) {
1365   // Argument word count.
1366   const uint16_t number_of_inputs =
1367       (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);
1368 
1369   // TODO: find a cleaner way to separate non-range and range information without duplicating
1370   //       code.
1371   uint32_t arg[Instruction::kMaxVarArgRegs] = {};  // only used in invoke-XXX.
1372   uint32_t vregC = 0;
1373   if (is_range) {
1374     vregC = inst->VRegC_3rc();
1375   } else {
1376     vregC = inst->VRegC_35c();
1377     inst->GetVarArgs(arg, inst_data);
1378   }
1379 
1380   return DoCallCommon<is_range>(
1381       called_method,
1382       self,
1383       shadow_frame,
1384       result,
1385       number_of_inputs,
1386       arg,
1387       vregC,
1388       is_string_init);
1389 }
1390 
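// Implements filled-new-array/filled-new-array-range: resolves the array type, allocates
// an array of the requested length and fills it from the operand registers. Reference and
// 'int' component types are supported; other primitive component types throw. Illustrative
// example (smali-style syntax, not code from this file):
//   filled-new-array {v0, v1, v2}, [I   // creates new int[]{v0, v1, v2}; fetched with move-result-object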
1391 template <bool is_range>
1392 bool DoFilledNewArray(const Instruction* inst,
1393                       const ShadowFrame& shadow_frame,
1394                       Thread* self,
1395                       JValue* result) {
1396   DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
1397          inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
1398   const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
1399   if (!is_range) {
1400     // Check that FILLED_NEW_ARRAY's length does not exceed 5 arguments.
1401     CHECK_LE(length, 5);
1402   }
1403   if (UNLIKELY(length < 0)) {
1404     ThrowNegativeArraySizeException(length);
1405     return false;
1406   }
1407   uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
1408   bool do_access_check = !shadow_frame.GetMethod()->SkipAccessChecks();
1409   ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
1410                                                              shadow_frame.GetMethod(),
1411                                                              self,
1412                                                              false,
1413                                                              do_access_check);
1414   if (UNLIKELY(array_class == nullptr)) {
1415     DCHECK(self->IsExceptionPending());
1416     return false;
1417   }
1418   CHECK(array_class->IsArrayClass());
1419   ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
1420   const bool is_primitive_int_component = component_class->IsPrimitiveInt();
1421   if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
1422     if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
1423       ThrowRuntimeException("Bad filled array request for type %s",
1424                             component_class->PrettyDescriptor().c_str());
1425     } else {
1426       self->ThrowNewExceptionF("Ljava/lang/InternalError;",
1427                                "Found type %s; filled-new-array not implemented for anything but 'int'",
1428                                component_class->PrettyDescriptor().c_str());
1429     }
1430     return false;
1431   }
1432   ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
1433       self,
1434       array_class,
1435       length,
1436       array_class->GetComponentSizeShift(),
1437       Runtime::Current()->GetHeap()->GetCurrentAllocator());
1438   if (UNLIKELY(new_array == nullptr)) {
1439     self->AssertPendingOOMException();
1440     return false;
1441   }
1442   uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
1443   uint32_t vregC = 0;   // only used in filled-new-array-range.
1444   if (is_range) {
1445     vregC = inst->VRegC_3rc();
1446   } else {
1447     inst->GetVarArgs(arg);
1448   }
1449   // We're initializing a newly allocated array, so we do not need to record that under
1450   // a transaction. If the transaction is aborted, the whole array shall be unreachable.
1451   if (LIKELY(is_primitive_int_component)) {
1452     ObjPtr<mirror::IntArray> int_array = new_array->AsIntArray();
1453     for (int32_t i = 0; i < length; ++i) {
1454       size_t src_reg = is_range ? vregC + i : arg[i];
1455       int_array->SetWithoutChecks</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
1456           i, shadow_frame.GetVReg(src_reg));
1457     }
1458   } else {
1459     ObjPtr<mirror::ObjectArray<mirror::Object>> object_array =
1460         new_array->AsObjectArray<mirror::Object>();
1461     for (int32_t i = 0; i < length; ++i) {
1462       size_t src_reg = is_range ? vregC + i : arg[i];
1463       object_array->SetWithoutChecks</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
1464           i, shadow_frame.GetVRegReference(src_reg));
1465     }
1466   }
1467 
1468   result->SetL(new_array);
1469   return true;
1470 }
1471 
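// Releases every monitor still held by `shadow_frame`. Used on non-standard exits (forced
// frame pop or an aborted transaction): when the method counts locks, the shadow frame's
// lock-count data is used directly; otherwise the verifier's lock analysis at the current
// dex pc determines which registers hold the locked objects.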
1472 void UnlockHeldMonitors(Thread* self, ShadowFrame* shadow_frame)
1473     REQUIRES_SHARED(Locks::mutator_lock_) {
1474   DCHECK(shadow_frame->GetForcePopFrame() ||
1475          (Runtime::Current()->IsActiveTransaction() &&
1476              Runtime::Current()->GetClassLinker()->IsTransactionAborted()));
1477   // Unlock all monitors.
1478   if (shadow_frame->GetMethod()->MustCountLocks()) {
1479     DCHECK(!shadow_frame->GetMethod()->SkipAccessChecks());
1480     // Get the monitors from the shadow-frame monitor-count data.
1481     shadow_frame->GetLockCountData().VisitMonitors(
1482       [&](mirror::Object** obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1483         // Since we don't use the 'obj' pointer after the DoMonitorExit call, everything
1484         // should be fine with respect to suspension.
1485         DoMonitorExit(self, shadow_frame, *obj);
1486       });
1487   } else {
1488     std::vector<verifier::MethodVerifier::DexLockInfo> locks;
1489     verifier::MethodVerifier::FindLocksAtDexPc(shadow_frame->GetMethod(),
1490                                                shadow_frame->GetDexPC(),
1491                                                &locks,
1492                                                Runtime::Current()->GetTargetSdkVersion());
1493     for (const auto& reg : locks) {
1494       if (UNLIKELY(reg.dex_registers.empty())) {
1495         LOG(ERROR) << "Unable to determine reference locked by "
1496                    << shadow_frame->GetMethod()->PrettyMethod() << " at pc "
1497                    << shadow_frame->GetDexPC();
1498       } else {
1499         DoMonitorExit(
1500             self, shadow_frame, shadow_frame->GetVRegReference(*reg.dex_registers.begin()));
1501       }
1502     }
1503   }
1504 }
1505 
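// Handles a non-standard method exit (e.g. a forced frame pop): drops any pending
// exception, optionally releases held monitors, clears the result and reports a
// method-exit event to the instrumentation if one is needed.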
1506 void PerformNonStandardReturn(Thread* self,
1507                               ShadowFrame& frame,
1508                               JValue& result,
1509                               const instrumentation::Instrumentation* instrumentation,
1510                               bool unlock_monitors) {
1511   if (UNLIKELY(self->IsExceptionPending())) {
1512     LOG(WARNING) << "Suppressing exception for non-standard method exit: "
1513                  << self->GetException()->Dump();
1514     self->ClearException();
1515   }
1516   if (unlock_monitors) {
1517     UnlockHeldMonitors(self, &frame);
1518     DoMonitorCheckOnExit(self, &frame);
1519   }
1520   result = JValue();
1521   if (UNLIKELY(NeedsMethodExitEvent(instrumentation))) {
1522     SendMethodExitEvents(self, instrumentation, frame, frame.GetMethod(), result);
1523   }
1524 }
1525 
1526 // Explicit DoCall template function declarations.
1527 #define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range)                      \
1528   template REQUIRES_SHARED(Locks::mutator_lock_)                       \
1529   bool DoCall<_is_range>(ArtMethod* method,                            \
1530                          Thread* self,                                 \
1531                          ShadowFrame& shadow_frame,                    \
1532                          const Instruction* inst,                      \
1533                          uint16_t inst_data,                           \
1534                          bool string_init,                             \
1535                          JValue* result)
1536 EXPLICIT_DO_CALL_TEMPLATE_DECL(false);
1537 EXPLICIT_DO_CALL_TEMPLATE_DECL(true);
1538 #undef EXPLICIT_DO_CALL_TEMPLATE_DECL
1539 
1540 // Explicit DoInvokePolymorphic template function declarations.
1541 #define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)          \
1542   template REQUIRES_SHARED(Locks::mutator_lock_)                         \
1543   bool DoInvokePolymorphic<_is_range>(                                   \
1544       Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,  \
1545       uint16_t inst_data, JValue* result)
1546 EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
1547 EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
1548 #undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL
1549 
1550 // Explicit DoFilledNewArray template function declarations.
1551 #define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_)               \
1552   template REQUIRES_SHARED(Locks::mutator_lock_)                             \
1553   bool DoFilledNewArray<_is_range_>(const Instruction* inst,                 \
1554                                     const ShadowFrame& shadow_frame,         \
1555                                     Thread* self,                            \
1556                                     JValue* result)
1557 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false);
1558 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true);
1559 #undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL
1560 
1561 }  // namespace interpreter
1562 }  // namespace art
1563