/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>
#include <string_view>

#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file_types.h"
#include "interpreter_common.h"
#include "interpreter_switch_impl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "mirror/string-inl.h"
#include "nativehelper/scoped_local_ref.h"
#include "scoped_thread_state_change-inl.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "unstarted_runtime.h"

namespace art HIDDEN {
namespace interpreter {

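// Reinterprets a 32-bit interpreter argument word as a heap reference. This
// relies on the runtime's 32-bit representation of object references in vregs,
// mirroring StackReference<mirror::Object>.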
ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return reinterpret_cast<mirror::Object*>(arg);
}

static void InterpreterJni(Thread* self,
                           ArtMethod* method,
                           std::string_view shorty,
                           ObjPtr<mirror::Object> receiver,
                           uint32_t* args,
                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  //       it should be removed and JNI compiled stubs used instead.
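  // Note on shorties: the first character is the return type and each
  // subsequent character is one parameter type, with 'L' denoting any
  // reference type. E.g. "ILI" below matches a method returning jint that
  // takes a reference and a jint.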
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      using fntype = jboolean(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      using fntype = jbyte(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      using fntype = jint(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "IIZ") {
      using fntype = jint(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      using fntype = jint(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      using fntype = jshort(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      using fntype = void(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      using fntype = jboolean(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      using fntype = jboolean(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      using fntype = void(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      using fntype = void(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
          << " shorty: " << shorty;
    }
  } else {
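    // Instance (non-static) methods: the receiver, rather than the declaring
    // class, is passed as the second JNI argument.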
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, ThreadState::kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "III") {
      using fntype = jint(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, ThreadState::kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
          << " shorty: " << shorty;
    }
  }
}

NO_STACK_PROTECTOR
static JValue ExecuteSwitch(Thread* self,
                            const CodeItemDataAccessor& accessor,
                            ShadowFrame& shadow_frame,
                            JValue result_register) REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
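  // During boot-image generation, class initializers run inside a transaction
  // so that their side effects can be rolled back; that mode requires the
  // transaction-aware variant of the switch interpreter.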
  auto switch_impl_cpp = runtime->IsActiveTransaction()
      ? runtime->GetClassLinker()->GetTransactionalInterpreter()
      : reinterpret_cast<const void*>(&ExecuteSwitchImplCpp</*transaction_active=*/ false>);
  return ExecuteSwitchImpl(
      self, accessor, shadow_frame, result_register, switch_impl_cpp);
}

NO_STACK_PROTECTOR
static inline JValue Execute(
    Thread* self,
    const CodeItemDataAccessor& accessor,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false,
    bool from_deoptimize = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());

  // We cache the result of NeedsDexPcEvents in the shadow frame so we don't need to call
  // NeedsDexPcEvents on every instruction, for better performance. NeedsDexPcEvents only gets
  // updated asynchronously in a SuspendAll scope, and any existing shadow frames are updated with
  // the new value. So it is safe to cache it here.
  shadow_frame.SetNotifyDexPcMoveEvents(
      Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(shadow_frame.GetMethod(), self));

  if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      CHECK_EQ(shadow_frame.GetDexPC(), 0u);
      self->AssertNoPendingException();
    }
    ArtMethod* method = shadow_frame.GetMethod();

    // If we can continue in JIT and JITed code is available, execute the JITed code.
    if (!stay_in_interpreter &&
        !self->IsForceInterpreter() &&
        !shadow_frame.GetForcePopFrame() &&
        !shadow_frame.GetNotifyDexPcMoveEvents()) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = accessor.RegistersSize() - accessor.InsSize();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }

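    // Report the method entry to instrumentation listeners (debugger, tracing).
    // A listener may request a forced frame pop, in which case we return
    // immediately via a non-standard exit.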
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    if (UNLIKELY(instrumentation->HasMethodEntryListeners() || shadow_frame.GetForcePopFrame())) {
      instrumentation->MethodEnterEvent(self, method);
      if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
        // The caller will retry this invoke or ignore the result. Just return immediately without
        // any value.
        DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
        JValue ret = JValue();
        PerformNonStandardReturn(self,
                                 shadow_frame,
                                 ret,
                                 instrumentation,
                                 /* unlock_monitors= */ false);
        return ret;
      }
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self,
                                           method,
                                           0);
        JValue ret = JValue();
        if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
          DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
          PerformNonStandardReturn(self,
                                   shadow_frame,
                                   ret,
                                   instrumentation,
                                   /* unlock_monitors= */ false);
        }
        return ret;
      }
    }
  }

  ArtMethod* method = shadow_frame.GetMethod();

  DCheckStaticState(self, method);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  DCHECK_IMPLIES(method->SkipAccessChecks(), !method->MustCountLocks());

  VLOG(interpreter) << "Interpreting " << method->PrettyMethod();

  return ExecuteSwitch(self, accessor, shadow_frame, result_register);
}

void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
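  // Bail out with a StackOverflowError if the native frame address is already
  // below the interpreter's stack limit; GetStackEndForInterpreter accounts
  // for whether implicit (fault-based) stack-overflow checks are in use.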
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError<kNativeStackType>(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  CodeItemDataAccessor accessor(method->DexInstructionData());
  uint16_t num_regs;
  uint16_t num_ins;
  if (accessor.HasCodeItem()) {
    num_regs = accessor.RegistersSize();
    num_ins = accessor.InsSize();
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError(receiver);
    return;
  } else {
    DCHECK(method->IsNative()) << method->PrettyMethod();
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShortyView());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
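  // shorty[0] is the return type, so the parameter at position shorty_pos
  // corresponds to shorty[shorty_pos + 1].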
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
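        // Wide (long/double) values arrive as two consecutive 32-bit words,
        // low word first, and occupy a vreg pair.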
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  if (!EnsureInitialized(self, shadow_frame)) {
    return;
  }
  self->PushShadowFrame(shadow_frame);
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, accessor, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame, since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

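// For a string-constructor invoke (invoke-direct on String.<init>), the
// receiver ("this") register is the first argument register of the invoke:
// vC in both the 35c and 3rc encodings.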
static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    JValue* ret_val,
                                    bool from_code,
                                    DeoptimizationMethodType deopt_method_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // How many frames we have executed.
  size_t frame_cnt = 0;
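  // Walk the chain of deoptimized shadow frames innermost-first. Each frame is
  // interpreted to completion and its result is threaded into the next
  // (caller) frame through `value`.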
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      DCHECK(self->GetException() != Thread::GetDeoptimizationException());
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception throw
      // event to the instrumentation. Skip throw listeners for the first frame. The deopt check
      // should happen after the throw listener is called, as the throw listener can trigger a
      // deoptimization.
      new_dex_pc = MoveToExceptionHandler(self,
                                          *shadow_frame,
                                          /* skip_listeners= */ false,
                                          /* skip_throw_listener= */ frame_cnt == 0) ?
                       shadow_frame->GetDexPC() :
                       dex::kDexNoIndex;
    } else if (!from_code) {
      // Deoptimization is not called from code directly.
      const Instruction* instr = &accessor.InstructionAt(dex_pc);
      if (deopt_method_type == DeoptimizationMethodType::kKeepDexPc ||
          shadow_frame->GetForceRetryInstruction()) {
        DCHECK(frame_cnt == 0 || shadow_frame->GetForceRetryInstruction())
            << "frame_cnt: " << frame_cnt
            << " force-retry: " << shadow_frame->GetForceRetryInstruction();
        // Need to re-execute the dex instruction.
        // (1) An invocation might be split into class initialization and invoke.
        //     In this case, the invoke should not be skipped.
        // (2) A suspend check should also execute the dex instruction at the
        //     corresponding dex pc.
        // If the ForceRetryInstruction bit is set this must be the second frame (the first being
        // the one that is being popped).
        DCHECK_EQ(new_dex_pc, dex_pc);
        shadow_frame->SetForceRetryInstruction(false);
      } else if (instr->Opcode() == Instruction::MONITOR_ENTER ||
                 instr->Opcode() == Instruction::MONITOR_EXIT) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // A non-idempotent dex instruction should not be re-executed.
        // On the other hand, if a MONITOR_ENTER is at the dex_pc of a suspend
        // check, that MONITOR_ENTER should be executed. That case is handled
        // above.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->IsInvoke()) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        if (IsStringInit(*instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling the string constructor in the original dex code doesn't generate a result
          // value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // A NEW_INSTANCE is simply re-executed, including
        // "new-instance String" which is compiled into a call into
        // StringFactory.newEmptyString().
        DCHECK_EQ(new_dex_pc, dex_pc);
      } else {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // By default, we re-execute the dex instruction; since it is not an
        // invoke, we don't have to decode the dex instruction to move the
        // result into the right vreg. All slow paths have been audited to be
        // idempotent except monitor-enter/exit and invocation stubs.
        // TODO: move result and advance dex pc. That also requires that we
        // can tell the return type of a runtime method, possibly by decoding
        // the dex instruction at the caller.
        DCHECK_EQ(new_dex_pc, dex_pc);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
      DCHECK_EQ(frame_cnt, 0u);
      DCHECK_EQ(new_dex_pc, dex_pc);
    }
    if (new_dex_pc != dex::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self,
                      accessor,
                      *shadow_frame,
                      value,
                      /* stay_in_interpreter= */ true,
                      /* from_deoptimize= */ true);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Subsequent deoptimized shadow frames must be at an invocation point, and
    // should advance the dex pc past the invoke instruction.
    from_code = false;
    deopt_method_type = DeoptimizationMethodType::kDefault;
    frame_cnt++;
  }
  ret_val->SetJ(value.GetJ());
}

NO_STACK_PROTECTOR
JValue EnterInterpreterFromEntryPoint(Thread* self, const CodeItemDataAccessor& accessor,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError<kNativeStackType>(self);
    return JValue();
  }

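  // Notify the JIT that compiled code transitioned back to the interpreter for
  // this method, e.g. so it can keep its profiling data consistent.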
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, accessor, *shadow_frame, JValue());
}

NO_STACK_PROTECTOR
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const CodeItemDataAccessor& accessor,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  bool implicit_check = Runtime::Current()->GetImplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError<kNativeStackType>(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, accessor, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    bool is_static = shadow_frame->GetMethod()->IsStatic();
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}

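// Checks that the constants baked into the nterp assembly agree with their C++
// definitions, so mismatches are caught early rather than corrupting state.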
void CheckInterpreterAsmConstants() {
  CheckNterpAsmConstants();
}

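// Returns whether the previous (caller) frame has the force-retry-instruction
// bit set, i.e. the invoke that produced the current frame will be re-executed
// once this frame is popped.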
bool PrevFrameWillRetry(Thread* self, const ShadowFrame& frame) {
  ShadowFrame* prev_frame = frame.GetLink();
  if (prev_frame == nullptr) {
    NthCallerVisitor vis(self, 1, false);
    vis.WalkStack();
    prev_frame = vis.GetCurrentShadowFrame();
    if (prev_frame == nullptr) {
      prev_frame = self->FindDebuggerShadowFrame(vis.GetFrameId());
    }
  }
  return prev_frame != nullptr && prev_frame->GetForceRetryInstruction();
}

}  // namespace interpreter
}  // namespace art