/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "method_handles-inl.h"

#include "android-base/macros.h"
#include "android-base/stringprintf.h"
#include "class_root-inl.h"
#include "common_dex_operations.h"
#include "common_throws.h"
#include "interpreter/shadow_frame-inl.h"
#include "interpreter/shadow_frame.h"
#include "jvalue-inl.h"
#include "mirror/class-inl.h"
#include "mirror/emulated_stack_frame-inl.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/method_handle_impl-inl.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_type-inl.h"
#include "mirror/var_handle.h"
#include "reflection-inl.h"
#include "reflection.h"
#include "thread.h"
#include "var_handles.h"
#include "well_known_classes.h"

namespace art HIDDEN {

using android::base::StringPrintf;

namespace {

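// The columns of each V(...) entry are: the Primitive::Type constant, the
// ArtField getter suffix used to read the boxed value (GetBoolean, GetByte,
// ...), the java.lang box class name, and the JValue setter shorthand /
// shorty character (SetZ, SetB, ...).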
#define PRIMITIVES_LIST(V) \
  V(Primitive::kPrimBoolean, Boolean, Boolean, Z) \
  V(Primitive::kPrimByte, Byte, Byte, B) \
  V(Primitive::kPrimChar, Char, Character, C) \
  V(Primitive::kPrimShort, Short, Short, S) \
  V(Primitive::kPrimInt, Int, Integer, I) \
  V(Primitive::kPrimLong, Long, Long, J) \
  V(Primitive::kPrimFloat, Float, Float, F) \
  V(Primitive::kPrimDouble, Double, Double, D)

// Assigns |type| to the primitive type associated with |klass|. Returns
// true if |klass| is a boxed primitive type (Integer, Long, etc.), false otherwise.
bool GetUnboxedPrimitiveType(ObjPtr<mirror::Class> klass, Primitive::Type* type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  std::string storage;
  const char* descriptor = klass->GetDescriptor(&storage);
  static const char kJavaLangPrefix[] = "Ljava/lang/";
  static const size_t kJavaLangPrefixSize = sizeof(kJavaLangPrefix) - 1;
  if (strncmp(descriptor, kJavaLangPrefix, kJavaLangPrefixSize) != 0) {
    return false;
  }

  descriptor += kJavaLangPrefixSize;
#define LOOKUP_PRIMITIVE(primitive, _, java_name, ___) \
  if (strcmp(descriptor, #java_name ";") == 0) {       \
    *type = primitive;                                 \
    return true;                                       \
  }

  PRIMITIVES_LIST(LOOKUP_PRIMITIVE);
#undef LOOKUP_PRIMITIVE
  return false;
}

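// Returns the class of the boxed representation of primitive |type|
// (e.g. java.lang.Integer for kPrimInt), or null for void and reference types.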
ObjPtr<mirror::Class> GetBoxedPrimitiveClass(Primitive::Type type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ArtMethod* m = nullptr;
  switch (type) {
#define CASE_PRIMITIVE(primitive, _, java_name, __)              \
    case primitive:                                              \
      m = WellKnownClasses::java_lang_ ## java_name ## _valueOf; \
      break;
    PRIMITIVES_LIST(CASE_PRIMITIVE);
#undef CASE_PRIMITIVE
    case Primitive::Type::kPrimNot:
    case Primitive::Type::kPrimVoid:
      return nullptr;
  }
  return m->GetDeclaringClass();
}

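// Unboxes |o|: if its class is one of the boxed primitive classes, stores the
// corresponding primitive type in |type|, reads the wrapped value from the
// box's single instance field into |value|, and returns true. Returns false
// if |o| is not a boxed primitive.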
bool GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o, Primitive::Type* type, JValue* value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ObjPtr<mirror::Class> klass = o->GetClass();
  ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
#define CASE_PRIMITIVE(primitive, abbrev, _, shorthand)         \
  if (klass == GetBoxedPrimitiveClass(primitive)) {             \
    *type = primitive;                                          \
    value->Set ## shorthand(primitive_field->Get ## abbrev(o)); \
    return true;                                                \
  }
  PRIMITIVES_LIST(CASE_PRIMITIVE)
#undef CASE_PRIMITIVE
  return false;
}

inline bool IsReferenceType(Primitive::Type type) {
  return type == Primitive::kPrimNot;
}

inline bool IsPrimitiveType(Primitive::Type type) {
  return !IsReferenceType(type);
}

}  // namespace

bool IsParameterTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This function returns true if there's any conceivable conversion
  // between |from| and |to|. It's expected this method will be used
  // to determine if a WrongMethodTypeException should be raised. The
  // decision logic follows the documentation for MethodType.asType().
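  // For example, int => long (widening), int => Number (boxing) and
  // Integer => long (unboxing followed by widening) are convertible,
  // whereas Integer => boolean and String => int are not.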
  if (from == to) {
    return true;
  }

  Primitive::Type from_primitive = from->GetPrimitiveType();
  Primitive::Type to_primitive = to->GetPrimitiveType();
  DCHECK(from_primitive != Primitive::Type::kPrimVoid);
  DCHECK(to_primitive != Primitive::Type::kPrimVoid);

  // If |to| and |from| are references.
  if (IsReferenceType(from_primitive) && IsReferenceType(to_primitive)) {
    // Assignability is determined during parameter conversion when
    // invoking the associated method handle.
    return true;
  }

  // If |to| and |from| are primitives and a widening conversion exists.
  if (Primitive::IsWidenable(from_primitive, to_primitive)) {
    return true;
  }

  // If |to| is a reference and |from| is a primitive, then boxing conversion.
  if (IsReferenceType(to_primitive) && IsPrimitiveType(from_primitive)) {
    return to->IsAssignableFrom(GetBoxedPrimitiveClass(from_primitive));
  }

  // If |from| is a reference and |to| is a primitive, then unboxing conversion.
  if (IsPrimitiveType(to_primitive) && IsReferenceType(from_primitive)) {
    if (from->DescriptorEquals("Ljava/lang/Object;")) {
      // Object might be converted into a primitive during unboxing.
      return true;
    }

    if (Primitive::IsNumericType(to_primitive) && from->DescriptorEquals("Ljava/lang/Number;")) {
      // Number might be unboxed into any of the number primitive types.
      return true;
    }

    Primitive::Type unboxed_type;
    if (GetUnboxedPrimitiveType(from, &unboxed_type)) {
      if (unboxed_type == to_primitive) {
        // Straightforward unboxing conversion such as Boolean => boolean.
        return true;
      }

      // Check if widening operations for numeric primitives would work,
      // such as Byte => byte => long.
      return Primitive::IsWidenable(unboxed_type, to_primitive);
    }
  }

  return false;
}

bool IsReturnTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (to->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Result will be ignored.
    return true;
  } else if (from->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Returned value will be 0 / null.
    return true;
  } else {
    // Otherwise apply usual parameter conversion rules.
    return IsParameterTypeConvertible(from, to);
  }
}

bool ConvertJValueCommon(
    const ThrowWrongMethodTypeFunction& throw_wmt,
    ObjPtr<mirror::Class> from,
    ObjPtr<mirror::Class> to,
    /*inout*/ JValue* value) {
  // The reader may be concerned about the safety of the heap object
  // that may be in |value|. There is only one case where allocation
  // is obviously needed and that's for boxing. However, in the case
  // of boxing |value| contains a non-reference type.

  const Primitive::Type from_type = from->GetPrimitiveType();
  const Primitive::Type to_type = to->GetPrimitiveType();

  // Put incoming value into |src_value| and set return value to 0.
  // Errors and conversions from void require the return value to be 0.
  const JValue src_value(*value);
  value->SetJ(0);

  // Conversions from void set the result to zero.
  if (from_type == Primitive::kPrimVoid) {
    return true;
  }

  // This method must be called only when the types don't match.
  DCHECK(from != to);

  if (IsPrimitiveType(from_type) && IsPrimitiveType(to_type)) {
    // The source and target types are both primitives.
    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, to_type, src_value, value))) {
      throw_wmt();
      return false;
    }
    return true;
  } else if (IsReferenceType(from_type) && IsReferenceType(to_type)) {
    // They're both reference types. If "from" is null, we can pass it
    // through unchanged. If not, we must generate a cast exception if
    // |to| is not assignable from the dynamic type of |ref|.
    //
    // Playing it safe with StackHandleScope here, not expecting any allocation
    // in mirror::Class::IsAssignable().
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Class> h_to(hs.NewHandle(to));
    Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
    if (UNLIKELY(!h_obj.IsNull() && !to->IsAssignableFrom(h_obj->GetClass()))) {
      ThrowClassCastException(h_to.Get(), h_obj->GetClass());
      return false;
    }
    value->SetL(h_obj.Get());
    return true;
  } else if (IsReferenceType(to_type)) {
    DCHECK(IsPrimitiveType(from_type));
    // The source type is a primitive and the target type is a reference, so we must box.
    // The target type may be a superclass of the boxed source type, for example,
    // if the source type is int, its boxed type is java.lang.Integer, and the target
    // type could be java.lang.Number.
    Primitive::Type type;
    if (!GetUnboxedPrimitiveType(to, &type)) {
      ObjPtr<mirror::Class> boxed_from_class = GetBoxedPrimitiveClass(from_type);
      if (LIKELY(boxed_from_class->IsSubClass(to))) {
        type = from_type;
      } else {
        throw_wmt();
        return false;
      }
    }

    if (UNLIKELY(from_type != type)) {
      throw_wmt();
      return false;
    }

    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, type, src_value, value))) {
      throw_wmt();
      return false;
    }

    // Then perform the actual boxing, and then set the reference.
    ObjPtr<mirror::Object> boxed = BoxPrimitive(type, src_value);
    value->SetL(boxed);
    return true;
  } else {
    // The source type is a reference and the target type is a primitive, so we must unbox.
    DCHECK(IsReferenceType(from_type));
    DCHECK(IsPrimitiveType(to_type));

    ObjPtr<mirror::Object> from_obj(src_value.GetL());
    if (UNLIKELY(from_obj.IsNull())) {
      ThrowNullPointerException(
          StringPrintf("Expected to unbox a '%s' primitive type but was returned null",
                       from->PrettyDescriptor().c_str()).c_str());
      return false;
    }

    ObjPtr<mirror::Class> from_obj_type = from_obj->GetClass();
    Primitive::Type from_primitive_type;
    if (!GetUnboxedPrimitiveType(from_obj_type, &from_primitive_type)) {
      ThrowClassCastException(from, to);
      return false;
    }

    Primitive::Type unboxed_type;
    JValue unboxed_value;
    if (UNLIKELY(!GetUnboxedTypeAndValue(from_obj, &unboxed_type, &unboxed_value))) {
      throw_wmt();
      return false;
    }

    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(unboxed_type, to_type, unboxed_value, value))) {
      if (from->IsAssignableFrom(GetBoxedPrimitiveClass(to_type))) {
        // CallSite may be Number, but the Number object is
        // incompatible, e.g. Number (Integer) for a short.
        ThrowClassCastException(from, to);
      } else {
        // CallSite is incompatible, e.g. Integer for a short.
        throw_wmt();
      }
      return false;
    }

    return true;
  }
}

namespace {

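// Copies the argument registers selected by |operands| from |caller_frame|
// into |callee_frame|, starting at |first_dst_reg|, preserving whether each
// vreg holds a reference or a plain 32-bit value.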
inline void CopyArgumentsFromCallerFrame(const ShadowFrame& caller_frame,
                                         ShadowFrame* callee_frame,
                                         const InstructionOperands* const operands,
                                         const size_t first_dst_reg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  for (size_t i = 0; i < operands->GetNumberOfOperands(); ++i) {
    size_t dst_reg = first_dst_reg + i;
    size_t src_reg = operands->GetOperand(i);
    // An unsigned type is required so that sign extension does not make the
    // comparison below wrong on 64-bit systems.
    uint32_t src_value = caller_frame.GetVReg(src_reg);
    ObjPtr<mirror::Object> o = caller_frame.GetVRegReference<kVerifyNone>(src_reg);
    // If both register locations contain the same value, the register probably holds a reference.
    // Note: As an optimization, non-moving collectors leave a stale reference value
    // in the references array even after the original vreg was overwritten to a non-reference.
    if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
      callee_frame->SetVRegReference(dst_reg, o);
    } else {
      callee_frame->SetVReg(dst_reg, src_value);
    }
  }
}

// Calculate the number of ins for a proxy or native method, where we
// can't just look at the code item.
static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(method->IsNative() || method->IsProxyMethod());
  method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  uint32_t shorty_length = 0;
  const char* shorty = method->GetShorty(&shorty_length);

  // The shorty includes the return type but not the receiver, so a static
  // method has shorty_length - 1 ins, while an instance method gains one back
  // for the receiver, giving shorty_length.
  size_t num_ins = method->IsStatic() ? shorty_length - 1 : shorty_length;
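  // Long and double arguments each occupy two vregs, so count one extra in
  // for every 'J' or 'D' in the shorty (index 0 is the return type and is
  // skipped).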
  for (const char* c = shorty + 1; *c != '\0'; ++c) {
    if (*c == 'J' || *c == 'D') {
      ++num_ins;
    }
  }
  return num_ins;
}

static inline bool MethodHandleInvokeTransform(Thread* self,
                                               ShadowFrame& shadow_frame,
                                               Handle<mirror::MethodHandle> method_handle,
                                               Handle<mirror::MethodType> callsite_type,
                                               const InstructionOperands* const operands,
                                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This can be fixed to two, because the method we're calling here
  // (MethodHandle.transformInternal) doesn't have any locals and the signature
  // is known:
  //
  //   private MethodHandle.transformInternal(EmulatedStackFrame sf);
  //
  // This means we need only two vregs:
  // - One for the method_handle object.
  // - One for the only method argument (an EmulatedStackFrame).
  static constexpr size_t kNumRegsForTransform = 2;

  ArtMethod* called_method = method_handle->GetTargetMethod();
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  DCHECK_EQ(kNumRegsForTransform, accessor.RegistersSize());
  DCHECK_EQ(kNumRegsForTransform, accessor.InsSize());

  StackHandleScope<2> hs(self);
  Handle<mirror::MethodType> callee_type(hs.NewHandle(method_handle->GetMethodType()));
  Handle<mirror::EmulatedStackFrame> sf(
      hs.NewHandle<mirror::EmulatedStackFrame>(
          mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs(
              self, callsite_type, callee_type, shadow_frame, operands)));
  if (sf == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("MethodHandleInvokeTransform");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(kNumRegsForTransform, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
  new_shadow_frame->SetVRegReference(0, method_handle.Get());
  new_shadow_frame->SetVRegReference(1, sf.Get());
  self->EndAssertNoThreadSuspension(old_cause);

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              0 /* first destination register */,
              new_shadow_frame,
              result,
              interpreter::ShouldStayInSwitchInterpreter(called_method));
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the transformer method we called returned a value, we need to copy it
  // back to |result|.
  sf->GetReturnValue(self, result);
  return true;
}

inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Method handle invocations on static fields should ensure the class is
  // initialized. This usually happens when an instance is constructed
  // or when class members are referenced, but this is not guaranteed when
  // looking up method handles.
  ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  return klass;
}

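// Resolves the method that should actually be invoked for |target_method|
// given the handle kind: virtual/interface dispatch on the receiver's class,
// a null-receiver check for direct calls, and a superclass vtable lookup for
// invoke-super. Returns null (with a pending exception) on failure.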
ArtMethod* RefineTargetMethod(Thread* self,
                              ShadowFrame& shadow_frame,
                              const mirror::MethodHandle::Kind& handle_kind,
                              ObjPtr<mirror::MethodType> handle_type,
                              const uint32_t receiver_reg,
                              ArtMethod* target_method) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (handle_kind == mirror::MethodHandle::Kind::kInvokeVirtual ||
      handle_kind == mirror::MethodHandle::Kind::kInvokeInterface) {
    // For virtual and interface methods ensure target_method points to
    // the actual method to invoke.
    ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
    ObjPtr<mirror::Class> declaring_class(target_method->GetDeclaringClass());
    if (receiver == nullptr || receiver->GetClass() != declaring_class) {
      // Verify that _vRegC is an object reference and of the type expected by
      // the receiver.
      if (!VerifyObjectIsClass(receiver, declaring_class)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
      return receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
          target_method, kRuntimePointerSize);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeDirect) {
    // String constructors are replaced with static StringFactory methods when a MethodHandle
    // object is created.
    DCHECK(!target_method->IsStringConstructor());
    ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
    if (receiver == nullptr) {
      ThrowNullPointerException("null receiver");
      return nullptr;
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeSuper) {
    // Note that we're not dynamically dispatching on the type of the receiver
    // here. We use the static type of the "receiver" object that we've
    // recorded in the method handle's type, which will be the same as the
    // special caller that was specified at the point of lookup.
    ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
    ObjPtr<mirror::Class> declaring_class = target_method->GetDeclaringClass();
    if (referrer_class == declaring_class) {
      return target_method;
    }
    if (declaring_class->IsInterface()) {
      if (target_method->IsAbstract()) {
        std::string msg =
            "Method " + target_method->PrettyMethod() + " is abstract interface method!";
        ThrowIllegalAccessException(msg.c_str());
        return nullptr;
      }
    } else {
      ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
      uint16_t vtable_index = target_method->GetMethodIndex();
      DCHECK(super_class != nullptr);
      DCHECK(super_class->HasVTable());
      // Note that super_class is a super of referrer_class and target_method
      // will always be declared by super_class (or one of its super classes).
      DCHECK_LT(vtable_index, super_class->GetVTableLength());
      return super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
    }
  }
  return target_method;
}

// Helper for getters in invoke-polymorphic.
inline static void MethodHandleFieldGet(Thread* self,
                                        const ShadowFrame& shadow_frame,
                                        ObjPtr<mirror::Object>& obj,
                                        ArtField* field,
                                        Primitive::Type field_type,
                                        JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  switch (field_type) {
    case Primitive::kPrimBoolean:
      DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimByte:
      DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimChar:
      DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimShort:
      DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimInt:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimLong:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimFloat:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimDouble:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimNot:
      DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Helper for setters in invoke-polymorphic.
inline bool MethodHandleFieldPut(Thread* self,
                                 ShadowFrame& shadow_frame,
                                 ObjPtr<mirror::Object>& obj,
                                 ArtField* field,
                                 Primitive::Type field_type,
                                 JValue& value) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  static const bool kTransaction = false;  // Not in a transaction.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      return
          DoFieldPutCommon<Primitive::kPrimBoolean, kTransaction>(
              self, shadow_frame, obj, field, value);
    case Primitive::kPrimByte:
      return DoFieldPutCommon<Primitive::kPrimByte, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimChar:
      return DoFieldPutCommon<Primitive::kPrimChar, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimShort:
      return DoFieldPutCommon<Primitive::kPrimShort, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return DoFieldPutCommon<Primitive::kPrimInt, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return DoFieldPutCommon<Primitive::kPrimLong, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimNot:
      return DoFieldPutCommon<Primitive::kPrimNot, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

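// Reads a value of |field_type| from vreg |vreg| of |shadow_frame| into a
// JValue. Float and double reuse the int and long paths since the shadow
// frame stores raw register bits.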
static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
                                      Primitive::Type field_type,
                                      uint32_t vreg) REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

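// Handles the field-access method handle kinds (kInstanceGet, kStaticGet,
// kInstancePut, kStaticPut): resolves the receiver (or initializes the
// declaring class for static access) and performs the get or put.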
bool MethodHandleFieldAccess(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const InstructionOperands* const operands,
                             JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  ArtField* field = method_handle->GetTargetField();
  Primitive::Type field_type = field->GetTypeAsPrimitiveType();
  switch (handle_kind) {
    case mirror::MethodHandle::kInstanceGet: {
      size_t obj_reg = operands->GetOperand(0);
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      if (obj == nullptr) {
        ThrowNullPointerException("Receiver is null");
        return false;
      }
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      return true;
    }
    case mirror::MethodHandle::kStaticGet: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      return true;
    }
    case mirror::MethodHandle::kInstancePut: {
      size_t obj_reg = operands->GetOperand(0);
      size_t value_reg = operands->GetOperand(1);
      const size_t kPTypeIndex = 1;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      if (obj == nullptr) {
        ThrowNullPointerException("Receiver is null");
        return false;
      }
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    case mirror::MethodHandle::kStaticPut: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      size_t value_reg = operands->GetOperand(0);
      const size_t kPTypeIndex = 0;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    default:
      LOG(FATAL) << "Unreachable: " << handle_kind;
      UNREACHABLE();
  }
}

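// Translates a MethodHandle invocation whose target is a VarHandle accessor
// (kInvokeVarHandle / kInvokeVarHandleExact): checks that the leading
// argument is a VarHandle, strips it from the call site type, and forwards
// the remaining arguments to the VarHandle accessor.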
bool DoVarHandleInvokeTranslation(Thread* self,
                                  ShadowFrame& shadow_frame,
                                  Handle<mirror::MethodHandle> method_handle,
                                  Handle<mirror::MethodType> callsite_type,
                                  const InstructionOperands* const operands,
                                  JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  //
  // Basic checks that apply in all cases.
  //
  StackHandleScope<6> hs(self);
  Handle<mirror::ObjectArray<mirror::Class>>
      callsite_ptypes(hs.NewHandle(callsite_type->GetPTypes()));
  Handle<mirror::ObjectArray<mirror::Class>>
      mh_ptypes(hs.NewHandle(method_handle->GetMethodType()->GetPTypes()));

  // Check that the first parameter is a VarHandle.
  if (callsite_ptypes->GetLength() < 1 ||
      !mh_ptypes->Get(0)->IsAssignableFrom(callsite_ptypes->Get(0)) ||
      mh_ptypes->Get(0) != GetClassRoot<mirror::VarHandle>()) {
    ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
    return false;
  }

  // Get the receiver.
  ObjPtr<mirror::Object> receiver = shadow_frame.GetVRegReference(operands->GetOperand(0));
  if (receiver == nullptr) {
    ThrowNullPointerException("Expected argument 1 to be a non-null VarHandle");
    return false;
  }

  // Cast to VarHandle instance.
  Handle<mirror::VarHandle> vh(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
  DCHECK(GetClassRoot<mirror::VarHandle>()->IsAssignableFrom(vh->GetClass()));

  // Determine the accessor kind to dispatch.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  mirror::VarHandle::AccessMode access_mode =
      mirror::VarHandle::GetAccessModeByIntrinsic(target_method->GetIntrinsic());
  Handle<mirror::MethodType> vh_type =
      hs.NewHandle(vh->GetMethodTypeForAccessMode(self, access_mode));
  Handle<mirror::MethodType> mh_invoke_type = hs.NewHandle(
      mirror::MethodType::CloneWithoutLeadingParameter(self, method_handle->GetMethodType()));
  if (method_handle->GetHandleKind() == mirror::MethodHandle::Kind::kInvokeVarHandleExact) {
    if (!mh_invoke_type->IsExactMatch(vh_type.Get())) {
      ThrowWrongMethodTypeException(vh_type.Get(), mh_invoke_type.Get());
      return false;
    }
  }

  Handle<mirror::MethodType> callsite_type_without_varhandle =
      hs.NewHandle(mirror::MethodType::CloneWithoutLeadingParameter(self, callsite_type.Get()));
  NoReceiverInstructionOperands varhandle_operands(operands);
  return VarHandleInvokeAccessor(self,
                                 shadow_frame,
                                 vh,
                                 callsite_type_without_varhandle,
                                 access_mode,
                                 &varhandle_operands,
                                 result);
}

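// Invokes the target method of a method-invoking handle kind: refines the
// target against the receiver, sets up a fresh shadow frame with the caller's
// arguments, and performs the call.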
static bool DoMethodHandleInvokeMethod(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       Handle<mirror::MethodHandle> method_handle,
                                       const InstructionOperands* const operands,
                                       JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = (operands->GetNumberOfOperands() > 0) ? operands->GetOperand(0) : 0u;
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                method_handle->GetHandleKind(),
                                                method_handle->GetMethodType(),
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  // Compute method information.
  CodeItemDataAccessor accessor(called_method->DexInstructionData());
  uint16_t num_regs;
  size_t first_dest_reg;
  if (LIKELY(accessor.HasCodeItem())) {
    num_regs = accessor.RegistersSize();
    first_dest_reg = num_regs - accessor.InsSize();
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else if (called_method->IsNative() || called_method->IsProxyMethod()) {
    // No local regs for proxy and native methods.
    num_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  } else {
    if (called_method->IsDefaultConflicting()) {
      ThrowIncompatibleClassChangeErrorForMethodConflict(called_method);
    } else {
      ThrowAbstractMethodError(called_method);
    }
    return false;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("DoMethodHandleInvokeMethod");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
  CopyArgumentsFromCallerFrame(shadow_frame, new_shadow_frame, operands, first_dest_reg);
  self->EndAssertNoThreadSuspension(old_cause);

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              interpreter::ShouldStayInSwitchInterpreter(called_method));
  if (self->IsExceptionPending()) {
    return false;
  }
  return true;
}

static bool MethodHandleInvokeExactInternal(Thread* self,
                                            ShadowFrame& shadow_frame,
                                            Handle<mirror::MethodHandle> method_handle,
                                            Handle<mirror::MethodType> callsite_type,
                                            const InstructionOperands* const operands,
                                            JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (!callsite_type->IsExactMatch(method_handle->GetMethodType())) {
    ThrowWrongMethodTypeException(method_handle->GetMethodType(), callsite_type.Get());
    return false;
  }

  switch (method_handle->GetHandleKind()) {
    case mirror::MethodHandle::Kind::kInvokeDirect:
    case mirror::MethodHandle::Kind::kInvokeInterface:
    case mirror::MethodHandle::Kind::kInvokeStatic:
    case mirror::MethodHandle::Kind::kInvokeSuper:
    case mirror::MethodHandle::Kind::kInvokeVirtual:
      return DoMethodHandleInvokeMethod(self, shadow_frame, method_handle, operands, result);
    case mirror::MethodHandle::Kind::kInstanceGet:
    case mirror::MethodHandle::Kind::kInstancePut:
    case mirror::MethodHandle::Kind::kStaticGet:
    case mirror::MethodHandle::Kind::kStaticPut:
      return MethodHandleFieldAccess(
          self, shadow_frame, method_handle, callsite_type, operands, result);
    case mirror::MethodHandle::Kind::kInvokeTransform:
      return MethodHandleInvokeTransform(
          self, shadow_frame, method_handle, callsite_type, operands, result);
    case mirror::MethodHandle::Kind::kInvokeVarHandle:
    case mirror::MethodHandle::Kind::kInvokeVarHandleExact:
      return DoVarHandleInvokeTranslation(
          self, shadow_frame, method_handle, callsite_type, operands, result);
  }
}

static bool MethodHandleInvokeInternal(Thread* self,
                                       ShadowFrame& shadow_frame,
                                       Handle<mirror::MethodHandle> method_handle,
                                       Handle<mirror::MethodType> callsite_type,
                                       const InstructionOperands* const operands,
                                       JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<2> hs(self);
  Handle<mirror::MethodType> method_handle_type(hs.NewHandle(method_handle->GetMethodType()));
  // Non-exact invoke behaves as calling mh.asType(newType). In ART, asType() is implemented
  // as a transformer and it is expensive to call, so check first if it's really necessary.
  //
  // There are two cases where the asType() transformation can be skipped:
  //
  // 1) the call site and the type of the MethodHandle match, i.e. code is calling invoke()
  //    unnecessarily.
  //
  // 2) when the call site can be trivially converted to the MethodHandle type due to how
  //    values are represented in the ShadowFrame, i.e. all registers in the shadow frame are
  //    32-bit; there is no byte, short, char, etc. So a call site with arguments of these
  //    kinds can be trivially converted to one with int arguments. Similarly, if the reference
  //    types are assignable between the call site and the MethodHandle type, then the asType()
  //    transformation isn't really doing any work.
  //
  // The following IsInPlaceConvertible check determines if either of these opportunities to
  // skip asType() applies.
  if (callsite_type->IsInPlaceConvertible(method_handle_type.Get())) {
    return MethodHandleInvokeExact(
        self, shadow_frame, method_handle, method_handle_type, operands, result);
  }

  // Use the asType() variant of this MethodHandle to adapt the call site to the target.
  MutableHandle<mirror::MethodHandle> atc(hs.NewHandle(method_handle->GetAsTypeCache()));
  if (atc == nullptr || !callsite_type->IsExactMatch(atc->GetMethodType())) {
    // Cached asType adapter does not exist or is for another call site. Call
    // MethodHandle::asType() to get an appropriate adapter.
    ArtMethod* as_type = WellKnownClasses::java_lang_invoke_MethodHandle_asType;
    ObjPtr<mirror::MethodHandle> atc_method_handle = ObjPtr<mirror::MethodHandle>::DownCast(
        as_type->InvokeVirtual<'L', 'L'>(self, method_handle.Get(), callsite_type.Get()));
    if (atc_method_handle == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
    atc.Assign(atc_method_handle);
    DCHECK(!atc.IsNull());
  }

  return MethodHandleInvokeExact(self, shadow_frame, atc, callsite_type, operands, result);
}

}  // namespace

bool MethodHandleInvoke(Thread* self,
                        ShadowFrame& shadow_frame,
                        Handle<mirror::MethodHandle> method_handle,
                        Handle<mirror::MethodType> callsite_type,
                        const InstructionOperands* const operands,
                        JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  return MethodHandleInvokeInternal(
      self, shadow_frame, method_handle, callsite_type, operands, result);
}

bool MethodHandleInvokeExact(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const InstructionOperands* const operands,
                             JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  return MethodHandleInvokeExactInternal(
      self, shadow_frame, method_handle, callsite_type, operands, result);
}

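// Performs an exact invocation of |method_handle| using the arguments held in
// |emulated_frame|: the arguments are copied into a fresh shadow frame, the
// handle is invoked, and on success the return value is written back into the
// emulated frame.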
void MethodHandleInvokeExactWithFrame(Thread* self,
                                      Handle<mirror::MethodHandle> method_handle,
                                      Handle<mirror::EmulatedStackFrame> emulated_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> callsite_type = hs.NewHandle(emulated_frame->GetType());

  // Copy arguments from the EmulatedStackFrame to a ShadowFrame.
  const uint16_t num_vregs = callsite_type->NumberOfVRegs();

  const char* old_cause = self->StartAssertNoThreadSuspension("EmulatedStackFrame to ShadowFrame");
  ArtMethod* invoke_exact = WellKnownClasses::java_lang_invoke_MethodHandle_invokeExact;
  ShadowFrameAllocaUniquePtr shadow_frame =
      CREATE_SHADOW_FRAME(num_vregs, invoke_exact, /*dex_pc*/ 0);
  emulated_frame->WriteToShadowFrame(self, callsite_type, 0, shadow_frame.get());
  self->EndAssertNoThreadSuspension(old_cause);

  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);
  self->PushShadowFrame(shadow_frame.get());

  JValue result;
  RangeInstructionOperands operands(0, num_vregs);
  bool success = MethodHandleInvokeExact(self,
                                         *shadow_frame.get(),
                                         method_handle,
                                         callsite_type,
                                         &operands,
                                         &result);
  DCHECK_NE(success, self->IsExceptionPending());
  if (success) {
    emulated_frame->SetReturnValue(self, result);
  }

  self->PopShadowFrame();
  self->PopManagedStackFragment(fragment);
}

}  // namespace art