/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dalvik_system_VMStack.h"

#include <type_traits>

#include "nativehelper/jni_macros.h"

#include "art_method-inl.h"
#include "gc/task_processor.h"
#include "jni/jni_internal.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/object-inl.h"
#include "native_util.h"
#include "nth_caller_visitor.h"
#include "scoped_fast_native_object_access-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread_list.h"

namespace art HIDDEN {

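// Runs `fn` against the thread named by `peer` and returns its result, or nullptr on failure.
// If `peer` is the calling thread's own peer, `fn` runs directly. Otherwise the target is
// suspended via SuspendThreadByPeer, `fn` runs while the caller is runnable again, and the
// target is resumed afterwards. Yields nullptr if the target has already exited, or if it is
// (or might be) the HeapTaskDaemon, which must not stay suspended because building a trace
// allocates.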
template <typename T,
          typename ResultT =
              typename std::invoke_result_t<T, Thread*, const ScopedFastNativeObjectAccess&>>
static ResultT GetThreadStack(const ScopedFastNativeObjectAccess& soa, jobject peer, T fn)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ResultT trace = nullptr;
  ObjPtr<mirror::Object> decoded_peer = soa.Decode<mirror::Object>(peer);
  if (decoded_peer == soa.Self()->GetPeer()) {
    trace = fn(soa.Self(), soa);
    return trace;
  }
  // Suspend thread to build stack trace.
  ScopedThreadSuspension sts(soa.Self(), ThreadState::kNative);
  Runtime* runtime = Runtime::Current();
  ThreadList* thread_list = runtime->GetThreadList();
  Thread* thread = thread_list->SuspendThreadByPeer(peer, SuspendReason::kInternal);
  if (thread != nullptr) {
    // If we were asked for the HeapTaskDaemon's stack trace, we went ahead and suspended it.
    // It's usually already in a suspended state anyway. But we should immediately give up and
    // resume it, since we must be able to allocate while generating the stack trace.
    if (!runtime->GetHeap()->GetTaskProcessor()->IsRunningThread(thread, /*wait=*/true)) {
      {
        // Must be runnable to create returned array.
        ScopedObjectAccess soa2(soa.Self());
        trace = fn(thread, soa);
      }
      // Else either thread is the HeapTaskDaemon, or we couldn't identify the thread yet. The
      // HeapTaskDaemon can appear in enumerations before it is registered with the task
      // processor, and we don't wait indefinitely, so there is a tiny chance of the latter.
    }
    // Restart suspended thread.
    bool resumed = thread_list->Resume(thread, SuspendReason::kInternal);
    DCHECK(resumed);
  }
  return trace;
}

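// Fills `javaSteArray` with the stack trace of `javaThread` and returns the number of frames
// written, or 0 if no trace could be obtained.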
static jint VMStack_fillStackTraceElements(JNIEnv* env, jclass, jobject javaThread,
                                           jobjectArray javaSteArray) {
  ScopedFastNativeObjectAccess soa(env);
  auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
      REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
    return soaa.AddLocalReference<jobject>(thread->CreateInternalStackTrace(soaa));
  };
  jobject trace = GetThreadStack(soa, javaThread, fn);
  if (trace == nullptr) {
    return 0;
  }
  int32_t depth;
  Thread::InternalStackTraceToStackTraceElementArray(soa, trace, javaSteArray, &depth);
  return depth;
}

// Returns the defining class loader of the caller's caller.
static jobject VMStack_getCallingClassLoader(JNIEnv* env, jclass) {
  ScopedFastNativeObjectAccess soa(env);
  NthCallerVisitor visitor(soa.Self(), 2);
  visitor.WalkStack();
  if (UNLIKELY(visitor.caller == nullptr)) {
    // The caller is an attached native thread.
    return nullptr;
  }
  return soa.AddLocalReference<jobject>(visitor.caller->GetDeclaringClass()->GetClassLoader());
}

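// Walks the calling thread's stack and returns the defining class loader of the first frame
// whose class was not loaded by the boot class loader (which ART represents as null). Returns
// null if no such frame exists.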
static jobject VMStack_getClosestUserClassLoader(JNIEnv* env, jclass) {
  struct ClosestUserClassLoaderVisitor : public StackVisitor {
    explicit ClosestUserClassLoaderVisitor(Thread* thread)
        : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          class_loader(nullptr) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(class_loader == nullptr);
      ObjPtr<mirror::Class> c = GetMethod()->GetDeclaringClass();
      // c is null for runtime methods.
      if (c != nullptr) {
        ObjPtr<mirror::Object> cl = c->GetClassLoader();
        if (cl != nullptr) {
          class_loader = cl;
          return false;
        }
      }
      return true;
    }

    ObjPtr<mirror::Object> class_loader;
  };
  ScopedFastNativeObjectAccess soa(env);
  ClosestUserClassLoaderVisitor visitor(soa.Self());
  visitor.WalkStack();
  return soa.AddLocalReference<jobject>(visitor.class_loader);
}

// Returns the class of the caller's caller's caller.
static jclass VMStack_getStackClass2(JNIEnv* env, jclass) {
  ScopedFastNativeObjectAccess soa(env);
  NthCallerVisitor visitor(soa.Self(), 3);
  visitor.WalkStack();
  if (UNLIKELY(visitor.caller == nullptr)) {
    // The caller is an attached native thread.
    return nullptr;
  }
  return soa.AddLocalReference<jclass>(visitor.caller->GetDeclaringClass());
}

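// Returns the stack trace of `javaThread` as a StackTraceElement[], or null if the thread has
// exited or its trace could not be captured.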
static jobjectArray VMStack_getThreadStackTrace(JNIEnv* env, jclass, jobject javaThread) {
  ScopedFastNativeObjectAccess soa(env);
  auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
      REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
    return soaa.AddLocalReference<jobject>(thread->CreateInternalStackTrace(soaa));
  };
  jobject trace = GetThreadStack(soa, javaThread, fn);
  if (trace == nullptr) {
    return nullptr;
  }
  return Thread::InternalStackTraceToStackTraceElementArray(soa, trace);
}

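// Like getThreadStackTrace, but returns dalvik.system.AnnotatedStackTraceElement[], pairing
// each frame with lock information (see Thread::CreateAnnotatedStackTrace).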
static jobjectArray VMStack_getAnnotatedThreadStackTrace(JNIEnv* env, jclass, jobject javaThread) {
  ScopedFastNativeObjectAccess soa(env);
  auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
      REQUIRES_SHARED(Locks::mutator_lock_) -> jobjectArray {
    return thread->CreateAnnotatedStackTrace(soaa);
  };
  return GetThreadStack(soa, javaThread, fn);
}

static JNINativeMethod gMethods[] = {
  FAST_NATIVE_METHOD(VMStack, fillStackTraceElements, "(Ljava/lang/Thread;[Ljava/lang/StackTraceElement;)I"),
  FAST_NATIVE_METHOD(VMStack, getCallingClassLoader, "()Ljava/lang/ClassLoader;"),
  FAST_NATIVE_METHOD(VMStack, getClosestUserClassLoader, "()Ljava/lang/ClassLoader;"),
  FAST_NATIVE_METHOD(VMStack, getStackClass2, "()Ljava/lang/Class;"),
  FAST_NATIVE_METHOD(VMStack, getThreadStackTrace, "(Ljava/lang/Thread;)[Ljava/lang/StackTraceElement;"),
  FAST_NATIVE_METHOD(VMStack, getAnnotatedThreadStackTrace, "(Ljava/lang/Thread;)[Ldalvik/system/AnnotatedStackTraceElement;"),
};
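
// For reference, the Java-side declarations in dalvik.system.VMStack that these descriptors
// imply look roughly as follows. This is a sketch reconstructed from the JNI signatures above
// (FAST_NATIVE_METHOD corresponds to @FastNative); parameter names are illustrative, not
// copied from libcore:
//
//   @FastNative static native int fillStackTraceElements(Thread t, StackTraceElement[] out);
//   @FastNative static native ClassLoader getCallingClassLoader();
//   @FastNative static native ClassLoader getClosestUserClassLoader();
//   @FastNative static native Class<?> getStackClass2();
//   @FastNative static native StackTraceElement[] getThreadStackTrace(Thread t);
//   @FastNative static native AnnotatedStackTraceElement[]
//       getAnnotatedThreadStackTrace(Thread t);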

void register_dalvik_system_VMStack(JNIEnv* env) {
  REGISTER_NATIVE_METHODS("dalvik/system/VMStack");
}

}  // namespace art