/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "register_line.h"

#include <cinttypes>
#include <limits>

#include "android-base/stringprintf.h"

#include "dex/dex_instruction-inl.h"
#include "method_verifier-inl.h"
#include "reg_type-inl.h"
#include "register_line-inl.h"

namespace art HIDDEN {
namespace verifier {

using android::base::StringPrintf;

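// Check that the "this" argument of a constructor has been initialized (i.e. a superclass
// or delegating constructor has been called) before the method returns. Reports a hard
// failure and returns false otherwise.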
bool RegisterLine::CheckConstructorReturn(MethodVerifier* verifier) const {
  if (kIsDebugBuild && this_initialized_) {
    // Ensure that there is no UninitializedThisReference type anymore if this_initialized_ is true.
    for (size_t i = 0; i < num_regs_; i++) {
      const RegType& type = GetRegisterType(verifier, i);
      CHECK(!type.IsUninitializedThisReference() &&
            !type.IsUnresolvedUninitializedThisReference())
          << i << ": " << type.IsUninitializedThisReference() << " in "
          << verifier->GetMethodReference().PrettyMethod();
    }
  }
  if (!this_initialized_) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
        << "Constructor returning without calling superclass constructor";
  }
  return this_initialized_;
}

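// Copy the register types, allocation dex pcs, monitor stack, lock depths and the
// "this"-initialized flag from another register line with the same number of registers.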
void RegisterLine::CopyFromLine(const RegisterLine* src) {
  DCHECK_EQ(num_regs_, src->num_regs_);
  memcpy(&line_, &src->line_, num_regs_ * sizeof(uint16_t));
  // Copy `allocation_dex_pcs_`. Note that if the `src` does not have `allocation_dex_pcs_`
  // allocated, we retain the array allocated for this register line to avoid wasting
  // memory by allocating a new array later. This means that the `allocation_dex_pcs_` can
  // be filled with bogus values not tied to a `new-instance` uninitialized type.
  if (src->allocation_dex_pcs_ != nullptr) {
    EnsureAllocationDexPcsAvailable();
    memcpy(allocation_dex_pcs_, src->allocation_dex_pcs_, num_regs_ * sizeof(uint32_t));
  }
  monitors_ = src->monitors_;
  reg_to_lock_depths_ = src->reg_to_lock_depths_;
  this_initialized_ = src->this_initialized_;
}

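// Update all registers holding the given uninitialized reference to the corresponding
// initialized type. For an uninitialized "this" reference, this also records that the
// constructor call on "this" has happened; for `new-instance` references, only registers
// whose value was allocated at the same dex pc are updated.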
void RegisterLine::MarkRefsAsInitialized(MethodVerifier* verifier, uint32_t vsrc) {
  const RegType& uninit_type = GetRegisterType(verifier, vsrc);
  DCHECK(uninit_type.IsUninitializedTypes());
  const RegType& init_type = verifier->GetRegTypeCache()->FromUninitialized(uninit_type);
  size_t changed = 0;
  // Is this initializing "this"?
  if (uninit_type.IsUninitializedThisReference() ||
      uninit_type.IsUnresolvedUninitializedThisReference()) {
    this_initialized_ = true;
    for (uint32_t i = 0; i < num_regs_; i++) {
      if (GetRegisterType(verifier, i).Equals(uninit_type)) {
        line_[i] = init_type.GetId();
        changed++;
      }
    }
  } else {
    DCHECK(NeedsAllocationDexPc(uninit_type));
    DCHECK(allocation_dex_pcs_ != nullptr);
    uint32_t dex_pc = allocation_dex_pcs_[vsrc];
    for (uint32_t i = 0; i < num_regs_; i++) {
      if (GetRegisterType(verifier, i).Equals(uninit_type) && allocation_dex_pcs_[i] == dex_pc) {
        line_[i] = init_type.GetId();
        changed++;
      }
    }
  }
  DCHECK_GT(changed, 0u);
}

void RegisterLine::MarkAllRegistersAsConflicts(MethodVerifier* verifier) {
  uint16_t conflict_type_id = verifier->GetRegTypeCache()->Conflict().GetId();
  for (uint32_t i = 0; i < num_regs_; i++) {
    line_[i] = conflict_type_id;
  }
}

void RegisterLine::MarkAllRegistersAsConflictsExcept(MethodVerifier* verifier, uint32_t vsrc) {
  uint16_t conflict_type_id = verifier->GetRegTypeCache()->Conflict().GetId();
  for (uint32_t i = 0; i < num_regs_; i++) {
    if (i != vsrc) {
      line_[i] = conflict_type_id;
    }
  }
}

void RegisterLine::MarkAllRegistersAsConflictsExceptWide(MethodVerifier* verifier, uint32_t vsrc) {
  uint16_t conflict_type_id = verifier->GetRegTypeCache()->Conflict().GetId();
  for (uint32_t i = 0; i < num_regs_; i++) {
    if ((i != vsrc) && (i != (vsrc + 1))) {
      line_[i] = conflict_type_id;
    }
  }
}

std::string RegisterLine::Dump(MethodVerifier* verifier) const {
  std::string result;
  for (size_t i = 0; i < num_regs_; i++) {
    result += StringPrintf("%zu:[", i);
    result += GetRegisterType(verifier, i).Dump();
    result += "],";
  }
  for (const auto& monitor : monitors_) {
    result += StringPrintf("{%d},", monitor);
  }
  for (auto& pairs : reg_to_lock_depths_) {
    result += StringPrintf("<%d -> %" PRIx64 ">",
                           pairs.first,
                           static_cast<uint64_t>(pairs.second));
  }
  return result;
}

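/*
 * Implement "move-result" / "move-result-object". Copy the category-1 value from the
 * result register to another register, and reset the result register.
 */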
void RegisterLine::CopyResultRegister1(MethodVerifier* verifier, uint32_t vdst, bool is_reference) {
  const RegType& type = verifier->GetRegTypeCache()->GetFromId(result_[0]);
  if ((!is_reference && !type.IsCategory1Types()) ||
      (is_reference && !type.IsReferenceTypes())) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
        << "copyRes1 v" << vdst << "<- result0" << " type=" << type;
  } else {
    DCHECK(verifier->GetRegTypeCache()->GetFromId(result_[1]).IsUndefined());
    SetRegisterType<LockOp::kClear>(vdst, type);
    result_[0] = verifier->GetRegTypeCache()->Undefined().GetId();
  }
}

/*
 * Implement "move-result-wide". Copy the category-2 value from the result
 * register to another register, and reset the result register.
 */
void RegisterLine::CopyResultRegister2(MethodVerifier* verifier, uint32_t vdst) {
  const RegType& type_l = verifier->GetRegTypeCache()->GetFromId(result_[0]);
  const RegType& type_h = verifier->GetRegTypeCache()->GetFromId(result_[1]);
  if (!type_l.IsCategory2Types()) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD)
        << "copyRes2 v" << vdst << "<- result0" << " type=" << type_l;
  } else {
    DCHECK(type_l.CheckWidePair(type_h));  // Set should never allow this case
    SetRegisterTypeWide(vdst, type_l, type_h);  // also sets the high
    result_[0] = verifier->GetRegTypeCache()->Undefined().GetId();
    result_[1] = verifier->GetRegTypeCache()->Undefined().GetId();
  }
}

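// Pseudo-register used to track locks taken on the null constant. Null literals can alias
// any reference, so their lock depths are recorded under this extra index, which no real
// dex register can occupy.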
static constexpr uint32_t kVirtualNullRegister = std::numeric_limits<uint32_t>::max();

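// Verify a monitor-enter on `reg_idx` at instruction `insn_idx`: check that the register
// holds a reference and that the monitor stack does not overflow, then push the enter
// instruction and record the lock depth for the register.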
void RegisterLine::PushMonitor(MethodVerifier* verifier, uint32_t reg_idx, int32_t insn_idx) {
  const RegType& reg_type = GetRegisterType(verifier, reg_idx);
  if (!reg_type.IsReferenceTypes()) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "monitor-enter on non-object ("
                                                << reg_type << ")";
  } else if (monitors_.size() >= kMaxMonitorStackDepth) {
    verifier->Fail(VERIFY_ERROR_LOCKING);
    if (kDumpLockFailures) {
      VLOG(verifier) << "monitor-enter stack overflow while verifying "
                     << verifier->GetMethodReference().PrettyMethod();
    }
  } else {
    if (SetRegToLockDepth(reg_idx, monitors_.size())) {
      // Null literals can establish aliases that we can't easily track. As such, handle the zero
      // case as the 2^32-1 register (which isn't available in dex bytecode).
      if (reg_type.IsZero()) {
        SetRegToLockDepth(kVirtualNullRegister, monitors_.size());
      }

      monitors_.push_back(insn_idx);
    } else {
      verifier->Fail(VERIFY_ERROR_LOCKING);
      if (kDumpLockFailures) {
        VLOG(verifier) << "unexpected monitor-enter on register v" << reg_idx << " in "
                       << verifier->GetMethodReference().PrettyMethod();
      }
    }
  }
}

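// Verify a monitor-exit on `reg_idx`: check that the register holds a reference, pop the
// monitor stack, and check that the register (or its null-constant alias) holds the most
// recently acquired lock.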
void RegisterLine::PopMonitor(MethodVerifier* verifier, uint32_t reg_idx) {
  const RegType& reg_type = GetRegisterType(verifier, reg_idx);
  if (!reg_type.IsReferenceTypes()) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "monitor-exit on non-object (" << reg_type << ")";
  } else if (monitors_.empty()) {
    verifier->Fail(VERIFY_ERROR_LOCKING);
    if (kDumpLockFailures) {
      VLOG(verifier) << "monitor-exit stack underflow while verifying "
                     << verifier->GetMethodReference().PrettyMethod();
    }
  } else {
    monitors_.pop_back();

    bool success = IsSetLockDepth(reg_idx, monitors_.size());

    if (!success && reg_type.IsZero()) {
      // Null literals can establish aliases that we can't easily track. As such, handle the zero
      // case as the 2^32-1 register (which isn't available in dex bytecode).
      success = IsSetLockDepth(kVirtualNullRegister, monitors_.size());
      if (success) {
        reg_idx = kVirtualNullRegister;
      }
    }

    if (!success) {
      verifier->Fail(VERIFY_ERROR_LOCKING);
      if (kDumpLockFailures) {
        VLOG(verifier) << "monitor-exit not unlocking the top of the monitor stack while verifying "
                       << verifier->GetMethodReference().PrettyMethod();
      }
    } else {
      // Record the register was unlocked. This clears all aliases, thus it will also clear the
      // null lock, if necessary.
      ClearRegToLockDepth(reg_idx, monitors_.size());
    }
  }
}

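// Check whether the lock levels held by `src` in `src_map` are also held by some other
// register in `search_map`, i.e. whether an alias for the lock still exists. A register
// that is not locked at all is trivially considered aliased.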
bool FindLockAliasedRegister(uint32_t src,
                             const RegisterLine::RegToLockDepthsMap& src_map,
                             const RegisterLine::RegToLockDepthsMap& search_map) {
  auto it = src_map.find(src);
  if (it == src_map.end()) {
    // "Not locked" is trivially aliased.
    return true;
  }
  uint32_t src_lock_levels = it->second;
  if (src_lock_levels == 0) {
    // "Not locked" is trivially aliased.
    return true;
  }

  // Scan the map for the same value.
  for (const std::pair<const uint32_t, uint32_t>& pair : search_map) {
    if (pair.first != src && pair.second == src_lock_levels) {
      return true;
    }
  }

  // Nothing found, no alias.
  return false;
}

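// Merge `incoming_line` into this register line at a control-flow join: register types are
// merged pairwise, mismatched allocation dex pcs degrade to a conflict, the monitor stack
// and lock depths are reconciled (reporting locking failures on mismatch), and the
// "this"-initialized flag is cleared if either path left "this" uninitialized. Returns
// true if this line changed.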
bool RegisterLine::MergeRegisters(MethodVerifier* verifier, const RegisterLine* incoming_line) {
  bool changed = false;
  DCHECK(incoming_line != nullptr);
  for (size_t idx = 0; idx < num_regs_; idx++) {
    if (line_[idx] != incoming_line->line_[idx]) {
      const RegType& incoming_reg_type = incoming_line->GetRegisterType(verifier, idx);
      const RegType& cur_type = GetRegisterType(verifier, idx);
      const RegType& new_type = cur_type.Merge(
          incoming_reg_type, verifier->GetRegTypeCache(), verifier);
      changed = changed || !cur_type.Equals(new_type);
      line_[idx] = new_type.GetId();
    } else {
      auto needs_allocation_dex_pc = [&]() {
        return NeedsAllocationDexPc(verifier->GetRegTypeCache()->GetFromId(line_[idx]));
      };
      DCHECK_IMPLIES(needs_allocation_dex_pc(), allocation_dex_pcs_ != nullptr);
      DCHECK_IMPLIES(needs_allocation_dex_pc(), incoming_line->allocation_dex_pcs_ != nullptr);
      // Check for allocation dex pc mismatch first to try and avoid costly virtual calls.
      // For methods without any `new-instance` instructions, the `allocation_dex_pcs_` is null.
      if (allocation_dex_pcs_ != nullptr &&
          incoming_line->allocation_dex_pcs_ != nullptr &&
          allocation_dex_pcs_[idx] != incoming_line->allocation_dex_pcs_[idx] &&
          needs_allocation_dex_pc()) {
        line_[idx] = verifier->GetRegTypeCache()->Conflict().GetId();
      }
    }
  }
  if (monitors_.size() > 0 || incoming_line->monitors_.size() > 0) {
    if (monitors_.size() != incoming_line->monitors_.size()) {
      verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
      if (kDumpLockFailures) {
        VLOG(verifier) << "mismatched stack depths (depth=" << MonitorStackDepth()
                       << ", incoming depth=" << incoming_line->MonitorStackDepth() << ") in "
                       << verifier->GetMethodReference().PrettyMethod();
      }
    } else if (reg_to_lock_depths_ != incoming_line->reg_to_lock_depths_) {
      for (uint32_t idx = 0; idx < num_regs_; idx++) {
        size_t depths = reg_to_lock_depths_.count(idx);
        size_t incoming_depths = incoming_line->reg_to_lock_depths_.count(idx);
        if (depths != incoming_depths) {
          // Stack levels aren't matching. This is potentially bad, as we don't do a
          // flow-sensitive analysis.
          // However, this could be an alias of something locked in one path, and the alias was
          // destroyed in another path. It is fine to drop this as long as there's another alias
          // for the lock around. The last vanishing alias will then report that things would be
          // left unlocked. We need to check for aliases for both lock levels.
          //
          // Example (lock status in curly braces as pairs of register and lock levels):
          //
          //                lock v1 {v1=1}
          //                  |         |
          //   v0 = v1 {v0=1, v1=1}   v0 = v2 {v1=1}
          //                  |         |
          //                     {v1=1}
          //                     // Dropping v0, as the status can't be merged
          //                     // but the lock info ("locked at depth 1" and
          //                     // "not locked at all") is available.
          if (!FindLockAliasedRegister(idx,
                                       reg_to_lock_depths_,
                                       reg_to_lock_depths_) ||
              !FindLockAliasedRegister(idx,
                                       incoming_line->reg_to_lock_depths_,
                                       reg_to_lock_depths_)) {
            verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
            if (kDumpLockFailures) {
              VLOG(verifier) << "mismatched stack depths for register v" << idx
                             << ": " << depths << " != " << incoming_depths << " in "
                             << verifier->GetMethodReference().PrettyMethod();
            }
            break;
          }
          // We found aliases, set this to zero.
          reg_to_lock_depths_.erase(idx);
        } else if (depths > 0) {
          // Check whether they're actually the same levels.
          uint32_t locked_levels = reg_to_lock_depths_.find(idx)->second;
          uint32_t incoming_locked_levels = incoming_line->reg_to_lock_depths_.find(idx)->second;
          if (locked_levels != incoming_locked_levels) {
            // Lock levels aren't matching. This is potentially bad, as we don't do a
            // flow-sensitive analysis.
            // However, this could be an alias of something locked in one path, and the alias was
            // destroyed in another path. It is fine to drop this as long as there's another alias
            // for the lock around. The last vanishing alias will then report that things would be
            // left unlocked. We need to check for aliases for both lock levels.
            //
            // Example (lock status in curly braces as pairs of register and lock levels):
            //
            //                lock v1 {v1=1}
            //                lock v2 {v1=1, v2=2}
            //                  |                    |
            //   v0 = v1 {v0=1, v1=1, v2=2}   v0 = v2 {v0=2, v1=1, v2=2}
            //                  |                    |
            //                      {v1=1, v2=2}
            //                      // Dropping v0, as the status can't be
            //                      // merged but the lock info ("locked at
            //                      // depth 1" and "locked at depth 2") is
            //                      // available.
            if (!FindLockAliasedRegister(idx,
                                         reg_to_lock_depths_,
                                         reg_to_lock_depths_) ||
                !FindLockAliasedRegister(idx,
                                         incoming_line->reg_to_lock_depths_,
                                         reg_to_lock_depths_)) {
              // No aliases for both current and incoming, we'll lose information.
              verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
              if (kDumpLockFailures) {
                VLOG(verifier) << "mismatched lock levels for register v" << idx << ": "
                               << std::hex << locked_levels << std::dec << " != "
                               << std::hex << incoming_locked_levels << std::dec << " in "
                               << verifier->GetMethodReference().PrettyMethod();
              }
              break;
            }
            // We found aliases, set this to zero.
            reg_to_lock_depths_.erase(idx);
          }
        }
      }
    }
  }

  // Check whether "this" was initialized in both paths.
  if (this_initialized_ && !incoming_line->this_initialized_) {
    this_initialized_ = false;
    changed = true;
  }
  return changed;
}

}  // namespace verifier
}  // namespace art