1 /*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
18 #define ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
19
20 #include "register_line.h"
21
22 #include "base/logging.h" // For VLOG.
23 #include "method_verifier.h"
24 #include "reg_type_cache-inl.h"
25
26 namespace art HIDDEN {
27 namespace verifier {
28
// Should we dump a warning on failures to verify balanced locking? That would be an indication to
// developers that their code will be slow. Warnings are emitted via VLOG(verifier); see e.g.
// `VerifyMonitorStackEmpty()` below.
static constexpr bool kDumpLockFailures = true;
32
// Returns the compact `RegTypeCache` id stored for register `vsrc` (resolved to a full
// `RegType` via `RegTypeCache::GetFromId()`, see `GetRegisterType()` below).
inline uint16_t RegisterLine::GetRegisterTypeId(uint32_t vsrc) const {
  // The register index was validated during the static pass, so we don't need to check it here.
  DCHECK_LT(vsrc, num_regs_);
  return line_[vsrc];
}
38
// Returns the full `RegType` for register `vsrc` by looking up its stored id in the
// verifier's `RegTypeCache`.
inline const RegType& RegisterLine::GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const {
  return verifier->GetRegTypeCache()->GetFromId(GetRegisterTypeId(vsrc));
}
42
43 template <LockOp kLockOp>
SetRegisterTypeImpl(uint32_t vdst,uint16_t new_id)44 inline void RegisterLine::SetRegisterTypeImpl(uint32_t vdst, uint16_t new_id) {
45 DCHECK_LT(vdst, num_regs_);
46 // Note: previously we failed when asked to set a conflict. However, conflicts are OK as long
47 // as they are not accessed, and our backends can handle this nowadays.
48 line_[vdst] = new_id;
49 switch (kLockOp) {
50 case LockOp::kClear:
51 // Clear the monitor entry bits for this register.
52 ClearAllRegToLockDepths(vdst);
53 break;
54 case LockOp::kKeep:
55 break;
56 }
57 }
58
// Sets register `vdst` to the (non-wide) kind `new_kind`, clearing any lock depths recorded
// for the register.
inline void RegisterLine::SetRegisterType(uint32_t vdst, RegType::Kind new_kind) {
  // Wide halves must go through `SetRegisterTypeWide()` so both registers stay consistent.
  DCHECK(!RegType::IsLowHalf(new_kind));
  DCHECK(!RegType::IsHighHalf(new_kind));
  SetRegisterTypeImpl<LockOp::kClear>(vdst, RegTypeCache::IdForRegKind(new_kind));
}
64
// Sets register `vdst` to the (non-wide) type `new_type`. `kLockOp` selects whether the
// register's recorded lock depths are cleared or kept; keeping them is only meaningful for
// reference types.
template <LockOp kLockOp>
inline void RegisterLine::SetRegisterType(uint32_t vdst, const RegType& new_type) {
  // Wide halves must go through `SetRegisterTypeWide()` so both registers stay consistent.
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  // Should only keep locks for reference types.
  DCHECK_IMPLIES(kLockOp == LockOp::kKeep, new_type.IsReferenceTypes());
  SetRegisterTypeImpl<kLockOp>(vdst, new_type.GetId());
}
73
SetRegisterTypeWideImpl(uint32_t vdst,uint16_t new_id1,uint16_t new_id2)74 inline void RegisterLine::SetRegisterTypeWideImpl(uint32_t vdst,
75 uint16_t new_id1,
76 uint16_t new_id2) {
77 DCHECK_LT(vdst + 1, num_regs_);
78 line_[vdst] = new_id1;
79 line_[vdst + 1] = new_id2;
80 // Clear the monitor entry bits for this register.
81 ClearAllRegToLockDepths(vdst);
82 ClearAllRegToLockDepths(vdst + 1);
83 }
84
// Sets registers `vdst` / `vdst + 1` to the wide pair (`new_kind1`, `new_kind2`).
inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
                                              RegType::Kind new_kind1,
                                              RegType::Kind new_kind2) {
  // The two kinds must form a matching low-half/high-half pair.
  DCHECK(RegType::CheckWidePair(new_kind1, new_kind2));
  SetRegisterTypeWideImpl(
      vdst, RegTypeCache::IdForRegKind(new_kind1), RegTypeCache::IdForRegKind(new_kind2));
}
92
// Sets registers `vdst` / `vdst + 1` to the wide pair (`new_type1`, `new_type2`).
inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
                                              const RegType& new_type1,
                                              const RegType& new_type2) {
  // The two types must form a matching low-half/high-half pair.
  DCHECK(new_type1.CheckWidePair(new_type2));
  SetRegisterTypeWideImpl(vdst, new_type1.GetId(), new_type2.GetId());
}
99
SetResultTypeToUnknown(RegTypeCache * reg_types)100 inline void RegisterLine::SetResultTypeToUnknown(RegTypeCache* reg_types) {
101 result_[0] = reg_types->Undefined().GetId();
102 result_[1] = result_[0];
103 }
104
// Records a category-1 (non-wide) method result type; the second result slot is marked
// `Undefined` since only one register is used.
inline void RegisterLine::SetResultRegisterType(MethodVerifier* verifier, const RegType& new_type) {
  // Wide results must go through `SetResultRegisterTypeWide()`.
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  result_[0] = new_type.GetId();
  result_[1] = verifier->GetRegTypeCache()->Undefined().GetId();
}
111
// Records a wide method result type occupying both result slots.
inline void RegisterLine::SetResultRegisterTypeWide(const RegType& new_type1,
                                                    const RegType& new_type2) {
  // The two types must form a matching low-half/high-half pair.
  DCHECK(new_type1.CheckWidePair(new_type2));
  result_[0] = new_type1.GetId();
  result_[1] = new_type2.GetId();
}
118
// Records the result of a `new-instance`: stores the uninitialized type in `vdst` and
// remembers the allocation's dex pc so the matching constructor call can be identified later.
inline void RegisterLine::SetRegisterTypeForNewInstance(uint32_t vdst,
                                                        const RegType& uninit_type,
                                                        uint32_t dex_pc) {
  DCHECK_LT(vdst, num_regs_);
  // Only uninitialized (possibly unresolved) reference types carry an allocation dex pc.
  DCHECK(NeedsAllocationDexPc(uninit_type));
  SetRegisterType<LockOp::kClear>(vdst, uninit_type);
  // Lazily allocate the per-register dex pc array before recording.
  EnsureAllocationDexPcsAvailable();
  allocation_dex_pcs_[vdst] = dex_pc;
}
128
CopyRegister1(MethodVerifier * verifier,uint32_t vdst,uint32_t vsrc,TypeCategory cat)129 inline void RegisterLine::CopyRegister1(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc,
130 TypeCategory cat) {
131 DCHECK(cat == kTypeCategory1nr || cat == kTypeCategoryRef);
132 const RegType& type = GetRegisterType(verifier, vsrc);
133 if (type.IsLowHalf() || type.IsHighHalf()) {
134 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Expected category1 register type not '"
135 << type << "'";
136 return;
137 }
138 // FIXME: If `vdst == vsrc`, we clear locking information before we try to copy it below. Adding
139 // `move-object v1, v1` to the middle of `OK.runStraightLine()` in run-test 088 makes it fail.
140 SetRegisterType<LockOp::kClear>(vdst, type);
141 if (!type.IsConflict() && // Allow conflicts to be copied around.
142 ((cat == kTypeCategory1nr && !type.IsCategory1Types()) ||
143 (cat == kTypeCategoryRef && !type.IsReferenceTypes()))) {
144 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy1 v" << vdst << "<-v" << vsrc << " type=" << type
145 << " cat=" << static_cast<int>(cat);
146 } else if (cat == kTypeCategoryRef) {
147 CopyRegToLockDepth(vdst, vsrc);
148 if (allocation_dex_pcs_ != nullptr) {
149 // Copy allocation dex pc for uninitialized types. (Copy unused value for other types.)
150 allocation_dex_pcs_[vdst] = allocation_dex_pcs_[vsrc];
151 }
152 }
153 }
154
CopyRegister2(MethodVerifier * verifier,uint32_t vdst,uint32_t vsrc)155 inline void RegisterLine::CopyRegister2(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc) {
156 const RegType& type_l = GetRegisterType(verifier, vsrc);
157 const RegType& type_h = GetRegisterType(verifier, vsrc + 1);
158
159 if (!type_l.CheckWidePair(type_h)) {
160 verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy2 v" << vdst << "<-v" << vsrc
161 << " type=" << type_l << "/" << type_h;
162 } else {
163 SetRegisterTypeWide(vdst, type_l, type_h);
164 }
165 }
166
// Whether `reg_type` is one for which the allocation dex pc is tracked: an uninitialized
// reference produced by `new-instance` (resolved or unresolved).
inline bool RegisterLine::NeedsAllocationDexPc(const RegType& reg_type) {
  return reg_type.IsUninitializedReference() || reg_type.IsUnresolvedUninitializedReference();
}
170
// Debug-build check: no register currently holding an uninitialized `new-instance` type may
// already have `dex_pc` recorded as its allocation dex pc.
inline void RegisterLine::DCheckUniqueNewInstanceDexPc(MethodVerifier* verifier, uint32_t dex_pc) {
  if (kIsDebugBuild && allocation_dex_pcs_ != nullptr) {
    // Note: We do not clear the `allocation_dex_pcs_` entries when copying data from
    // a register line without `allocation_dex_pcs_`, or when we merge types and find
    // a conflict, so the same dex pc can remain in the `allocation_dex_pcs_` array
    // but it cannot be recorded for a `new-instance` uninitialized type.
    RegTypeCache* reg_types = verifier->GetRegTypeCache();
    for (uint32_t i = 0; i != num_regs_; ++i) {
      // Only registers whose type still tracks an allocation dex pc are relevant; stale
      // entries for other registers are ignored (see note above).
      if (NeedsAllocationDexPc(reg_types->GetFromId(line_[i]))) {
        CHECK_NE(allocation_dex_pcs_[i], dex_pc) << i << " " << reg_types->GetFromId(line_[i]);
      }
    }
  }
}
185
EnsureAllocationDexPcsAvailable()186 inline void RegisterLine::EnsureAllocationDexPcsAvailable() {
187 DCHECK_NE(num_regs_, 0u);
188 if (allocation_dex_pcs_ == nullptr) {
189 ArenaAllocatorAdapter<uint32_t> allocator(monitors_.get_allocator());
190 allocation_dex_pcs_ = allocator.allocate(num_regs_);
191 std::fill_n(allocation_dex_pcs_, num_regs_, kNoDexPc);
192 }
193 }
194
VerifyMonitorStackEmpty(MethodVerifier * verifier)195 inline void RegisterLine::VerifyMonitorStackEmpty(MethodVerifier* verifier) const {
196 if (MonitorStackDepth() != 0) {
197 verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
198 if (kDumpLockFailures) {
199 VLOG(verifier) << "expected empty monitor stack in "
200 << verifier->GetMethodReference().PrettyMethod();
201 }
202 }
203 }
204
// Size in bytes of a `RegisterLine` for `num_regs` registers: the fixed header up to the
// trailing `line_` array plus one `uint16_t` type-id slot per register.
inline size_t RegisterLine::ComputeSize(size_t num_regs) {
  return OFFSETOF_MEMBER(RegisterLine, line_) + num_regs * sizeof(uint16_t);
}
208
// Allocates arena memory sized for `num_regs` registers (see `ComputeSize()`) and
// placement-constructs a `RegisterLine` in it.
inline RegisterLine* RegisterLine::Create(size_t num_regs,
                                          ArenaAllocator& allocator,
                                          RegTypeCache* reg_types) {
  void* memory = allocator.Alloc(ComputeSize(num_regs));
  return new (memory) RegisterLine(num_regs, allocator, reg_types);
}
215
// Constructs a register line with all registers implicitly `Undefined`: the arena
// zero-initializes the trailing `line_` storage and the `Undefined` cache id is 0, so only
// the result slots need explicit initialization.
inline RegisterLine::RegisterLine(size_t num_regs,
                                  ArenaAllocator& allocator,
                                  RegTypeCache* reg_types)
    : num_regs_(num_regs),
      allocation_dex_pcs_(nullptr),  // Allocated lazily, see `EnsureAllocationDexPcsAvailable()`.
      monitors_(allocator.Adapter(kArenaAllocVerifier)),
      reg_to_lock_depths_(std::less<uint32_t>(),
                          allocator.Adapter(kArenaAllocVerifier)),
      this_initialized_(false) {
  // `ArenaAllocator` guarantees zero-initialization.
  static_assert(RegTypeCache::kUndefinedCacheId == 0u);
  DCHECK(std::all_of(line_,
                     line_ + num_regs_,
                     [](auto id) { return id == RegTypeCache::kUndefinedCacheId;}));
  SetResultTypeToUnknown(reg_types);
}
232
ClearRegToLockDepth(size_t reg,size_t depth)233 inline void RegisterLine::ClearRegToLockDepth(size_t reg, size_t depth) {
234 CHECK_LT(depth, 32u);
235 DCHECK(IsSetLockDepth(reg, depth));
236 auto it = reg_to_lock_depths_.find(reg);
237 DCHECK(it != reg_to_lock_depths_.end());
238 uint32_t depths = it->second ^ (1 << depth);
239 if (depths != 0) {
240 it->second = depths;
241 } else {
242 reg_to_lock_depths_.erase(it);
243 }
244 // Need to unlock every register at the same lock depth. These are aliased locks.
245 uint32_t mask = 1 << depth;
246 for (auto& pair : reg_to_lock_depths_) {
247 if ((pair.second & mask) != 0) {
248 VLOG(verifier) << "Also unlocking " << pair.first;
249 pair.second ^= mask;
250 }
251 }
252 }
253
inline void RegisterLineArenaDelete::operator()(RegisterLine* ptr) const {
  if (ptr != nullptr) {
    // Capture the fields needed for cleanup before running the destructor.
    uint32_t num_regs = ptr->NumRegs();
    uint32_t* allocation_dex_pcs = ptr->allocation_dex_pcs_;
    ptr->~RegisterLine();
    // Protect the line's arena memory so that stale accesses after deletion can be caught.
    ProtectMemory(ptr, RegisterLine::ComputeSize(num_regs));
    if (allocation_dex_pcs != nullptr) {
      // The dex pc array was allocated separately (see `EnsureAllocationDexPcsAvailable()`);
      // protect it the same way instead of the base `ArenaDelete` behavior.
      struct AllocationDexPcsDelete : ArenaDelete<uint32_t> {
        void operator()(uint32_t* ptr, size_t size) {
          ProtectMemory(ptr, size);
        }
      };
      AllocationDexPcsDelete()(allocation_dex_pcs, num_regs * sizeof(*allocation_dex_pcs));
    }
  }
}
270
271 } // namespace verifier
272 } // namespace art
273
274 #endif // ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
275