1 //
2 // Copyright 2019 The Abseil Authors.
3 //
4 // Licensed under the Apache License, Version 2.0 (the "License");
5 // you may not use this file except in compliance with the License.
6 // You may obtain a copy of the License at
7 //
8 // https://www.apache.org/licenses/LICENSE-2.0
9 //
10 // Unless required by applicable law or agreed to in writing, software
11 // distributed under the License is distributed on an "AS IS" BASIS,
12 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 // See the License for the specific language governing permissions and
14 // limitations under the License.
15
16 #include "absl/flags/internal/flag.h"
17
18 #include <assert.h>
19 #include <stddef.h>
20 #include <stdint.h>
21 #include <string.h>
22
23 #include <array>
24 #include <atomic>
25 #include <cstring>
26 #include <memory>
27 #include <string>
28 #include <typeinfo>
29 #include <vector>
30
31 #include "absl/base/attributes.h"
32 #include "absl/base/call_once.h"
33 #include "absl/base/casts.h"
34 #include "absl/base/config.h"
35 #include "absl/base/const_init.h"
36 #include "absl/base/dynamic_annotations.h"
37 #include "absl/base/optimization.h"
38 #include "absl/flags/config.h"
39 #include "absl/flags/internal/commandlineflag.h"
40 #include "absl/flags/usage_config.h"
41 #include "absl/memory/memory.h"
42 #include "absl/strings/str_cat.h"
43 #include "absl/strings/string_view.h"
44 #include "absl/synchronization/mutex.h"
45
46 namespace absl {
47 ABSL_NAMESPACE_BEGIN
48 namespace flags_internal {
49
// The help message indicating that the commandline flag has been stripped. It
// will not show up when doing "-help" and its variants. The flag is stripped
// if ABSL_FLAGS_STRIP_HELP is set to 1 before including absl/flags/flag.h
// NOTE(review): the control-character framing presumably makes this sentinel
// easy to match exactly and impossible to collide with real help text —
// confirm against the consumers of this constant before changing it.
const char kStrippedFlagHelp[] = "\001\002\003\004 (unknown) \004\003\002\001";
54
55 namespace {
56
// Currently we only validate flag values for user-defined flag types.
// For every built-in type listed in ABSL_FLAGS_INTERNAL_SUPPORTED_TYPES this
// returns false, so FlagImpl::Write skips its unparse/re-parse round-trip
// check for those types; any other (user-defined) type returns true.
bool ShouldValidateFlagValue(FlagFastTypeId flag_type_id) {
#define DONT_VALIDATE(T, _) \
  if (flag_type_id == base_internal::FastTypeId<T>()) return false;
  ABSL_FLAGS_INTERNAL_SUPPORTED_TYPES(DONT_VALIDATE)
#undef DONT_VALIDATE

  return true;
}
66
// RAII helper used to temporarily unlock and relock `absl::Mutex`.
// This is used when we need to ensure that locks are released while
// invoking user supplied callbacks and then reacquired, since callbacks may
// need to acquire these locks themselves.
// Precondition: the calling thread holds `mu` at construction time; it holds
// it again after destruction.
class MutexRelock {
 public:
  explicit MutexRelock(absl::Mutex& mu) : mu_(mu) { mu_.Unlock(); }
  ~MutexRelock() { mu_.Lock(); }

  // Not copyable: a copy would unbalance the unlock/lock pairing.
  MutexRelock(const MutexRelock&) = delete;
  MutexRelock& operator=(const MutexRelock&) = delete;

 private:
  absl::Mutex& mu_;
};
82
// This is a freelist of leaked flag values and guard for its access.
// When we can't guarantee it is safe to reuse the memory for flag values,
// we move the memory to the freelist where it lives indefinitely, so it can
// still be safely accessed. This also prevents leak checkers from complaining
// about the leaked memory that can no longer be accessed through any pointer.
ABSL_CONST_INIT absl::Mutex s_freelist_guard(absl::kConstInit);
ABSL_CONST_INIT std::vector<void*>* s_freelist = nullptr;

// Parks `p` on the freelist forever. The vector itself is allocated lazily
// on first use, so this translation unit needs no dynamic initializers.
void AddToFreelist(void* p) {
  absl::MutexLock l(&s_freelist_guard);
  if (!s_freelist) {
    s_freelist = new std::vector<void*>;
  }
  s_freelist->push_back(p);
}
98
99 } // namespace
100
101 ///////////////////////////////////////////////////////////////////////////////
102
NumLeakedFlagValues()103 uint64_t NumLeakedFlagValues() {
104 absl::MutexLock l(&s_freelist_guard);
105 return s_freelist == nullptr ? 0u : s_freelist->size();
106 }
107
///////////////////////////////////////////////////////////////////////////////
// Persistent state of the flag data.

class FlagImpl;

// Snapshot of a flag's value and metadata, produced by FlagImpl::SaveState
// and applied back via Restore(). Depending on the flag's storage kind the
// snapshot holds either the raw one-word value inline or an owning pointer
// to a cloned value.
class FlagState : public flags_internal::FlagStateInterface {
 public:
  // `v` is either an int64_t (one-word storage kinds) or a void* to a value
  // cloned by the caller; ownership of a pointer transfers to this object.
  template <typename V>
  FlagState(FlagImpl& flag_impl, const V& v, bool modified,
            bool on_command_line, int64_t counter)
      : flag_impl_(flag_impl),
        value_(v),
        modified_(modified),
        on_command_line_(on_command_line),
        counter_(counter) {}

  ~FlagState() override {
    // Only the heap-allocated and sequence-locked kinds own a cloned value
    // (value_.heap_allocated); one-word kinds store the value inline in
    // value_.one_word and need no cleanup.
    if (flag_impl_.ValueStorageKind() != FlagValueStorageKind::kHeapAllocated &&
        flag_impl_.ValueStorageKind() != FlagValueStorageKind::kSequenceLocked)
      return;
    flags_internal::Delete(flag_impl_.op_, value_.heap_allocated);
  }

 private:
  friend class FlagImpl;

  // Restores the flag to the saved state.
  void Restore() const override {
    // RestoreState returns false when the flag has not been modified since
    // this snapshot was taken; in that case there is nothing to do or log.
    if (!flag_impl_.RestoreState(*this)) return;

    ABSL_INTERNAL_LOG(INFO,
                      absl::StrCat("Restore saved value of ", flag_impl_.Name(),
                                   " to: ", flag_impl_.CurrentValue()));
  }

  // Flag and saved flag data.
  FlagImpl& flag_impl_;
  // Saved value; the active member is determined by the flag's storage kind
  // (see the destructor above).
  union SavedValue {
    explicit SavedValue(void* v) : heap_allocated(v) {}
    explicit SavedValue(int64_t v) : one_word(v) {}

    void* heap_allocated;
    int64_t one_word;
  } value_;
  bool modified_;         // Flag's modified_ bit at snapshot time.
  bool on_command_line_;  // Flag's on_command_line_ bit at snapshot time.
  int64_t counter_;       // Flag's modification count at snapshot time.
};
156
157 ///////////////////////////////////////////////////////////////////////////////
158 // Flag implementation, which does not depend on flag value type.
159
DynValueDeleter(FlagOpFn op_arg)160 DynValueDeleter::DynValueDeleter(FlagOpFn op_arg) : op(op_arg) {}
161
operator ()(void * ptr) const162 void DynValueDeleter::operator()(void* ptr) const {
163 if (op == nullptr) return;
164
165 Delete(op, ptr);
166 }
167
MaskedPointer(ptr_t rhs,bool is_candidate)168 MaskedPointer::MaskedPointer(ptr_t rhs, bool is_candidate) : ptr_(rhs) {
169 if (is_candidate) {
170 ApplyMask(kUnprotectedReadCandidate);
171 }
172 }
173
IsUnprotectedReadCandidate() const174 bool MaskedPointer::IsUnprotectedReadCandidate() const {
175 return CheckMask(kUnprotectedReadCandidate);
176 }
177
HasBeenRead() const178 bool MaskedPointer::HasBeenRead() const { return CheckMask(kHasBeenRead); }
179
Set(FlagOpFn op,const void * src,bool is_candidate)180 void MaskedPointer::Set(FlagOpFn op, const void* src, bool is_candidate) {
181 flags_internal::Copy(op, src, Ptr());
182 if (is_candidate) {
183 ApplyMask(kUnprotectedReadCandidate);
184 }
185 }
MarkAsRead()186 void MaskedPointer::MarkAsRead() { ApplyMask(kHasBeenRead); }
187
ApplyMask(mask_t mask)188 void MaskedPointer::ApplyMask(mask_t mask) {
189 ptr_ = reinterpret_cast<ptr_t>(reinterpret_cast<mask_t>(ptr_) | mask);
190 }
CheckMask(mask_t mask) const191 bool MaskedPointer::CheckMask(mask_t mask) const {
192 return (reinterpret_cast<mask_t>(ptr_) & mask) != 0;
193 }
194
// One-time initialization: constructs the data guard mutex in its raw
// storage and materializes the flag's default value into the value storage
// appropriate for this flag's storage kind. Invoked exactly once via
// absl::call_once from DataGuard().
void FlagImpl::Init() {
  new (&data_guard_) absl::Mutex;

  auto def_kind = static_cast<FlagDefaultKind>(def_kind_);

  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      // Build the word-sized bit pattern in a local buffer, then publish it
      // with a single atomic release store.
      alignas(int64_t) std::array<char, sizeof(int64_t)> buf{};
      if (def_kind == FlagDefaultKind::kGenFunc) {
        (*default_value_.gen_func)(buf.data());
      } else {
        // Dynamic defaults only arise later via SET_FLAGS_DEFAULT; they are
        // never the state at first initialization.
        assert(def_kind != FlagDefaultKind::kDynamicValue);
        std::memcpy(buf.data(), &default_value_, Sizeof(op_));
      }
      if (ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit) {
        // We presume here the memory layout of FlagValueAndInitBit struct:
        // the init byte immediately follows the value bytes.
        uint8_t initialized = 1;
        std::memcpy(buf.data() + Sizeof(op_), &initialized,
                    sizeof(initialized));
      }
      // Type can contain valid uninitialized bits, e.g. padding.
      ABSL_ANNOTATE_MEMORY_IS_INITIALIZED(buf.data(), buf.size());
      OneWordValue().store(absl::bit_cast<int64_t>(buf),
                           std::memory_order_release);
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      // For this storage kind the default_value_ always points to gen_func
      // during initialization.
      assert(def_kind == FlagDefaultKind::kGenFunc);
      (*default_value_.gen_func)(AtomicBufferValue());
      break;
    }
    case FlagValueStorageKind::kHeapAllocated:
      // For this storage kind the default_value_ always points to gen_func
      // during initialization.
      assert(def_kind == FlagDefaultKind::kGenFunc);
      // Flag value initially points to the internal buffer.
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);
      (*default_value_.gen_func)(ptr_value.Ptr());
      // Default value is a candidate for an unprotected read.
      PtrStorage().store(MaskedPointer(ptr_value.Ptr(), true),
                         std::memory_order_release);
      break;
  }
  // Allow sequence-lock reads from now on (also serves as the general
  // "initialization complete" marker for this flag).
  seq_lock_.MarkInitialized();
}
243
// Returns the mutex guarding this flag's data, lazily running Init() exactly
// once beforehand. Safe to call from const methods: the const_casts only
// enable the one-time mutation performed by Init().
absl::Mutex* FlagImpl::DataGuard() const {
  absl::call_once(const_cast<FlagImpl*>(this)->init_control_, &FlagImpl::Init,
                  const_cast<FlagImpl*>(this));

  // data_guard_ is initialized inside Init.
  return reinterpret_cast<absl::Mutex*>(&data_guard_);
}
251
// Verifies that a flag access site uses the same type the flag was defined
// with; logs FATAL on mismatch. The comparison is layered: fast type ids
// first, then runtime type_info pointers, then (when RTTI is available)
// type_info equality. NOTE(review): the progressively weaker comparisons
// presumably tolerate duplicated type metadata across shared-library
// boundaries — confirm before relying on that behavior.
void FlagImpl::AssertValidType(FlagFastTypeId rhs_type_id,
                               const std::type_info* (*gen_rtti)()) const {
  FlagFastTypeId lhs_type_id = flags_internal::FastTypeId(op_);

  // `rhs_type_id` is the fast type id corresponding to the declaration
  // visible at the call site. `lhs_type_id` is the fast type id
  // corresponding to the type specified in flag definition. They must match
  // for this operation to be well-defined.
  if (ABSL_PREDICT_TRUE(lhs_type_id == rhs_type_id)) return;

  const std::type_info* lhs_runtime_type_id =
      flags_internal::RuntimeTypeId(op_);
  const std::type_info* rhs_runtime_type_id = (*gen_rtti)();

  if (lhs_runtime_type_id == rhs_runtime_type_id) return;

#ifdef ABSL_INTERNAL_HAS_RTTI
  if (*lhs_runtime_type_id == *rhs_runtime_type_id) return;
#endif

  ABSL_INTERNAL_LOG(
      FATAL, absl::StrCat("Flag '", Name(),
                          "' is defined as one type and declared as another"));
}
276
MakeInitValue() const277 std::unique_ptr<void, DynValueDeleter> FlagImpl::MakeInitValue() const {
278 void* res = nullptr;
279 switch (DefaultKind()) {
280 case FlagDefaultKind::kDynamicValue:
281 res = flags_internal::Clone(op_, default_value_.dynamic_value);
282 break;
283 case FlagDefaultKind::kGenFunc:
284 res = flags_internal::Alloc(op_);
285 (*default_value_.gen_func)(res);
286 break;
287 default:
288 res = flags_internal::Clone(op_, &default_value_);
289 break;
290 }
291 return {res, DynValueDeleter{op_}};
292 }
293
// Stores a new value from `src` (a pointer to the flag's value type) into
// the flag's storage using the mechanism appropriate for the storage kind,
// then marks the flag modified and invokes the mutation callback, if any.
// Requires the caller to hold DataGuard().
void FlagImpl::StoreValue(const void* src, ValueSource source) {
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      // Load the current value to avoid setting 'init' bit manually.
      int64_t one_word_val = OneWordValue().load(std::memory_order_acquire);
      std::memcpy(&one_word_val, src, Sizeof(op_));
      OneWordValue().store(one_word_val, std::memory_order_release);
      seq_lock_.IncrementModificationCount();
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      // The sequence lock does its own modification counting.
      seq_lock_.Write(AtomicBufferValue(), src, Sizeof(op_));
      break;
    }
    case FlagValueStorageKind::kHeapAllocated:
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);

      if (ptr_value.IsUnprotectedReadCandidate() && ptr_value.HasBeenRead()) {
        // If current value is a candidate for an unprotected read and if it was
        // already read at least once, follow up reads (if any) are done without
        // mutex protection. We can't guarantee it is safe to reuse this memory
        // since it may have been accessed by another thread concurrently, so
        // instead we move the memory to a freelist so it can still be safely
        // accessed, and allocate a new one for the new value.
        AddToFreelist(ptr_value.Ptr());
        ptr_value = MaskedPointer(Clone(op_, src), source == kCommandLine);
      } else {
        // Current value either was set programmatically or was never read.
        // We can reuse the memory since all accesses to this value (if any)
        // were protected by mutex. That said, if a new value comes from command
        // line it now becomes a candidate for an unprotected read.
        ptr_value.Set(op_, src, source == kCommandLine);
      }

      PtrStorage().store(ptr_value, std::memory_order_release);
      seq_lock_.IncrementModificationCount();
      break;
  }
  modified_ = true;
  InvokeCallback();
}
336
absl::string_view FlagImpl::Name() const { return name_; }

// Source filename, normalized through the installed usage config.
std::string FlagImpl::Filename() const {
  return flags_internal::GetUsageConfig().normalize_filename(filename_);
}

// Help string: either a stored literal or the result of a generator
// function, depending on how the flag was defined.
std::string FlagImpl::Help() const {
  return HelpSourceKind() == FlagHelpKind::kLiteral ? help_.literal
                                                    : help_.gen_func();
}

FlagFastTypeId FlagImpl::TypeId() const {
  return flags_internal::FastTypeId(op_);
}

// Number of times the flag's value has been stored. Maintained by seq_lock_
// for every storage kind (see StoreValue).
int64_t FlagImpl::ModificationCount() const {
  return seq_lock_.ModificationCount();
}
355
bool FlagImpl::IsSpecifiedOnCommandLine() const {
  absl::MutexLock l(DataGuard());
  return on_command_line_;
}

// String form of the flag's default value, produced by materializing a fresh
// copy of the default and unparsing it.
std::string FlagImpl::DefaultValue() const {
  absl::MutexLock l(DataGuard());

  auto obj = MakeInitValue();
  return flags_internal::Unparse(op_, obj.get());
}
367
// String form of the flag's current value. The read strategy depends on the
// storage kind: one-word values are loaded atomically, sequence-locked
// values are copied out via the seq-lock protocol, and heap-allocated values
// are unparsed while holding the data guard mutex.
std::string FlagImpl::CurrentValue() const {
  auto* guard = DataGuard();  // Make sure flag initialized
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      const auto one_word_val =
          absl::bit_cast<std::array<char, sizeof(int64_t)>>(
              OneWordValue().load(std::memory_order_acquire));
      return flags_internal::Unparse(op_, one_word_val.data());
    }
    case FlagValueStorageKind::kSequenceLocked: {
      std::unique_ptr<void, DynValueDeleter> cloned(flags_internal::Alloc(op_),
                                                    DynValueDeleter{op_});
      ReadSequenceLockedData(cloned.get());
      return flags_internal::Unparse(op_, cloned.get());
    }
    case FlagValueStorageKind::kHeapAllocated: {
      absl::MutexLock l(guard);
      return flags_internal::Unparse(
          op_, PtrStorage().load(std::memory_order_acquire).Ptr());
    }
  }

  // Unreachable: every storage kind is handled above.
  return "";
}
393
SetCallback(const FlagCallbackFunc mutation_callback)394 void FlagImpl::SetCallback(const FlagCallbackFunc mutation_callback) {
395 absl::MutexLock l(DataGuard());
396
397 if (callback_ == nullptr) {
398 callback_ = new FlagCallback;
399 }
400 callback_->func = mutation_callback;
401
402 InvokeCallback();
403 }
404
// Invokes the flag's mutation callback, if one is registered. Requires the
// primary lock (DataGuard()) to be held on entry; it is temporarily released
// while the callback runs (see the comment below).
void FlagImpl::InvokeCallback() const {
  if (!callback_) return;

  // Make a copy of the C-style function pointer that we are about to invoke
  // before we release the lock guarding it.
  FlagCallbackFunc cb = callback_->func;

  // If the flag has a mutation callback this function invokes it. While the
  // callback is being invoked the primary flag's mutex is unlocked and it is
  // re-locked back after call to callback is completed. Callback invocation is
  // guarded by flag's secondary mutex instead which prevents concurrent
  // callback invocation. Note that it is possible for other thread to grab the
  // primary lock and update flag's value at any time during the callback
  // invocation. This is by design. Callback can get a value of the flag if
  // necessary, but it might be different from the value initiated the callback
  // and it also can be different by the time the callback invocation is
  // completed. Requires that *primary_lock be held in exclusive mode; it may be
  // released and reacquired by the implementation.
  MutexRelock relock(*DataGuard());
  absl::MutexLock lock(&callback_->guard);
  cb();
}
427
// Captures the flag's current value and metadata into a FlagState snapshot
// that can later be applied via FlagState::Restore. One-word values are
// stored inline in the snapshot; the other storage kinds clone the value,
// and the snapshot owns (and eventually frees) the clone.
std::unique_ptr<FlagStateInterface> FlagImpl::SaveState() {
  absl::MutexLock l(DataGuard());

  bool modified = modified_;
  bool on_command_line = on_command_line_;
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      return absl::make_unique<FlagState>(
          *this, OneWordValue().load(std::memory_order_acquire), modified,
          on_command_line, ModificationCount());
    }
    case FlagValueStorageKind::kSequenceLocked: {
      void* cloned = flags_internal::Alloc(op_);
      // Read is guaranteed to be successful because we hold the lock.
      bool success =
          seq_lock_.TryRead(cloned, AtomicBufferValue(), Sizeof(op_));
      assert(success);
      static_cast<void>(success);
      return absl::make_unique<FlagState>(*this, cloned, modified,
                                          on_command_line, ModificationCount());
    }
    case FlagValueStorageKind::kHeapAllocated: {
      return absl::make_unique<FlagState>(
          *this,
          flags_internal::Clone(
              op_, PtrStorage().load(std::memory_order_acquire).Ptr()),
          modified, on_command_line, ModificationCount());
    }
  }
  // Unreachable: every storage kind is handled above.
  return nullptr;
}
460
// Applies a previously saved snapshot. Returns false (leaving the flag
// untouched) when the flag has not been modified since the snapshot was
// taken, as detected via the modification counter; otherwise stores the
// saved value and restores the modified/on-command-line bits, and returns
// true.
bool FlagImpl::RestoreState(const FlagState& flag_state) {
  absl::MutexLock l(DataGuard());
  if (flag_state.counter_ == ModificationCount()) {
    return false;
  }

  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic:
      StoreValue(&flag_state.value_.one_word, kProgrammaticChange);
      break;
    case FlagValueStorageKind::kSequenceLocked:
    case FlagValueStorageKind::kHeapAllocated:
      StoreValue(flag_state.value_.heap_allocated, kProgrammaticChange);
      break;
  }

  // StoreValue set modified_ = true; overwrite both metadata bits with the
  // snapshot's values.
  modified_ = flag_state.modified_;
  on_command_line_ = flag_state.on_command_line_;

  return true;
}
483
// Returns a typed pointer to this flag's value storage, which lives at a
// value-type-specific offset from the FlagImpl object itself; the offset is
// obtained from the type-erased op_ handler.
template <typename StorageT>
StorageT* FlagImpl::OffsetValue() const {
  char* p = reinterpret_cast<char*>(const_cast<FlagImpl*>(this));
  // The offset is deduced via Flag value type specific op_.
  ptrdiff_t offset = flags_internal::ValueOffset(op_);

  return reinterpret_cast<StorageT*>(p + offset);
}
492
// Typed accessors for the flag's value storage. Each asserts the storage
// kind it is valid for; calling the wrong accessor is a programming error.

std::atomic<uint64_t>* FlagImpl::AtomicBufferValue() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kSequenceLocked);
  return OffsetValue<std::atomic<uint64_t>>();
}

std::atomic<int64_t>& FlagImpl::OneWordValue() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kOneWordAtomic ||
         ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  return OffsetValue<FlagOneWordValue>()->value;
}

std::atomic<MaskedPointer>& FlagImpl::PtrStorage() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kHeapAllocated);
  return OffsetValue<FlagMaskedPointerValue>()->value;
}
508
// Attempts to parse the supplied `value` string using the parsing routine of
// this flag's type. On success returns an owning pointer to the newly parsed
// value; on failure stores an error message in `err` and returns null. The
// flag itself is never modified by this function.
std::unique_ptr<void, DynValueDeleter> FlagImpl::TryParse(
    absl::string_view value, std::string& err) const {
  // Start from a copy of the default so unset fields (if any) keep their
  // default representation.
  std::unique_ptr<void, DynValueDeleter> tentative_value = MakeInitValue();

  std::string parse_err;
  if (!flags_internal::Parse(op_, value, tentative_value.get(), &parse_err)) {
    absl::string_view err_sep = parse_err.empty() ? "" : "; ";
    err = absl::StrCat("Illegal value '", value, "' specified for flag '",
                       Name(), "'", err_sep, parse_err);
    return nullptr;
  }

  return tentative_value;
}
527
// Copies the flag's current value into `dst`, which must point to storage of
// the flag's value type. Uses the cheapest safe read for each storage kind;
// heap-allocated reads hold the data guard mutex and update the
// "has been read" tag so a later command-line store knows it must not reuse
// the memory (see StoreValue).
void FlagImpl::Read(void* dst) const {
  auto* guard = DataGuard();  // Make sure flag initialized
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      const int64_t one_word_val =
          OneWordValue().load(std::memory_order_acquire);
      std::memcpy(dst, &one_word_val, Sizeof(op_));
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      ReadSequenceLockedData(dst);
      break;
    }
    case FlagValueStorageKind::kHeapAllocated: {
      absl::MutexLock l(guard);
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);

      flags_internal::CopyConstruct(op_, ptr_value.Ptr(), dst);

      // For unprotected read candidates, mark the value as having been read.
      if (ptr_value.IsUnprotectedReadCandidate() && !ptr_value.HasBeenRead()) {
        ptr_value.MarkAsRead();
        PtrStorage().store(ptr_value, std::memory_order_release);
      }
      break;
    }
  }
}
557
// Fast-path read of the raw one-word representation. Valid only for the
// one-word storage kinds (asserted).
int64_t FlagImpl::ReadOneWord() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kOneWordAtomic ||
         ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  auto* guard = DataGuard();  // Make sure flag initialized
  (void)guard;
  return OneWordValue().load(std::memory_order_acquire);
}

// Fast-path read of a bool flag. Valid only for kValueAndInitBit storage
// (asserted); extracts the value field from the word's bit pattern.
bool FlagImpl::ReadOneBool() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  auto* guard = DataGuard();  // Make sure flag initialized
  (void)guard;
  return absl::bit_cast<FlagValueAndInitBit<bool>>(
             OneWordValue().load(std::memory_order_acquire))
      .value;
}
574
// Reads a sequence-locked flag value into `dst`. Fast path is an optimistic
// lock-free read; on contention we fall back to holding the data guard in
// reader mode, which blocks writers and guarantees the retry succeeds.
void FlagImpl::ReadSequenceLockedData(void* dst) const {
  size_t size = Sizeof(op_);
  // Attempt to read using the sequence lock.
  if (ABSL_PREDICT_TRUE(seq_lock_.TryRead(dst, AtomicBufferValue(), size))) {
    return;
  }
  // We failed due to contention. Acquire the lock to prevent contention
  // and try again.
  absl::ReaderMutexLock l(DataGuard());
  bool success = seq_lock_.TryRead(dst, AtomicBufferValue(), size);
  assert(success);
  static_cast<void>(success);
}
588
// Programmatically sets the flag's value from `src` (a pointer to the value
// type). For user-defined flag types the value is first round-tripped
// through unparse/parse as a validity check; a failure is logged as an ERROR
// but the value is stored regardless.
void FlagImpl::Write(const void* src) {
  absl::MutexLock l(DataGuard());

  if (ShouldValidateFlagValue(flags_internal::FastTypeId(op_))) {
    // Parse into a throwaway clone so the check cannot disturb `src`.
    std::unique_ptr<void, DynValueDeleter> obj{flags_internal::Clone(op_, src),
                                               DynValueDeleter{op_}};
    std::string ignored_error;
    std::string src_as_str = flags_internal::Unparse(op_, src);
    if (!flags_internal::Parse(op_, src_as_str, obj.get(), &ignored_error)) {
      ABSL_INTERNAL_LOG(ERROR, absl::StrCat("Attempt to set flag '", Name(),
                                            "' to invalid value ", src_as_str));
    }
  }

  StoreValue(src, kProgrammaticChange);
}
605
// Sets the value of the flag based on specified string `value`. If the flag
// was successfully set to new value, it returns true. Otherwise, sets `err`
// to indicate the error, leaves the flag unchanged, and returns false. There
// are three ways to set the flag's value:
//  * Update the current flag value
//  * Update the flag's default value
//  * Update the current flag value if it was never set before
// The mode is selected based on 'set_mode' parameter.
bool FlagImpl::ParseFrom(absl::string_view value, FlagSettingMode set_mode,
                         ValueSource source, std::string& err) {
  absl::MutexLock l(DataGuard());

  switch (set_mode) {
    case SET_FLAGS_VALUE: {
      // set or modify the flag's value
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      StoreValue(tentative_value.get(), source);

      if (source == kCommandLine) {
        on_command_line_ = true;
      }
      break;
    }
    case SET_FLAG_IF_DEFAULT: {
      // set the flag's value, but only if it hasn't been set by someone else
      if (modified_) {
        // TODO(rogeeff): review and fix this semantic. Currently we do not fail
        // in this case if flag is modified. This is misleading since the flag's
        // value is not updated even though we return true.
        // *err = absl::StrCat(Name(), " is already set to ",
        //                     CurrentValue(), "\n");
        // return false;
        return true;
      }
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      StoreValue(tentative_value.get(), source);
      break;
    }
    case SET_FLAGS_DEFAULT: {
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      if (DefaultKind() == FlagDefaultKind::kDynamicValue) {
        // Replace the previous dynamic default; hand the old value to
        // tentative_value so it is freed when it goes out of scope.
        void* old_value = default_value_.dynamic_value;
        default_value_.dynamic_value = tentative_value.release();
        tentative_value.reset(old_value);
      } else {
        default_value_.dynamic_value = tentative_value.release();
        def_kind_ = static_cast<uint8_t>(FlagDefaultKind::kDynamicValue);
      }

      if (!modified_) {
        // Need to set both default value *and* current, in this case.
        // StoreValue sets modified_ = true, so reset it: a flag tracking its
        // default is still considered unmodified.
        StoreValue(default_value_.dynamic_value, source);
        modified_ = false;
      }
      break;
    }
  }

  return true;
}
672
// Sanity-checks that the flag's default value, converted to string form, can
// be parsed back by the flag's parser; logs FATAL if not. The string form is
// computed before taking the lock because DefaultValue() acquires DataGuard()
// itself.
void FlagImpl::CheckDefaultValueParsingRoundtrip() const {
  std::string v = DefaultValue();

  absl::MutexLock lock(DataGuard());

  auto dst = MakeInitValue();
  std::string error;
  if (!flags_internal::Parse(op_, v, dst.get(), &error)) {
    ABSL_INTERNAL_LOG(
        FATAL,
        absl::StrCat("Flag ", Name(), " (from ", Filename(),
                     "): string form of default value '", v,
                     "' could not be parsed; error=", error));
  }

  // We do not compare dst to def since parsing/unparsing may make
  // small changes, e.g., precision loss for floating point types.
}
691
ValidateInputValue(absl::string_view value) const692 bool FlagImpl::ValidateInputValue(absl::string_view value) const {
693 absl::MutexLock l(DataGuard());
694
695 auto obj = MakeInitValue();
696 std::string ignored_error;
697 return flags_internal::Parse(op_, value, obj.get(), &ignored_error);
698 }
699
700 } // namespace flags_internal
701 ABSL_NAMESPACE_END
702 } // namespace absl
703