flag.cc

//
// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "absl/flags/internal/flag.h"

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include <array>
#include <atomic>
#include <cstring>
#include <memory>
#include <string>
#include <typeinfo>
#include <vector>

#include "absl/base/attributes.h"
#include "absl/base/call_once.h"
#include "absl/base/casts.h"
#include "absl/base/config.h"
#include "absl/base/const_init.h"
#include "absl/base/dynamic_annotations.h"
#include "absl/base/optimization.h"
#include "absl/flags/config.h"
#include "absl/flags/internal/commandlineflag.h"
#include "absl/flags/usage_config.h"
#include "absl/memory/memory.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"
#include "absl/synchronization/mutex.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace flags_internal {

// The help message indicating that the commandline flag has been stripped. It
// will not show up when doing "-help" and its variants. The flag is stripped
// if ABSL_FLAGS_STRIP_HELP is set to 1 before including absl/flags/flag.h
const char kStrippedFlagHelp[] = "\001\002\003\004 (unknown) \004\003\002\001";

namespace {

// Currently we only validate flag values for user-defined flag types.
bool ShouldValidateFlagValue(FlagFastTypeId flag_type_id) {
#define DONT_VALIDATE(T, _) \
  if (flag_type_id == base_internal::FastTypeId<T>()) return false;
  ABSL_FLAGS_INTERNAL_SUPPORTED_TYPES(DONT_VALIDATE)
#undef DONT_VALIDATE

  return true;
}
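// Illustrative sketch only (not part of the build): for a built-in type such
// as bool, the DONT_VALIDATE macro above expands to roughly
//   if (flag_type_id == base_internal::FastTypeId<bool>()) return false;
// so every type listed in ABSL_FLAGS_INTERNAL_SUPPORTED_TYPES is exempted and
// only user-defined flag types fall through to `return true`.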
// RAII helper used to temporarily unlock and relock `absl::Mutex`.
// This is used when we need to ensure that locks are released while
// invoking user supplied callbacks and then reacquired, since callbacks may
// need to acquire these locks themselves.
class MutexRelock {
 public:
  explicit MutexRelock(absl::Mutex& mu) : mu_(mu) { mu_.Unlock(); }
  ~MutexRelock() { mu_.Lock(); }

  MutexRelock(const MutexRelock&) = delete;
  MutexRelock& operator=(const MutexRelock&) = delete;

 private:
  absl::Mutex& mu_;
};
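// A minimal usage sketch (UserCallback stands in for any user-supplied
// callback; assumes `mu` is already held by the caller):
//
//   mu.Lock();
//   {
//     MutexRelock relock(mu);  // unlocks `mu` here
//     UserCallback();          // may itself acquire `mu`
//   }                          // `mu` is reacquired on scope exit
//   mu.Unlock();
//
// See FlagImpl::InvokeCallback below for the actual call site.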
// This is a freelist of leaked flag values and guard for its access.
// When we can't guarantee it is safe to reuse the memory for flag values,
// we move the memory to the freelist where it lives indefinitely, so it can
// still be safely accessed. This also prevents leak checkers from complaining
// about the leaked memory that can no longer be accessed through any pointer.
ABSL_CONST_INIT absl::Mutex s_freelist_guard(absl::kConstInit);
ABSL_CONST_INIT std::vector<void*>* s_freelist = nullptr;

void AddToFreelist(void* p) {
  absl::MutexLock l(&s_freelist_guard);
  if (!s_freelist) {
    s_freelist = new std::vector<void*>;
  }
  s_freelist->push_back(p);
}

}  // namespace

///////////////////////////////////////////////////////////////////////////////

uint64_t NumLeakedFlagValues() {
  absl::MutexLock l(&s_freelist_guard);
  return s_freelist == nullptr ? 0u : s_freelist->size();
}
///////////////////////////////////////////////////////////////////////////////
// Persistent state of the flag data.

class FlagImpl;

class FlagState : public flags_internal::FlagStateInterface {
 public:
  template <typename V>
  FlagState(FlagImpl& flag_impl, const V& v, bool modified,
            bool on_command_line, int64_t counter)
      : flag_impl_(flag_impl),
        value_(v),
        modified_(modified),
        on_command_line_(on_command_line),
        counter_(counter) {}

  ~FlagState() override {
    if (flag_impl_.ValueStorageKind() != FlagValueStorageKind::kHeapAllocated &&
        flag_impl_.ValueStorageKind() != FlagValueStorageKind::kSequenceLocked)
      return;
    flags_internal::Delete(flag_impl_.op_, value_.heap_allocated);
  }

 private:
  friend class FlagImpl;

  // Restores the flag to the saved state.
  void Restore() const override {
    if (!flag_impl_.RestoreState(*this)) return;

    ABSL_INTERNAL_LOG(INFO,
                      absl::StrCat("Restore saved value of ", flag_impl_.Name(),
                                   " to: ", flag_impl_.CurrentValue()));
  }

  // Flag and saved flag data.
  FlagImpl& flag_impl_;
  union SavedValue {
    explicit SavedValue(void* v) : heap_allocated(v) {}
    explicit SavedValue(int64_t v) : one_word(v) {}

    void* heap_allocated;
    int64_t one_word;
  } value_;
  bool modified_;
  bool on_command_line_;
  int64_t counter_;
};
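// FlagState instances are produced by FlagImpl::SaveState() and consumed by
// Restore() / RestoreState() below. (absl::FlagSaver is the public wrapper
// that drives this save/restore pair for all registered flags, typically
// around a test body.)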
///////////////////////////////////////////////////////////////////////////////
// Flag implementation, which does not depend on flag value type.

DynValueDeleter::DynValueDeleter(FlagOpFn op_arg) : op(op_arg) {}

void DynValueDeleter::operator()(void* ptr) const {
  if (op == nullptr) return;

  Delete(op, ptr);
}

MaskedPointer::MaskedPointer(ptr_t rhs, bool is_candidate) : ptr_(rhs) {
  if (is_candidate) {
    ApplyMask(kUnprotectedReadCandidate);
  }
}

bool MaskedPointer::IsUnprotectedReadCandidate() const {
  return CheckMask(kUnprotectedReadCandidate);
}

bool MaskedPointer::HasBeenRead() const { return CheckMask(kHasBeenRead); }

void MaskedPointer::Set(FlagOpFn op, const void* src, bool is_candidate) {
  flags_internal::Copy(op, src, Ptr());
  if (is_candidate) {
    ApplyMask(kUnprotectedReadCandidate);
  }
}

void MaskedPointer::MarkAsRead() { ApplyMask(kHasBeenRead); }

void MaskedPointer::ApplyMask(mask_t mask) {
  ptr_ = reinterpret_cast<ptr_t>(reinterpret_cast<mask_t>(ptr_) | mask);
}

bool MaskedPointer::CheckMask(mask_t mask) const {
  return (reinterpret_cast<mask_t>(ptr_) & mask) != 0;
}
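// How the tagging above works (a sketch, assuming the value pointer is
// allocated with enough alignment that its low bits are always zero): the
// kUnprotectedReadCandidate and kHasBeenRead masks are OR-ed into those spare
// low bits, e.g.
//   MaskedPointer p(heap_ptr, /*is_candidate=*/true);  // candidate bit set
//   p.MarkAsRead();                                     // read bit set
//   p.Ptr();  // returns the address with the mask bits stripped
// The exact mask values and Ptr() are defined in flag.h; this comment is only
// illustrative.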
void FlagImpl::Init() {
  new (&data_guard_) absl::Mutex;

  auto def_kind = static_cast<FlagDefaultKind>(def_kind_);

  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      alignas(int64_t) std::array<char, sizeof(int64_t)> buf{};
      if (def_kind == FlagDefaultKind::kGenFunc) {
        (*default_value_.gen_func)(buf.data());
      } else {
        assert(def_kind != FlagDefaultKind::kDynamicValue);
        std::memcpy(buf.data(), &default_value_, Sizeof(op_));
      }
      if (ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit) {
        // We presume here the memory layout of FlagValueAndInitBit struct.
        uint8_t initialized = 1;
        std::memcpy(buf.data() + Sizeof(op_), &initialized,
                    sizeof(initialized));
      }
      // Type can contain valid uninitialized bits, e.g. padding.
      ABSL_ANNOTATE_MEMORY_IS_INITIALIZED(buf.data(), buf.size());
      OneWordValue().store(absl::bit_cast<int64_t>(buf),
                           std::memory_order_release);
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      // For this storage kind the default_value_ always points to gen_func
      // during initialization.
      assert(def_kind == FlagDefaultKind::kGenFunc);
      (*default_value_.gen_func)(AtomicBufferValue());
      break;
    }
    case FlagValueStorageKind::kHeapAllocated:
      // For this storage kind the default_value_ always points to gen_func
      // during initialization.
      assert(def_kind == FlagDefaultKind::kGenFunc);
      // Flag value initially points to the internal buffer.
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);
      (*default_value_.gen_func)(ptr_value.Ptr());
      // Default value is a candidate for an unprotected read.
      PtrStorage().store(MaskedPointer(ptr_value.Ptr(), true),
                         std::memory_order_release);
      break;
  }
  seq_lock_.MarkInitialized();
}
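// The layout presumed by the kValueAndInitBit branch above is, roughly,
//   struct FlagValueAndInitBit { T value; uint8_t init; };
// i.e. the value followed immediately by a one-byte "initialized" marker, so
// storing the whole word sets both at once. The authoritative definition is
// in flag.h; this sketch is illustrative only.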
absl::Mutex* FlagImpl::DataGuard() const {
  absl::call_once(const_cast<FlagImpl*>(this)->init_control_, &FlagImpl::Init,
                  const_cast<FlagImpl*>(this));

  // data_guard_ is initialized inside Init.
  return reinterpret_cast<absl::Mutex*>(&data_guard_);
}

void FlagImpl::AssertValidType(FlagFastTypeId rhs_type_id,
                               const std::type_info* (*gen_rtti)()) const {
  FlagFastTypeId lhs_type_id = flags_internal::FastTypeId(op_);

  // `rhs_type_id` is the fast type id corresponding to the declaration
  // visible at the call site. `lhs_type_id` is the fast type id
  // corresponding to the type specified in the flag definition. They must
  // match for this operation to be well-defined.
  if (ABSL_PREDICT_TRUE(lhs_type_id == rhs_type_id)) return;

  const std::type_info* lhs_runtime_type_id =
      flags_internal::RuntimeTypeId(op_);
  const std::type_info* rhs_runtime_type_id = (*gen_rtti)();

  if (lhs_runtime_type_id == rhs_runtime_type_id) return;

#ifdef ABSL_INTERNAL_HAS_RTTI
  if (*lhs_runtime_type_id == *rhs_runtime_type_id) return;
#endif

  ABSL_INTERNAL_LOG(
      FATAL, absl::StrCat("Flag '", Name(),
                          "' is defined as one type and declared as another"));
}

std::unique_ptr<void, DynValueDeleter> FlagImpl::MakeInitValue() const {
  void* res = nullptr;
  switch (DefaultKind()) {
    case FlagDefaultKind::kDynamicValue:
      res = flags_internal::Clone(op_, default_value_.dynamic_value);
      break;
    case FlagDefaultKind::kGenFunc:
      res = flags_internal::Alloc(op_);
      (*default_value_.gen_func)(res);
      break;
    default:
      res = flags_internal::Clone(op_, &default_value_);
      break;
  }
  return {res, DynValueDeleter{op_}};
}

void FlagImpl::StoreValue(const void* src, ValueSource source) {
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      // Load the current value to avoid setting the 'init' bit manually.
      int64_t one_word_val = OneWordValue().load(std::memory_order_acquire);
      std::memcpy(&one_word_val, src, Sizeof(op_));
      OneWordValue().store(one_word_val, std::memory_order_release);
      seq_lock_.IncrementModificationCount();
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      seq_lock_.Write(AtomicBufferValue(), src, Sizeof(op_));
      break;
    }
    case FlagValueStorageKind::kHeapAllocated:
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);

      if (ptr_value.IsUnprotectedReadCandidate() && ptr_value.HasBeenRead()) {
        // If the current value is a candidate for an unprotected read and it
        // was already read at least once, follow-up reads (if any) are done
        // without mutex protection. We can't guarantee it is safe to reuse
        // this memory since it may have been accessed by another thread
        // concurrently, so instead we move the memory to a freelist so it can
        // still be safely accessed, and allocate a new one for the new value.
        AddToFreelist(ptr_value.Ptr());
        ptr_value = MaskedPointer(Clone(op_, src), source == kCommandLine);
      } else {
        // The current value either was set programmatically or was never
        // read. We can reuse the memory since all accesses to this value (if
        // any) were protected by the mutex. That said, if the new value comes
        // from the command line it now becomes a candidate for an unprotected
        // read.
        ptr_value.Set(op_, src, source == kCommandLine);
      }

      PtrStorage().store(ptr_value, std::memory_order_release);
      seq_lock_.IncrementModificationCount();
      break;
  }
  modified_ = true;
  InvokeCallback();
}
absl::string_view FlagImpl::Name() const { return name_; }

std::string FlagImpl::Filename() const {
  return flags_internal::GetUsageConfig().normalize_filename(filename_);
}

std::string FlagImpl::Help() const {
  return HelpSourceKind() == FlagHelpKind::kLiteral ? help_.literal
                                                    : help_.gen_func();
}

FlagFastTypeId FlagImpl::TypeId() const {
  return flags_internal::FastTypeId(op_);
}

int64_t FlagImpl::ModificationCount() const {
  return seq_lock_.ModificationCount();
}

bool FlagImpl::IsSpecifiedOnCommandLine() const {
  absl::MutexLock l(DataGuard());
  return on_command_line_;
}

std::string FlagImpl::DefaultValue() const {
  absl::MutexLock l(DataGuard());

  auto obj = MakeInitValue();
  return flags_internal::Unparse(op_, obj.get());
}

std::string FlagImpl::CurrentValue() const {
  auto* guard = DataGuard();  // Make sure flag initialized

  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      const auto one_word_val =
          absl::bit_cast<std::array<char, sizeof(int64_t)>>(
              OneWordValue().load(std::memory_order_acquire));
      return flags_internal::Unparse(op_, one_word_val.data());
    }
    case FlagValueStorageKind::kSequenceLocked: {
      std::unique_ptr<void, DynValueDeleter> cloned(flags_internal::Alloc(op_),
                                                    DynValueDeleter{op_});
      ReadSequenceLockedData(cloned.get());
      return flags_internal::Unparse(op_, cloned.get());
    }
    case FlagValueStorageKind::kHeapAllocated: {
      absl::MutexLock l(guard);
      return flags_internal::Unparse(
          op_, PtrStorage().load(std::memory_order_acquire).Ptr());
    }
  }
  return "";
}
void FlagImpl::SetCallback(const FlagCallbackFunc mutation_callback) {
  absl::MutexLock l(DataGuard());

  if (callback_ == nullptr) {
    callback_ = new FlagCallback;
  }
  callback_->func = mutation_callback;

  InvokeCallback();
}

void FlagImpl::InvokeCallback() const {
  if (!callback_) return;

  // Make a copy of the C-style function pointer that we are about to invoke
  // before we release the lock guarding it.
  FlagCallbackFunc cb = callback_->func;

  // If the flag has a mutation callback this function invokes it. While the
  // callback is being invoked the primary flag's mutex is unlocked, and it is
  // re-locked after the call to the callback completes. Callback invocation
  // is instead guarded by the flag's secondary mutex, which prevents
  // concurrent callback invocations. Note that it is possible for another
  // thread to grab the primary lock and update the flag's value at any time
  // during the callback invocation. This is by design. The callback can read
  // the flag's value if necessary, but that value might differ from the one
  // that triggered the callback, and it may change again by the time the
  // callback invocation completes. Requires that *primary_lock be held in
  // exclusive mode; it may be released and reacquired by the implementation.
  MutexRelock relock(*DataGuard());
  absl::MutexLock lock(&callback_->guard);
  cb();
}
std::unique_ptr<FlagStateInterface> FlagImpl::SaveState() {
  absl::MutexLock l(DataGuard());

  bool modified = modified_;
  bool on_command_line = on_command_line_;
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      return absl::make_unique<FlagState>(
          *this, OneWordValue().load(std::memory_order_acquire), modified,
          on_command_line, ModificationCount());
    }
    case FlagValueStorageKind::kSequenceLocked: {
      void* cloned = flags_internal::Alloc(op_);
      // Read is guaranteed to be successful because we hold the lock.
      bool success =
          seq_lock_.TryRead(cloned, AtomicBufferValue(), Sizeof(op_));
      assert(success);
      static_cast<void>(success);
      return absl::make_unique<FlagState>(*this, cloned, modified,
                                          on_command_line, ModificationCount());
    }
    case FlagValueStorageKind::kHeapAllocated: {
      return absl::make_unique<FlagState>(
          *this,
          flags_internal::Clone(
              op_, PtrStorage().load(std::memory_order_acquire).Ptr()),
          modified, on_command_line, ModificationCount());
    }
  }
  return nullptr;
}

bool FlagImpl::RestoreState(const FlagState& flag_state) {
  absl::MutexLock l(DataGuard());
  if (flag_state.counter_ == ModificationCount()) {
    return false;
  }

  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic:
      StoreValue(&flag_state.value_.one_word, kProgrammaticChange);
      break;
    case FlagValueStorageKind::kSequenceLocked:
    case FlagValueStorageKind::kHeapAllocated:
      StoreValue(flag_state.value_.heap_allocated, kProgrammaticChange);
      break;
  }

  modified_ = flag_state.modified_;
  on_command_line_ = flag_state.on_command_line_;

  return true;
}
template <typename StorageT>
StorageT* FlagImpl::OffsetValue() const {
  char* p = reinterpret_cast<char*>(const_cast<FlagImpl*>(this));
  // The offset is deduced via Flag value type specific op_.
  ptrdiff_t offset = flags_internal::ValueOffset(op_);

  return reinterpret_cast<StorageT*>(p + offset);
}
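// The layout this relies on (a sketch, assuming the Flag<T> definition in
// flag.h): a flag object stores its FlagImpl immediately followed by its
// value storage, roughly
//   class Flag { FlagImpl impl_; FlagValue<T> value_; };
// so ValueOffset(op_) yields the byte distance from `this` (the FlagImpl) to
// the storage, computed by the type-specific op_ rather than hard-coded here.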
std::atomic<uint64_t>* FlagImpl::AtomicBufferValue() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kSequenceLocked);
  return OffsetValue<std::atomic<uint64_t>>();
}

std::atomic<int64_t>& FlagImpl::OneWordValue() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kOneWordAtomic ||
         ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  return OffsetValue<FlagOneWordValue>()->value;
}

std::atomic<MaskedPointer>& FlagImpl::PtrStorage() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kHeapAllocated);
  return OffsetValue<FlagMaskedPointerValue>()->value;
}
// Attempts to parse the supplied `value` string using the flag's parsing
// routine (op_). If parsing succeeds, returns the newly parsed value. If any
// error is encountered, the error message is stored in `err` and nullptr is
// returned.
std::unique_ptr<void, DynValueDeleter> FlagImpl::TryParse(
    absl::string_view value, std::string& err) const {
  std::unique_ptr<void, DynValueDeleter> tentative_value = MakeInitValue();

  std::string parse_err;
  if (!flags_internal::Parse(op_, value, tentative_value.get(), &parse_err)) {
    absl::string_view err_sep = parse_err.empty() ? "" : "; ";
    err = absl::StrCat("Illegal value '", value, "' specified for flag '",
                       Name(), "'", err_sep, parse_err);
    return nullptr;
  }

  return tentative_value;
}
void FlagImpl::Read(void* dst) const {
  auto* guard = DataGuard();  // Make sure flag initialized
  switch (ValueStorageKind()) {
    case FlagValueStorageKind::kValueAndInitBit:
    case FlagValueStorageKind::kOneWordAtomic: {
      const int64_t one_word_val =
          OneWordValue().load(std::memory_order_acquire);
      std::memcpy(dst, &one_word_val, Sizeof(op_));
      break;
    }
    case FlagValueStorageKind::kSequenceLocked: {
      ReadSequenceLockedData(dst);
      break;
    }
    case FlagValueStorageKind::kHeapAllocated: {
      absl::MutexLock l(guard);
      MaskedPointer ptr_value = PtrStorage().load(std::memory_order_acquire);

      flags_internal::CopyConstruct(op_, ptr_value.Ptr(), dst);

      // For unprotected read candidates, mark the value as having been read.
      if (ptr_value.IsUnprotectedReadCandidate() && !ptr_value.HasBeenRead()) {
        ptr_value.MarkAsRead();
        PtrStorage().store(ptr_value, std::memory_order_release);
      }
      break;
    }
  }
}
int64_t FlagImpl::ReadOneWord() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kOneWordAtomic ||
         ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  auto* guard = DataGuard();  // Make sure flag initialized
  (void)guard;
  return OneWordValue().load(std::memory_order_acquire);
}

bool FlagImpl::ReadOneBool() const {
  assert(ValueStorageKind() == FlagValueStorageKind::kValueAndInitBit);
  auto* guard = DataGuard();  // Make sure flag initialized
  (void)guard;
  return absl::bit_cast<FlagValueAndInitBit<bool>>(
             OneWordValue().load(std::memory_order_acquire))
      .value;
}

void FlagImpl::ReadSequenceLockedData(void* dst) const {
  size_t size = Sizeof(op_);
  // Attempt to read using the sequence lock.
  if (ABSL_PREDICT_TRUE(seq_lock_.TryRead(dst, AtomicBufferValue(), size))) {
    return;
  }
  // We failed due to contention. Acquire the lock to prevent contention
  // and try again.
  absl::ReaderMutexLock l(DataGuard());
  bool success = seq_lock_.TryRead(dst, AtomicBufferValue(), size);
  assert(success);
  static_cast<void>(success);
}
void FlagImpl::Write(const void* src) {
  absl::MutexLock l(DataGuard());

  if (ShouldValidateFlagValue(flags_internal::FastTypeId(op_))) {
    std::unique_ptr<void, DynValueDeleter> obj{flags_internal::Clone(op_, src),
                                               DynValueDeleter{op_}};
    std::string ignored_error;
    std::string src_as_str = flags_internal::Unparse(op_, src);
    if (!flags_internal::Parse(op_, src_as_str, obj.get(), &ignored_error)) {
      ABSL_INTERNAL_LOG(ERROR, absl::StrCat("Attempt to set flag '", Name(),
                                            "' to invalid value ", src_as_str));
    }
  }

  StoreValue(src, kProgrammaticChange);
}
// Sets the value of the flag based on the specified string `value`. If the
// flag was successfully set to the new value, returns true. Otherwise, sets
// `err` to indicate the error, leaves the flag unchanged, and returns false.
// There are three ways to set the flag's value:
//  * Update the current flag value
//  * Update the flag's default value
//  * Update the current flag value if it was never set before
// The mode is selected based on the `set_mode` parameter.
bool FlagImpl::ParseFrom(absl::string_view value, FlagSettingMode set_mode,
                         ValueSource source, std::string& err) {
  absl::MutexLock l(DataGuard());

  switch (set_mode) {
    case SET_FLAGS_VALUE: {
      // set or modify the flag's value
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      StoreValue(tentative_value.get(), source);

      if (source == kCommandLine) {
        on_command_line_ = true;
      }
      break;
    }
    case SET_FLAG_IF_DEFAULT: {
      // set the flag's value, but only if it hasn't been set by someone else
      if (modified_) {
        // TODO(rogeeff): review and fix this semantic. Currently we do not
        // fail in this case if the flag is modified. This is misleading since
        // the flag's value is not updated even though we return true.
        // *err = absl::StrCat(Name(), " is already set to ",
        //                     CurrentValue(), "\n");
        // return false;
        return true;
      }
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      StoreValue(tentative_value.get(), source);
      break;
    }
    case SET_FLAGS_DEFAULT: {
      auto tentative_value = TryParse(value, err);
      if (!tentative_value) return false;

      if (DefaultKind() == FlagDefaultKind::kDynamicValue) {
        void* old_value = default_value_.dynamic_value;
        default_value_.dynamic_value = tentative_value.release();
        tentative_value.reset(old_value);
      } else {
        default_value_.dynamic_value = tentative_value.release();
        def_kind_ = static_cast<uint8_t>(FlagDefaultKind::kDynamicValue);
      }

      if (!modified_) {
        // Need to set both default value *and* current, in this case.
        StoreValue(default_value_.dynamic_value, source);
        modified_ = false;
      }
      break;
    }
  }

  return true;
}
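// Illustrative correspondence between the modes above and typical callers (a
// sketch, not exhaustive):
//   ParseFrom(text, SET_FLAGS_VALUE, kCommandLine, err);            // --flag=text
//   ParseFrom(text, SET_FLAG_IF_DEFAULT, kProgrammaticChange, err);
//   ParseFrom(text, SET_FLAGS_DEFAULT, kProgrammaticChange, err);
// where `text` is the textual flag value and `err` receives any parse error.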
void FlagImpl::CheckDefaultValueParsingRoundtrip() const {
  std::string v = DefaultValue();

  absl::MutexLock lock(DataGuard());

  auto dst = MakeInitValue();
  std::string error;
  if (!flags_internal::Parse(op_, v, dst.get(), &error)) {
    ABSL_INTERNAL_LOG(
        FATAL,
        absl::StrCat("Flag ", Name(), " (from ", Filename(),
                     "): string form of default value '", v,
                     "' could not be parsed; error=", error));
  }

  // We do not compare dst to def since parsing/unparsing may make
  // small changes, e.g., precision loss for floating point types.
}

bool FlagImpl::ValidateInputValue(absl::string_view value) const {
  absl::MutexLock l(DataGuard());

  auto obj = MakeInitValue();
  std::string ignored_error;
  return flags_internal::Parse(op_, value, obj.get(), &ignored_error);
}

}  // namespace flags_internal
ABSL_NAMESPACE_END
}  // namespace absl