// cord.cc
  1. // Copyright 2020 The Abseil Authors.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // https://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. #include "absl/strings/cord.h"
  15. #include <algorithm>
  16. #include <cassert>
  17. #include <cstddef>
  18. #include <cstdint>
  19. #include <cstdio>
  20. #include <cstdlib>
  21. #include <cstring>
  22. #include <iomanip>
  23. #include <ios>
  24. #include <iostream>
  25. #include <limits>
  26. #include <memory>
  27. #include <ostream>
  28. #include <sstream>
  29. #include <string>
  30. #include <utility>
  31. #include "absl/base/attributes.h"
  32. #include "absl/base/config.h"
  33. #include "absl/base/internal/endian.h"
  34. #include "absl/base/internal/raw_logging.h"
  35. #include "absl/base/macros.h"
  36. #include "absl/base/optimization.h"
  37. #include "absl/base/nullability.h"
  38. #include "absl/container/inlined_vector.h"
  39. #include "absl/crc/crc32c.h"
  40. #include "absl/crc/internal/crc_cord_state.h"
  41. #include "absl/functional/function_ref.h"
  42. #include "absl/strings/cord_buffer.h"
  43. #include "absl/strings/escaping.h"
  44. #include "absl/strings/internal/cord_data_edge.h"
  45. #include "absl/strings/internal/cord_internal.h"
  46. #include "absl/strings/internal/cord_rep_btree.h"
  47. #include "absl/strings/internal/cord_rep_crc.h"
  48. #include "absl/strings/internal/cord_rep_flat.h"
  49. #include "absl/strings/internal/cordz_update_tracker.h"
  50. #include "absl/strings/internal/resize_uninitialized.h"
  51. #include "absl/strings/match.h"
  52. #include "absl/strings/str_cat.h"
  53. #include "absl/strings/string_view.h"
  54. #include "absl/strings/strip.h"
  55. #include "absl/types/optional.h"
  56. #include "absl/types/span.h"
namespace absl {
ABSL_NAMESPACE_BEGIN

// Convenience aliases for the cord_internal types used throughout this file.
using ::absl::cord_internal::CordRep;
using ::absl::cord_internal::CordRepBtree;
using ::absl::cord_internal::CordRepCrc;
using ::absl::cord_internal::CordRepExternal;
using ::absl::cord_internal::CordRepFlat;
using ::absl::cord_internal::CordRepSubstring;
using ::absl::cord_internal::CordzUpdateTracker;
using ::absl::cord_internal::InlineData;
using ::absl::cord_internal::kMaxFlatLength;
using ::absl::cord_internal::kMinFlatLength;
using ::absl::cord_internal::kInlinedVectorSize;
using ::absl::cord_internal::kMaxBytesToCopy;

// Forward declarations of debug helpers; being `static`, they are defined
// later in this translation unit.
static void DumpNode(absl::Nonnull<CordRep*> nonnull_rep, bool include_data,
                     absl::Nonnull<std::ostream*> os, int indent = 0);
static bool VerifyNode(absl::Nonnull<CordRep*> root,
                       absl::Nonnull<CordRep*> start_node);
  75. static inline absl::Nullable<CordRep*> VerifyTree(
  76. absl::Nullable<CordRep*> node) {
  77. assert(node == nullptr || VerifyNode(node, node));
  78. static_cast<void>(&VerifyNode);
  79. return node;
  80. }
  81. static absl::Nonnull<CordRepFlat*> CreateFlat(absl::Nonnull<const char*> data,
  82. size_t length,
  83. size_t alloc_hint) {
  84. CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
  85. flat->length = length;
  86. memcpy(flat->Data(), data, length);
  87. return flat;
  88. }
// Creates a new flat or Btree out of the specified array.
// The returned node has a refcount of 1.
static absl::Nonnull<CordRep*> NewBtree(absl::Nonnull<const char*> data,
                                        size_t length, size_t alloc_hint) {
  // Data that fits into a single flat node needs no btree at all.
  if (length <= kMaxFlatLength) {
    return CreateFlat(data, length, alloc_hint);
  }
  // Fill one maximum-size flat, then append the remainder to a btree built
  // around it, passing `alloc_hint` through to the trailing allocation.
  CordRepFlat* flat = CreateFlat(data, kMaxFlatLength, 0);
  data += kMaxFlatLength;
  length -= kMaxFlatLength;
  auto* root = CordRepBtree::Create(flat);
  return CordRepBtree::Append(root, {data, length}, alloc_hint);
}
  102. // Create a new tree out of the specified array.
  103. // The returned node has a refcount of 1.
  104. static absl::Nullable<CordRep*> NewTree(absl::Nullable<const char*> data,
  105. size_t length, size_t alloc_hint) {
  106. if (length == 0) return nullptr;
  107. return NewBtree(data, length, alloc_hint);
  108. }
namespace cord_internal {

// Fills in the length/tag/base fields of an EXTERNAL rep pointing at `data`.
// `data` must be non-empty; the rep does not copy the bytes.
void InitializeCordRepExternal(absl::string_view data,
                               absl::Nonnull<CordRepExternal*> rep) {
  assert(!data.empty());
  rep->length = data.size();
  rep->tag = EXTERNAL;
  rep->base = data.data();
  VerifyTree(rep);
}

}  // namespace cord_internal
// Creates a CordRep from the provided string. If the string is large enough,
// and not wasteful, we move the string into an external cord rep, preserving
// the already allocated string contents.
// Requires the provided string length to be larger than `kMaxInline`.
static absl::Nonnull<CordRep*> CordRepFromString(std::string&& src) {
  assert(src.length() > cord_internal::kMaxInline);
  if (
      // String is short: copy data to avoid external block overhead.
      src.size() <= kMaxBytesToCopy ||
      // String is wasteful: copy data to avoid pinning too much unused memory.
      src.size() < src.capacity() / 2
  ) {
    return NewTree(src.data(), src.size(), 0);
  }

  // Releaser that keeps the string alive for as long as the external rep
  // lives; operator() is intentionally a no-op since the destructor of the
  // releaser (and thus of `data`) frees the contents.
  struct StringReleaser {
    void operator()(absl::string_view /* data */) {}
    std::string data;
  };
  const absl::string_view original_data = src;
  auto* rep =
      static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
          absl::cord_internal::NewExternalRep(original_data,
                                              StringReleaser{std::move(src)}));
  // Moving src may have invalidated its data pointer, so adjust it.
  rep->base = rep->template get<0>().data.data();
  return rep;
}
// --------------------------------------------------------------------
// Cord::InlineRep functions

#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
// Out-of-line definition for toolchains that still require a definition for
// odr-used static constexpr members.
constexpr unsigned char Cord::InlineRep::kMaxInline;
#endif
// Copies `n` bytes from `data` into the inline storage and records `n` as the
// inline size.
inline void Cord::InlineRep::set_data(absl::Nonnull<const char*> data,
                                      size_t n) {
  static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15");
  data_.set_inline_data(data, n);
}
// Resets this rep and reserves `n` inline bytes, returning a pointer to the
// inline storage for the caller to fill in. Requires n <= kMaxInline.
inline absl::Nonnull<char*> Cord::InlineRep::set_data(size_t n) {
  assert(n <= kMaxInline);
  ResetToEmpty();
  set_inline_size(n);
  return data_.as_chars();
}
  162. inline void Cord::InlineRep::reduce_size(size_t n) {
  163. size_t tag = inline_size();
  164. assert(tag <= kMaxInline);
  165. assert(tag >= n);
  166. tag -= n;
  167. memset(data_.as_chars() + tag, 0, n);
  168. set_inline_size(tag);
  169. }
// Drops the first `n` inline bytes by sliding the remainder to the front and
// shrinking (and zero-filling) the tail via reduce_size().
inline void Cord::InlineRep::remove_prefix(size_t n) {
  cord_internal::SmallMemmove(data_.as_chars(), data_.as_chars() + n,
                              inline_size() - n);
  reduce_size(n);
}
  175. // Returns `rep` converted into a CordRepBtree.
  176. // Directly returns `rep` if `rep` is already a CordRepBtree.
  177. static absl::Nonnull<CordRepBtree*> ForceBtree(CordRep* rep) {
  178. return rep->IsBtree()
  179. ? rep->btree()
  180. : CordRepBtree::Create(cord_internal::RemoveCrcNode(rep));
  181. }
// Appends `tree` to a cord currently holding inline data. Any existing inline
// bytes are first copied into a flat node so they precede `tree`.
void Cord::InlineRep::AppendTreeToInlined(absl::Nonnull<CordRep*> tree,
                                          MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Append(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
// Appends `tree` to a cord that already holds a tree, converting the existing
// root to a btree if needed; `scope` keeps cordz sampling up to date.
void Cord::InlineRep::AppendTreeToTree(absl::Nonnull<CordRep*> tree,
                                       MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Append(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
// Appends `tree` to this cord, dispatching on the current representation.
// `tree` must be non-null, non-empty, and must not be a CRC node.
void Cord::InlineRep::AppendTree(absl::Nonnull<CordRep*> tree,
                                 MethodIdentifier method) {
  assert(tree != nullptr);
  assert(tree->length != 0);
  assert(!tree->IsCrc());
  if (data_.is_tree()) {
    AppendTreeToTree(tree, method);
  } else {
    AppendTreeToInlined(tree, method);
  }
}
// Prepends `tree` to a cord currently holding inline data. Any existing
// inline bytes are first copied into a flat node so they follow `tree`.
void Cord::InlineRep::PrependTreeToInlined(absl::Nonnull<CordRep*> tree,
                                           MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Prepend(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
// Prepends `tree` to a cord that already holds a tree, converting the
// existing root to a btree if needed; `scope` keeps cordz sampling up to date.
void Cord::InlineRep::PrependTreeToTree(absl::Nonnull<CordRep*> tree,
                                        MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Prepend(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
// Prepends `tree` to this cord, dispatching on the current representation.
// `tree` must be non-null, non-empty, and must not be a CRC node.
void Cord::InlineRep::PrependTree(absl::Nonnull<CordRep*> tree,
                                  MethodIdentifier method) {
  assert(tree != nullptr);
  assert(tree->length != 0);
  assert(!tree->IsCrc());
  if (data_.is_tree()) {
    PrependTreeToTree(tree, method);
  } else {
    PrependTreeToInlined(tree, method);
  }
}
// Searches for a non-full flat node at the rightmost leaf of the tree. If a
// suitable leaf is found, the function will update the length field for all
// nodes to account for the size increase. The append region address will be
// written to region and the actual size increase will be written to size.
static inline bool PrepareAppendRegion(
    absl::Nonnull<CordRep*> root, absl::Nonnull<absl::Nullable<char*>*> region,
    absl::Nonnull<size_t*> size, size_t max_length) {
  // A privately owned btree may expose spare capacity in its rightmost leaf
  // directly through GetAppendBuffer.
  if (root->IsBtree() && root->refcount.IsOne()) {
    Span<char> span = root->btree()->GetAppendBuffer(max_length);
    if (!span.empty()) {
      *region = span.data();
      *size = span.size();
      return true;
    }
  }

  // Otherwise only a privately owned flat root can be appended to in place.
  CordRep* dst = root;
  if (!dst->IsFlat() || !dst->refcount.IsOne()) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  const size_t in_use = dst->length;
  const size_t capacity = dst->flat()->Capacity();
  if (in_use == capacity) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  // Claim as much of the remaining capacity as `max_length` allows; the
  // caller is expected to fill the returned region immediately.
  const size_t size_increase = std::min(capacity - in_use, max_length);
  dst->length += size_increase;

  *region = dst->flat()->Data() + in_use;
  *size = size_increase;
  return true;
}
// Slow-path assignment, used when this rep and/or `src` holds a tree. Takes a
// new reference on `src`'s tree (if any) before releasing this rep's old tree
// so cordz bookkeeping observes a consistent state throughout.
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
  assert(&src != this);
  assert(is_tree() || src.is_tree());
  auto constexpr method = CordzUpdateTracker::kAssignCord;
  // If we hold no tree, `src` must hold one: share it and return.
  if (ABSL_PREDICT_TRUE(!is_tree())) {
    EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
    return;
  }

  CordRep* tree = as_tree();
  if (CordRep* src_tree = src.tree()) {
    // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
    // decide if this cord should be (or remains to be) sampled or not.
    data_.set_tree(CordRep::Ref(src_tree));
    CordzInfo::MaybeTrackCord(data_, src.data_, method);
  } else {
    CordzInfo::MaybeUntrackCord(data_.cordz_info());
    data_ = src.data_;
  }
  // Release the old tree only after the new state is fully installed.
  CordRep::Unref(tree);
}
  290. void Cord::InlineRep::UnrefTree() {
  291. if (is_tree()) {
  292. CordzInfo::MaybeUntrackCord(data_.cordz_info());
  293. CordRep::Unref(tree());
  294. }
  295. }
  296. // --------------------------------------------------------------------
  297. // Constructors and destructors
  298. Cord::Cord(absl::string_view src, MethodIdentifier method)
  299. : contents_(InlineData::kDefaultInit) {
  300. const size_t n = src.size();
  301. if (n <= InlineRep::kMaxInline) {
  302. contents_.set_data(src.data(), n);
  303. } else {
  304. CordRep* rep = NewTree(src.data(), n, 0);
  305. contents_.EmplaceTree(rep, method);
  306. }
  307. }
// Constructs a cord from a string-like rvalue. Large strings may have their
// allocation moved into an external rep (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
  if (src.size() <= InlineRep::kMaxInline) {
    contents_.set_data(src.data(), src.size());
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
  }
}

template Cord::Cord(std::string&& src);
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
  assert(contents_.is_tree());
  CordzInfo::MaybeUntrackCord(contents_.cordz_info());
  CordRep::Unref(VerifyTree(contents_.as_tree()));
}
  325. // --------------------------------------------------------------------
  326. // Mutators
  327. void Cord::Clear() {
  328. if (CordRep* tree = contents_.clear()) {
  329. CordRep::Unref(tree);
  330. }
  331. }
// Assigns a string too large to copy inline; the allocation may be moved into
// an external rep. Requires src.size() > kMaxBytesToCopy.
Cord& Cord::AssignLargeString(std::string&& src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  assert(src.size() > kMaxBytesToCopy);
  CordRep* rep = CordRepFromString(std::move(src));
  if (CordRep* tree = contents_.tree()) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    contents_.SetTree(rep, scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(rep, method);
  }
  return *this;
}
// Assigns `src`, reusing an existing privately-owned flat node when its
// capacity suffices, and falling back to inline storage or a new tree.
Cord& Cord::operator=(absl::string_view src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  const char* data = src.data();
  size_t length = src.size();
  CordRep* tree = contents_.tree();
  if (length <= InlineRep::kMaxInline) {
    // Embed into this->contents_, which is somewhat subtle:
    // - MaybeUntrackCord must be called before Unref(tree).
    // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
    // - set_data() must be called before Unref(tree) as it may reference tree.
    if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
    contents_.set_data(data, length);
    if (tree != nullptr) CordRep::Unref(tree);
    return *this;
  }
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    if (tree->IsFlat() && tree->flat()->Capacity() >= length &&
        tree->refcount.IsOne()) {
      // Copy in place if the existing FLAT node is reusable.
      memmove(tree->flat()->Data(), data, length);
      tree->length = length;
      VerifyTree(tree);
      return *this;
    }
    contents_.SetTree(NewTree(data, length, 0), scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(NewTree(data, length, 0), method);
  }
  return *this;
}
// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
// we keep it here to make diffs easier.

// Appends `src`, preferring (in order): spare capacity in the existing tree,
// the inline buffer, a perfectly sized new flat node, and finally btree
// growth for whatever remains.
void Cord::InlineRep::AppendArray(absl::string_view src,
                                  MethodIdentifier method) {
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.
  MaybeRemoveEmptyCrcNode();

  size_t appended = 0;
  CordRep* rep = tree();
  const CordRep* const root = rep;
  CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
  if (root != nullptr) {
    // Fill any in-place append region the existing tree can offer.
    rep = cord_internal::RemoveCrcNode(rep);
    char* region;
    if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
      memcpy(region, src.data(), appended);
    }
  } else {
    // Try to fit in the inline buffer if possible.
    size_t inline_length = inline_size();
    if (src.size() <= kMaxInline - inline_length) {
      // Append new data to embedded array
      set_inline_size(inline_length + src.size());
      memcpy(data_.as_chars() + inline_length, src.data(), src.size());
      return;
    }

    // Allocate flat to be a perfect fit on first append exceeding inlined size.
    // Subsequent growth will use amortized growth until we reach maximum flat
    // size.
    rep = CordRepFlat::New(inline_length + src.size());
    appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
    memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
    memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
    rep->length = inline_length + appended;
  }

  src.remove_prefix(appended);
  if (src.empty()) {
    CommitTree(root, rep, scope, method);
    return;
  }

  // TODO(b/192061034): keep legacy 10% growth rate: consider other rates.
  rep = ForceBtree(rep);
  const size_t min_growth = std::max<size_t>(rep->length / 10, src.size());
  rep = CordRepBtree::Append(rep->btree(), src, min_growth - src.size());

  CommitTree(root, rep, scope, method);
}
// Returns this cord's tree with an added reference; the cord keeps its own
// reference (copy semantics).
inline absl::Nonnull<CordRep*> Cord::TakeRep() const& {
  return CordRep::Ref(contents_.tree());
}
// Transfers ownership of the tree to the caller, leaving this cord empty
// (move semantics); no refcount adjustment is performed.
inline absl::Nonnull<CordRep*> Cord::TakeRep() && {
  CordRep* rep = contents_.tree();
  contents_.clear();
  return rep;
}
// Shared implementation for Append(const Cord&) and Append(Cord&&); `C` is a
// const lvalue or rvalue Cord, forwarded to TakeRep() so the rvalue overload
// can steal the source tree without a refcount bump.
template <typename C>
inline void Cord::AppendImpl(C&& src) {
  auto constexpr method = CordzUpdateTracker::kAppendCord;
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;
  if (empty()) {
    // Since destination is empty, we can avoid allocating a node,
    if (src.contents_.is_tree()) {
      // by taking the tree directly
      CordRep* rep =
          cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
      contents_.EmplaceTree(rep, method);
    } else {
      // or copying over inline data
      contents_.data_ = src.contents_.data_;
    }
    return;
  }

  // For short cords, it is faster to copy data if there is room in dst.
  const size_t src_size = src.contents_.size();
  if (src_size <= kMaxBytesToCopy) {
    CordRep* src_tree = src.contents_.tree();
    if (src_tree == nullptr) {
      // src has embedded data.
      contents_.AppendArray({src.contents_.data(), src_size}, method);
      return;
    }
    if (src_tree->IsFlat()) {
      // src tree just has one flat node.
      contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
      return;
    }
    if (&src == this) {
      // ChunkIterator below assumes that src is not modified during traversal.
      Append(Cord(src));
      return;
    }
    // TODO(mec): Should we only do this if "dst" has space?
    for (absl::string_view chunk : src.Chunks()) {
      Append(chunk);
    }
    return;
  }

  // Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
  CordRep* rep = cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
  contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
// Attempts to extract a flat node with at least `min_capacity` spare bytes
// from the tail of `rep`. On success the result's `extracted` field holds the
// flat and `tree` holds what remains (null if `rep` itself was the flat);
// otherwise `extracted` is null and `tree` is the unchanged `rep`.
static CordRep::ExtractResult ExtractAppendBuffer(absl::Nonnull<CordRep*> rep,
                                                  size_t min_capacity) {
  switch (rep->tag) {
    case cord_internal::BTREE:
      return CordRepBtree::ExtractAppendBuffer(rep->btree(), min_capacity);
    default:
      // A privately owned flat with enough spare capacity is itself the
      // extraction result.
      if (rep->IsFlat() && rep->refcount.IsOne() &&
          rep->flat()->Capacity() - rep->length >= min_capacity) {
        return {nullptr, rep};
      }
      return {rep, nullptr};
  }
}
// Moves any inline bytes of `data` into a newly created CordBuffer with room
// for `capacity` more bytes, leaving `data` empty.
static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size,
                                     size_t capacity) {
  // Watch out for overflow, people can ask for size_t::max().
  const size_t size = data.inline_size();
  const size_t max_capacity = std::numeric_limits<size_t>::max() - size;
  capacity = (std::min)(max_capacity, capacity) + size;
  // `block_size == 0` selects the default-limit sizing strategy.
  CordBuffer buffer =
      block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                 : CordBuffer::CreateWithDefaultLimit(capacity);
  cord_internal::SmallMemmove(buffer.data(), data.as_chars(), size);
  buffer.SetLength(size);
  data = {};
  return buffer;
}
// Slow path for GetAppendBuffer: either recycles spare capacity extracted
// from the current tree, or allocates a fresh buffer (absorbing any inline
// data in the process).
CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity,
                                         size_t min_capacity) {
  auto constexpr method = CordzUpdateTracker::kGetAppendBuffer;
  CordRep* tree = contents_.tree();
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    CordRep::ExtractResult result = ExtractAppendBuffer(tree, min_capacity);
    if (result.extracted != nullptr) {
      contents_.SetTreeOrEmpty(result.tree, scope);
      return CordBuffer(result.extracted->flat());
    }
    return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                      : CordBuffer::CreateWithDefaultLimit(capacity);
  }
  return CreateAppendBuffer(contents_.data_, block_size, capacity);
}
// Copy-append: shares `src`'s nodes where possible.
void Cord::Append(const Cord& src) { AppendImpl(src); }
// Move-append: may steal `src`'s tree, leaving `src` in a valid moved-from state.
void Cord::Append(Cord&& src) { AppendImpl(std::move(src)); }
// Appends a string-like rvalue; large strings may have their allocation moved
// into a rep instead of copied (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
void Cord::Append(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Append(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
  }
}

template void Cord::Append(std::string&& src);
// Prepends `src` to this cord, sharing its tree when it has one.
void Cord::Prepend(const Cord& src) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  CordRep* src_tree = src.contents_.tree();
  if (src_tree != nullptr) {
    CordRep::Ref(src_tree);
    contents_.PrependTree(cord_internal::RemoveCrcNode(src_tree),
                          CordzUpdateTracker::kPrependCord);
    return;
  }

  // `src` cord is inlined.
  absl::string_view src_contents(src.contents_.data(), src.contents_.size());
  return Prepend(src_contents);
}
// Prepends raw bytes, using the inline buffer when the combined data still
// fits and otherwise building a new tree node.
void Cord::PrependArray(absl::string_view src, MethodIdentifier method) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.
  if (!contents_.is_tree()) {
    size_t cur_size = contents_.inline_size();
    if (cur_size + src.size() <= InlineRep::kMaxInline) {
      // Use embedded storage.
      InlineData data;
      data.set_inline_size(cur_size + src.size());
      memcpy(data.as_chars(), src.data(), src.size());
      memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
      contents_.data_ = data;
      return;
    }
  }
  CordRep* rep = NewTree(src.data(), src.size(), 0);
  contents_.PrependTree(rep, method);
}
// Appends `src` (at most one flat's worth of data) without over-allocating:
// bytes go inline if they fit, else into a single flat node built from `src`.
void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t inline_length = contents_.inline_size();
    contents_.set_inline_size(inline_length + src.size());
    memcpy(contents_.data_.as_chars() + inline_length, src.data(), src.size());
  } else {
    contents_.AppendTree(CordRepFlat::Create(src), method);
  }
}
// Prepends `src` (at most one flat's worth of data) without over-allocating:
// bytes go inline if they fit, else into a single flat node built from `src`.
void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t cur_size = contents_.inline_size();
    // Rebuild the inline buffer with `src` in front of the current bytes.
    InlineData data;
    data.set_inline_size(cur_size + src.size());
    memcpy(data.as_chars(), src.data(), src.size());
    memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
    contents_.data_ = data;
  } else {
    contents_.PrependTree(CordRepFlat::Create(src), method);
  }
}
// Prepends a string-like rvalue; large strings may have their allocation
// moved into a rep instead of copied (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
inline void Cord::Prepend(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Prepend(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
  }
}

template void Cord::Prepend(std::string&& src);
// Removes the first `n` bytes of the cord. CHECK-fails if n > size().
void Cord::RemovePrefix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested prefix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.remove_prefix(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemovePrefix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed: drop the tree entirely.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      CordRep* old = tree;
      tree = tree->btree()->SubTree(n, tree->length - n);
      CordRep::Unref(old);
    } else if (tree->IsSubstring() && tree->refcount.IsOne()) {
      // Privately owned substring: adjust its window in place.
      tree->substring()->start += n;
      tree->length -= n;
    } else {
      // Shared node: wrap it in a new substring covering the remaining suffix.
      CordRep* rep = CordRepSubstring::Substring(tree, n, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
// Removes the last `n` bytes of the cord. CHECK-fails if n > size().
void Cord::RemoveSuffix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested suffix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.reduce_size(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed: drop the tree entirely.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      tree = CordRepBtree::RemoveSuffix(tree->btree(), n);
    } else if (!tree->IsExternal() && tree->refcount.IsOne()) {
      // Privately owned flat or substring: truncate in place.
      assert(tree->IsFlat() || tree->IsSubstring());
      tree->length -= n;
    } else {
      // Shared node: wrap it in a new substring covering the remaining prefix.
      CordRep* rep = CordRepSubstring::Substring(tree, 0, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
// Returns a new cord holding `new_size` bytes starting at `pos`.
// Out-of-range arguments are clamped rather than failing.
Cord Cord::Subcord(size_t pos, size_t new_size) const {
  Cord sub_cord;
  size_t length = size();
  if (pos > length) pos = length;
  if (new_size > length - pos) new_size = length - pos;
  if (new_size == 0) return sub_cord;
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    // Source is inline: copy the requested range directly.
    sub_cord.contents_.set_data(contents_.data() + pos, new_size);
    return sub_cord;
  }
  if (new_size <= InlineRep::kMaxInline) {
    // Result fits inline: gather the bytes chunk by chunk into the inline
    // buffer instead of building a (more expensive) tree.
    sub_cord.contents_.set_inline_size(new_size);
    char* dest = sub_cord.contents_.data_.as_chars();
    Cord::ChunkIterator it = chunk_begin();
    it.AdvanceBytes(pos);
    size_t remaining_size = new_size;
    while (remaining_size > it->size()) {
      cord_internal::SmallMemmove(dest, it->data(), it->size());
      remaining_size -= it->size();
      dest += it->size();
      ++it;
    }
    cord_internal::SmallMemmove(dest, it->data(), remaining_size);
    return sub_cord;
  }
  // Large result: share the underlying tree via a subtree/substring node
  // instead of copying bytes. Any CRC wrapper does not carry over.
  tree = cord_internal::SkipCrcNode(tree);
  if (tree->IsBtree()) {
    tree = tree->btree()->SubTree(pos, new_size);
  } else {
    tree = CordRepSubstring::Substring(tree, pos, new_size);
  }
  sub_cord.contents_.EmplaceTree(tree, contents_.data_,
                                 CordzUpdateTracker::kSubCord);
  return sub_cord;
}
  693. // --------------------------------------------------------------------
  694. // Comparators
  695. namespace {
  696. int ClampResult(int memcmp_res) {
  697. return static_cast<int>(memcmp_res > 0) - static_cast<int>(memcmp_res < 0);
  698. }
  699. int CompareChunks(absl::Nonnull<absl::string_view*> lhs,
  700. absl::Nonnull<absl::string_view*> rhs,
  701. absl::Nonnull<size_t*> size_to_compare) {
  702. size_t compared_size = std::min(lhs->size(), rhs->size());
  703. assert(*size_to_compare >= compared_size);
  704. *size_to_compare -= compared_size;
  705. int memcmp_res = ::memcmp(lhs->data(), rhs->data(), compared_size);
  706. if (memcmp_res != 0) return memcmp_res;
  707. lhs->remove_prefix(compared_size);
  708. rhs->remove_prefix(compared_size);
  709. return 0;
  710. }
  711. // This overload set computes comparison results from memcmp result. This
  712. // interface is used inside GenericCompare below. Different implementations
  713. // are specialized for int and bool. For int we clamp result to {-1, 0, 1}
  714. // set. For bool we just interested in "value == 0".
  715. template <typename ResultType>
  716. ResultType ComputeCompareResult(int memcmp_res) {
  717. return ClampResult(memcmp_res);
  718. }
  719. template <>
  720. bool ComputeCompareResult<bool>(int memcmp_res) {
  721. return memcmp_res == 0;
  722. }
  723. } // namespace
// Helper routine. Locates the first flat or external chunk of the Cord without
// initializing the iterator, and returns a string_view referencing the data.
inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
  if (!is_tree()) {
    // Inline representation: the data itself is the (single) chunk.
    return absl::string_view(data_.as_chars(), data_.inline_size());
  }
  CordRep* node = cord_internal::SkipCrcNode(tree());
  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data(), node->length);
  }
  if (node->IsExternal()) {
    return absl::string_view(node->external()->base, node->length);
  }
  if (node->IsBtree()) {
    // Descend along the leftmost edge to the height-0 leaf node.
    CordRepBtree* tree = node->btree();
    int height = tree->height();
    while (--height >= 0) {
      tree = tree->Edge(CordRepBtree::kFront)->btree();
    }
    return tree->Data(tree->begin());
  }
  // Get the child node if we encounter a SUBSTRING.
  size_t offset = 0;
  size_t length = node->length;
  assert(length != 0);
  if (node->IsSubstring()) {
    offset = node->substring()->start;
    node = node->substring()->child;
  }
  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data() + offset, length);
  }
  assert(node->IsExternal() && "Expect FLAT or EXTERNAL node here");
  return absl::string_view(node->external()->base + offset, length);
}
// Attaches `state` to the cord by wrapping its root in a CordRepCrc node.
void Cord::SetCrcCordState(crc_internal::CrcCordState state) {
  auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum;
  if (empty()) {
    // Empty cord: a CRC node with a null child records the state.
    contents_.MaybeRemoveEmptyCrcNode();
    CordRep* rep = CordRepCrc::New(nullptr, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else if (!contents_.is_tree()) {
    // Inline cord: promote to a flat tree first so it can be wrapped.
    CordRep* rep = contents_.MakeFlatWithExtraCapacity(0);
    rep = CordRepCrc::New(rep, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else {
    const CordzUpdateScope scope(contents_.data_.cordz_info(), method);
    CordRep* rep = CordRepCrc::New(contents_.data_.as_tree(), std::move(state));
    contents_.SetTree(rep, scope);
  }
}
  775. void Cord::SetExpectedChecksum(uint32_t crc) {
  776. // Construct a CrcCordState with a single chunk.
  777. crc_internal::CrcCordState state;
  778. state.mutable_rep()->prefix_crc.push_back(
  779. crc_internal::CrcCordState::PrefixCrc(size(), absl::crc32c_t{crc}));
  780. SetCrcCordState(std::move(state));
  781. }
  782. absl::Nullable<const crc_internal::CrcCordState*> Cord::MaybeGetCrcCordState()
  783. const {
  784. if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
  785. return nullptr;
  786. }
  787. return &contents_.tree()->crc()->crc_cord_state;
  788. }
  789. absl::optional<uint32_t> Cord::ExpectedChecksum() const {
  790. if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
  791. return absl::nullopt;
  792. }
  793. return static_cast<uint32_t>(
  794. contents_.tree()->crc()->crc_cord_state.Checksum());
  795. }
// Chunk-wise comparison tail used once the first chunks (of which
// `compared_size` bytes already compared equal) are exhausted.
inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Refills `*chunk` from `*it` when drained; returns false at end of cord.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };
  Cord::ChunkIterator lhs_it = chunk_begin();
  // compared_size is inside first chunk.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.
  while (advance(&lhs_it, &lhs_chunk) && !rhs.empty()) {
    int comparison_result = CompareChunks(&lhs_chunk, &rhs, &size_to_compare);
    if (comparison_result != 0) return comparison_result;
    if (size_to_compare == 0) return 0;
  }
  // One side ran out first; the side with data left compares greater.
  return static_cast<int>(rhs.empty()) - static_cast<int>(lhs_chunk.empty());
}
// Cord-vs-cord variant of the chunk-wise comparison tail; `compared_size`
// bytes of both first chunks already compared equal.
inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Refills `*chunk` from `*it` when drained; returns false at end of cord.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };
  Cord::ChunkIterator lhs_it = chunk_begin();
  Cord::ChunkIterator rhs_it = rhs.chunk_begin();
  // compared_size is inside both first chunks.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  absl::string_view rhs_chunk =
      (rhs_it.bytes_remaining_ != 0) ? *rhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs_chunk.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs_chunk.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.
  while (advance(&lhs_it, &lhs_chunk) && advance(&rhs_it, &rhs_chunk)) {
    int memcmp_res = CompareChunks(&lhs_chunk, &rhs_chunk, &size_to_compare);
    if (memcmp_res != 0) return memcmp_res;
    if (size_to_compare == 0) return 0;
  }
  // One side ran out first; the side with data left compares greater.
  return static_cast<int>(rhs_chunk.empty()) -
         static_cast<int>(lhs_chunk.empty());
}
  852. inline absl::string_view Cord::GetFirstChunk(const Cord& c) {
  853. if (c.empty()) return {};
  854. return c.contents_.FindFlatStartPiece();
  855. }
// string_view overload: the whole view is its own (single) first chunk.
inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) {
  return sv;
}
// Compares up to 'size_to_compare' bytes of 'lhs' with 'rhs'. It is assumed
// that 'size_to_compare' is greater than or equal to the size of the smallest
// of the first chunks.
template <typename ResultType, typename RHS>
ResultType GenericCompare(const Cord& lhs, const RHS& rhs,
                          size_t size_to_compare) {
  // Fast path: compare the first (contiguous) chunk of each side directly.
  absl::string_view lhs_chunk = Cord::GetFirstChunk(lhs);
  absl::string_view rhs_chunk = Cord::GetFirstChunk(rhs);
  size_t compared_size = std::min(lhs_chunk.size(), rhs_chunk.size());
  assert(size_to_compare >= compared_size);
  int memcmp_res = ::memcmp(lhs_chunk.data(), rhs_chunk.data(), compared_size);
  if (compared_size == size_to_compare || memcmp_res != 0) {
    // Decided entirely by the first chunks: either all requested bytes were
    // compared, or a difference was already found.
    return ComputeCompareResult<ResultType>(memcmp_res);
  }
  // Otherwise continue chunk by chunk over the remainder.
  return ComputeCompareResult<ResultType>(
      lhs.CompareSlowPath(rhs, compared_size, size_to_compare));
}
// Returns true if the first `size_to_compare` bytes of this cord and `rhs`
// are equal.
bool Cord::EqualsImpl(absl::string_view rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
// Returns true if the first `size_to_compare` bytes of this cord and `rhs`
// are equal.
bool Cord::EqualsImpl(const Cord& rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
  881. template <typename RHS>
  882. inline int SharedCompareImpl(const Cord& lhs, const RHS& rhs) {
  883. size_t lhs_size = lhs.size();
  884. size_t rhs_size = rhs.size();
  885. if (lhs_size == rhs_size) {
  886. return GenericCompare<int>(lhs, rhs, lhs_size);
  887. }
  888. if (lhs_size < rhs_size) {
  889. auto data_comp_res = GenericCompare<int>(lhs, rhs, lhs_size);
  890. return data_comp_res == 0 ? -1 : data_comp_res;
  891. }
  892. auto data_comp_res = GenericCompare<int>(lhs, rhs, rhs_size);
  893. return data_comp_res == 0 ? +1 : data_comp_res;
  894. }
// Three-way lexicographic comparison against a string_view: -1, 0, or +1.
int Cord::Compare(absl::string_view rhs) const {
  return SharedCompareImpl(*this, rhs);
}
// Three-way lexicographic comparison against another Cord: -1, 0, or +1.
int Cord::CompareImpl(const Cord& rhs) const {
  return SharedCompareImpl(*this, rhs);
}
  901. bool Cord::EndsWith(absl::string_view rhs) const {
  902. size_t my_size = size();
  903. size_t rhs_size = rhs.size();
  904. if (my_size < rhs_size) return false;
  905. Cord tmp(*this);
  906. tmp.RemovePrefix(my_size - rhs_size);
  907. return tmp.EqualsImpl(rhs, rhs_size);
  908. }
  909. bool Cord::EndsWith(const Cord& rhs) const {
  910. size_t my_size = size();
  911. size_t rhs_size = rhs.size();
  912. if (my_size < rhs_size) return false;
  913. Cord tmp(*this);
  914. tmp.RemovePrefix(my_size - rhs_size);
  915. return tmp.EqualsImpl(rhs, rhs_size);
  916. }
  917. // --------------------------------------------------------------------
  918. // Misc.
  919. Cord::operator std::string() const {
  920. std::string s;
  921. absl::CopyCordToString(*this, &s);
  922. return s;
  923. }
  924. void CopyCordToString(const Cord& src, absl::Nonnull<std::string*> dst) {
  925. if (!src.contents_.is_tree()) {
  926. src.contents_.CopyTo(dst);
  927. } else {
  928. absl::strings_internal::STLStringResizeUninitialized(dst, src.size());
  929. src.CopyToArraySlowPath(&(*dst)[0]);
  930. }
  931. }
  932. void AppendCordToString(const Cord& src, absl::Nonnull<std::string*> dst) {
  933. const size_t cur_dst_size = dst->size();
  934. const size_t new_dst_size = cur_dst_size + src.size();
  935. absl::strings_internal::STLStringResizeUninitializedAmortized(dst,
  936. new_dst_size);
  937. char* append_ptr = &(*dst)[cur_dst_size];
  938. src.CopyToArrayImpl(append_ptr);
  939. }
  940. void Cord::CopyToArraySlowPath(absl::Nonnull<char*> dst) const {
  941. assert(contents_.is_tree());
  942. absl::string_view fragment;
  943. if (GetFlatAux(contents_.tree(), &fragment)) {
  944. memcpy(dst, fragment.data(), fragment.size());
  945. return;
  946. }
  947. for (absl::string_view chunk : Chunks()) {
  948. memcpy(dst, chunk.data(), chunk.size());
  949. dst += chunk.size();
  950. }
  951. }
// Consumes `n` bytes from the iterator's position and returns them as a new
// Cord, sharing tree nodes with the source where possible.
Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
  ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
                        "Attempted to iterate past `end()`");
  Cord subcord;
  auto constexpr method = CordzUpdateTracker::kCordReader;
  if (n <= InlineRep::kMaxInline) {
    // Range to read fits in inline data. Flatten it.
    char* data = subcord.contents_.set_data(n);
    // Copy whole chunks until the remainder fits in the current chunk.
    while (n > current_chunk_.size()) {
      memcpy(data, current_chunk_.data(), current_chunk_.size());
      data += current_chunk_.size();
      n -= current_chunk_.size();
      ++*this;
    }
    memcpy(data, current_chunk_.data(), n);
    if (n < current_chunk_.size()) {
      // Partially consumed the current chunk; keep the rest.
      RemoveChunkPrefix(n);
    } else if (n > 0) {
      // Exactly consumed the current chunk; advance to the next one.
      ++*this;
    }
    return subcord;
  }
  if (btree_reader_) {
    size_t chunk_size = current_chunk_.size();
    if (n <= chunk_size && n <= kMaxBytesToCopy) {
      // Small read within the current chunk: copy rather than share nodes.
      subcord = Cord(current_chunk_.substr(0, n), method);
      if (n < chunk_size) {
        current_chunk_.remove_prefix(n);
      } else {
        current_chunk_ = btree_reader_.Next();
      }
    } else {
      // Let the btree reader build a shared subtree for the range.
      CordRep* rep;
      current_chunk_ = btree_reader_.Read(n, chunk_size, rep);
      subcord.contents_.EmplaceTree(rep, method);
    }
    bytes_remaining_ -= n;
    return subcord;
  }
  // Short circuit if reading the entire data edge.
  assert(current_leaf_ != nullptr);
  if (n == current_leaf_->length) {
    bytes_remaining_ = 0;
    current_chunk_ = {};
    CordRep* tree = CordRep::Ref(current_leaf_);
    subcord.contents_.EmplaceTree(VerifyTree(tree), method);
    return subcord;
  }
  // From this point on, we need a partial substring node.
  // Get pointer to the underlying flat or external data payload and
  // compute data pointer and offset into current flat or external.
  CordRep* payload = current_leaf_->IsSubstring()
                         ? current_leaf_->substring()->child
                         : current_leaf_;
  const char* data = payload->IsExternal() ? payload->external()->base
                                           : payload->flat()->Data();
  const size_t offset = static_cast<size_t>(current_chunk_.data() - data);
  auto* tree = CordRepSubstring::Substring(payload, offset, n);
  subcord.contents_.EmplaceTree(VerifyTree(tree), method);
  bytes_remaining_ -= n;
  current_chunk_.remove_prefix(n);
  return subcord;
}
// Returns the byte at index `i`. O(1) for inline/flat data; otherwise walks
// down the tree toward the leaf containing the offset.
char Cord::operator[](size_t i) const {
  ABSL_HARDENING_ASSERT(i < size());
  size_t offset = i;
  const CordRep* rep = contents_.tree();
  if (rep == nullptr) {
    // Inline representation: index the inline bytes directly.
    return contents_.data()[i];
  }
  rep = cord_internal::SkipCrcNode(rep);
  while (true) {
    assert(rep != nullptr);
    assert(offset < rep->length);
    if (rep->IsFlat()) {
      // Get the "i"th character directly from the flat array.
      return rep->flat()->Data()[offset];
    } else if (rep->IsBtree()) {
      return rep->btree()->GetCharacter(offset);
    } else if (rep->IsExternal()) {
      // Get the "i"th character from the external array.
      return rep->external()->base[offset];
    } else {
      // This must be a substring node, so bypass it to get to the child.
      assert(rep->IsSubstring());
      offset += rep->substring()->start;
      rep = rep->substring()->child;
    }
  }
}
namespace {

// Tests whether the sequence of chunks beginning at `position` starts with
// `needle`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle.size()`.
bool IsSubstringInCordAt(absl::Cord::CharIterator position,
                         absl::string_view needle) {
  auto haystack_chunk = absl::Cord::ChunkRemaining(position);
  while (true) {
    // Precondition is that `absl::Cord::ChunkRemaining(position)` is not
    // empty. This assert will trigger if that is not true.
    assert(!haystack_chunk.empty());
    auto min_length = std::min(haystack_chunk.size(), needle.size());
    // Match (and consume) as much of the needle as this chunk covers.
    if (!absl::ConsumePrefix(&needle, haystack_chunk.substr(0, min_length))) {
      return false;
    }
    // Fully consumed the needle: every byte matched.
    if (needle.empty()) {
      return true;
    }
    absl::Cord::Advance(&position, min_length);
    haystack_chunk = absl::Cord::ChunkRemaining(position);
  }
}

}  // namespace
// A few options how this could be implemented:
// (a) Flatten the Cord and find, i.e.
//     haystack.Flatten().find(needle)
//     For large 'haystack' (where Cord makes sense to be used), this copies
//     the whole 'haystack' and can be slow.
// (b) Use std::search, i.e.
//     std::search(haystack.char_begin(), haystack.char_end(),
//                 needle.begin(), needle.end())
//     This avoids the copy, but compares one byte at a time, and branches a
//     lot every time it has to advance. It is also not possible to use
//     std::search as is, because CharIterator is only an input iterator, not a
//     forward iterator.
// (c) Use string_view::find in each fragment, and specifically handle fragment
//     boundaries.
//
// This currently implements option (b).
absl::Cord::CharIterator absl::Cord::FindImpl(CharIterator it,
                                              absl::string_view needle) const {
  // Ensure preconditions are met by callers first.
  // Needle must not be empty.
  assert(!needle.empty());
  // Haystack must be at least as large as needle.
  assert(it.chunk_iterator_.bytes_remaining_ >= needle.size());
  // Cord is a sequence of chunks. To find `needle` we go chunk by chunk looking
  // for the first char of needle, up until we have advanced `N` defined as
  // `haystack.size() - needle.size()`. If we find the first char of needle at
  // `P` and `P` is less than `N`, we then call `IsSubstringInCordAt` to
  // see if this is the needle. If not, we advance to `P + 1` and try again.
  while (it.chunk_iterator_.bytes_remaining_ >= needle.size()) {
    auto haystack_chunk = Cord::ChunkRemaining(it);
    assert(!haystack_chunk.empty());
    // Look for the first char of `needle` in the current chunk.
    auto idx = haystack_chunk.find(needle.front());
    if (idx == absl::string_view::npos) {
      // No potential match in this chunk, advance past it.
      Cord::Advance(&it, haystack_chunk.size());
      continue;
    }
    // We found the start of a potential match in the chunk. Advance the
    // iterator and haystack chunk to the match position.
    Cord::Advance(&it, idx);
    // Check if there is enough haystack remaining to actually have a match.
    if (it.chunk_iterator_.bytes_remaining_ < needle.size()) {
      break;
    }
    // Check if this is `needle`.
    if (IsSubstringInCordAt(it, needle)) {
      return it;
    }
    // No match, increment the iterator for the next attempt.
    Cord::Advance(&it, 1);
  }
  // If we got here, we did not find `needle`.
  return char_end();
}
  1122. absl::Cord::CharIterator absl::Cord::Find(absl::string_view needle) const {
  1123. if (needle.empty()) {
  1124. return char_begin();
  1125. }
  1126. if (needle.size() > size()) {
  1127. return char_end();
  1128. }
  1129. if (needle.size() == size()) {
  1130. return *this == needle ? char_begin() : char_end();
  1131. }
  1132. return FindImpl(char_begin(), needle);
  1133. }
namespace {

// Tests whether the sequence of chunks beginning at `haystack` starts with the
// sequence of chunks beginning at `needle_begin` and extending to `needle_end`.
//
// REQUIRES: remaining `absl::Cord` starting at `haystack` is greater than or
// equal to the length of the needle range `[needle_begin, needle_end)`.
bool IsSubcordInCordAt(absl::Cord::CharIterator haystack,
                       absl::Cord::CharIterator needle_begin,
                       absl::Cord::CharIterator needle_end) {
  while (needle_begin != needle_end) {
    auto haystack_chunk = absl::Cord::ChunkRemaining(haystack);
    assert(!haystack_chunk.empty());
    auto needle_chunk = absl::Cord::ChunkRemaining(needle_begin);
    // Compare only the overlap of the two current chunks, then advance both
    // sides by that amount (chunk boundaries need not line up).
    auto min_length = std::min(haystack_chunk.size(), needle_chunk.size());
    if (haystack_chunk.substr(0, min_length) !=
        needle_chunk.substr(0, min_length)) {
      return false;
    }
    absl::Cord::Advance(&haystack, min_length);
    absl::Cord::Advance(&needle_begin, min_length);
  }
  return true;
}

// Tests whether the sequence of chunks beginning at `position` starts with the
// cord `needle`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle.size()`.
bool IsSubcordInCordAt(absl::Cord::CharIterator position,
                       const absl::Cord& needle) {
  return IsSubcordInCordAt(position, needle.char_begin(), needle.char_end());
}

}  // namespace
// Returns an iterator to the first occurrence of the cord `needle`, or
// char_end(). Searches by locating the needle's first chunk via FindImpl,
// then verifying the remainder chunk-by-chunk.
absl::Cord::CharIterator absl::Cord::Find(const absl::Cord& needle) const {
  if (needle.empty()) {
    return char_begin();
  }
  const auto needle_size = needle.size();
  if (needle_size > size()) {
    return char_end();
  }
  if (needle_size == size()) {
    // Same length: the needle matches only if the cords are byte-equal.
    return *this == needle ? char_begin() : char_end();
  }
  const auto needle_chunk = Cord::ChunkRemaining(needle.char_begin());
  auto haystack_it = char_begin();
  while (true) {
    haystack_it = FindImpl(haystack_it, needle_chunk);
    if (haystack_it == char_end() ||
        haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    // We found the first chunk of `needle` at `haystack_it` but not the entire
    // subcord. Advance past the first chunk and check for the remainder.
    auto haystack_advanced_it = haystack_it;
    auto needle_it = needle.char_begin();
    Cord::Advance(&haystack_advanced_it, needle_chunk.size());
    Cord::Advance(&needle_it, needle_chunk.size());
    if (IsSubcordInCordAt(haystack_advanced_it, needle_it, needle.char_end())) {
      return haystack_it;
    }
    Cord::Advance(&haystack_it, 1);
    if (haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    if (haystack_it.chunk_iterator_.bytes_remaining_ == needle_size) {
      // Special case, if there is exactly `needle_size` bytes remaining, the
      // subcord is either at `haystack_it` or not at all.
      if (IsSubcordInCordAt(haystack_it, needle)) {
        return haystack_it;
      }
      break;
    }
  }
  return char_end();
}
  1210. bool Cord::Contains(absl::string_view rhs) const {
  1211. return rhs.empty() || Find(rhs) != char_end();
  1212. }
  1213. bool Cord::Contains(const absl::Cord& rhs) const {
  1214. return rhs.empty() || Find(rhs) != char_end();
  1215. }
// Replaces the tree with a single contiguous buffer holding all the cord's
// bytes and returns a view of that buffer.
absl::string_view Cord::FlattenSlowPath() {
  assert(contents_.is_tree());
  size_t total_size = size();
  CordRep* new_rep;
  char* new_buffer;
  // Try to put the contents into a new flat rep. If they won't fit in the
  // biggest possible flat node, use an external rep instead.
  if (total_size <= kMaxFlatLength) {
    new_rep = CordRepFlat::New(total_size);
    new_rep->length = total_size;
    new_buffer = new_rep->flat()->Data();
    CopyToArraySlowPath(new_buffer);
  } else {
    // Heap-allocate the buffer and hand ownership to an external rep whose
    // releaser deallocates it.
    new_buffer = std::allocator<char>().allocate(total_size);
    CopyToArraySlowPath(new_buffer);
    new_rep = absl::cord_internal::NewExternalRep(
        absl::string_view(new_buffer, total_size), [](absl::string_view s) {
          std::allocator<char>().deallocate(const_cast<char*>(s.data()),
                                            s.size());
        });
  }
  CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
  // Drop the old tree and install the flattened replacement.
  CordRep::Unref(contents_.as_tree());
  contents_.SetTree(new_rep, scope);
  return absl::string_view(new_buffer, total_size);
}
// If the tree rooted at `rep` stores its bytes contiguously (flat, external,
// single-chunk btree, or a substring of one of those), sets `*fragment` to a
// view of those bytes and returns true; otherwise returns false.
/* static */ bool Cord::GetFlatAux(absl::Nonnull<CordRep*> rep,
                                   absl::Nonnull<absl::string_view*> fragment) {
  assert(rep != nullptr);
  if (rep->length == 0) {
    // An empty rep is trivially contiguous: report an empty fragment.
    *fragment = absl::string_view();
    return true;
  }
  rep = cord_internal::SkipCrcNode(rep);
  if (rep->IsFlat()) {
    *fragment = absl::string_view(rep->flat()->Data(), rep->length);
    return true;
  } else if (rep->IsExternal()) {
    *fragment = absl::string_view(rep->external()->base, rep->length);
    return true;
  } else if (rep->IsBtree()) {
    return rep->btree()->IsFlat(fragment);
  } else if (rep->IsSubstring()) {
    // Resolve the substring window against its child node.
    CordRep* child = rep->substring()->child;
    if (child->IsFlat()) {
      *fragment = absl::string_view(
          child->flat()->Data() + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsExternal()) {
      *fragment = absl::string_view(
          child->external()->base + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsBtree()) {
      return child->btree()->IsFlat(rep->substring()->start, rep->length,
                                    fragment);
    }
  }
  return false;
}
  1275. /* static */ void Cord::ForEachChunkAux(
  1276. absl::Nonnull<absl::cord_internal::CordRep*> rep,
  1277. absl::FunctionRef<void(absl::string_view)> callback) {
  1278. assert(rep != nullptr);
  1279. if (rep->length == 0) return;
  1280. rep = cord_internal::SkipCrcNode(rep);
  1281. if (rep->IsBtree()) {
  1282. ChunkIterator it(rep), end;
  1283. while (it != end) {
  1284. callback(*it);
  1285. ++it;
  1286. }
  1287. return;
  1288. }
  1289. // This is a leaf node, so invoke our callback.
  1290. absl::cord_internal::CordRep* current_node = cord_internal::SkipCrcNode(rep);
  1291. absl::string_view chunk;
  1292. bool success = GetFlatAux(current_node, &chunk);
  1293. assert(success);
  1294. if (success) {
  1295. callback(chunk);
  1296. }
  1297. }
// Writes a human-readable description of the tree rooted at `nonnull_rep` to
// `os`, one node per line (refcount, length, optional address, tag-specific
// details). Wrapper nodes (CRC, SUBSTRING) increase the indent and the walk
// continues into their child; leaf and btree nodes terminate the loop.
static void DumpNode(absl::Nonnull<CordRep*> nonnull_rep, bool include_data,
                     absl::Nonnull<std::ostream*> os, int indent) {
  CordRep* rep = nonnull_rep;
  const int kIndentStep = 1;
  for (;;) {
    *os << std::setw(3) << (rep == nullptr ? 0 : rep->refcount.Get());
    *os << " " << std::setw(7) << (rep == nullptr ? 0 : rep->length);
    *os << " [";
    // When data is requested, also print the node's address.
    if (include_data) *os << static_cast<void*>(rep);
    *os << "]";
    *os << " " << std::setw(indent) << "";
    bool leaf = false;
    if (rep == nullptr) {
      *os << "NULL\n";
      leaf = true;
    } else if (rep->IsCrc()) {
      *os << "CRC crc=" << rep->crc()->crc_cord_state.Checksum() << "\n";
      indent += kIndentStep;
      rep = rep->crc()->child;
    } else if (rep->IsSubstring()) {
      *os << "SUBSTRING @ " << rep->substring()->start << "\n";
      indent += kIndentStep;
      rep = rep->substring()->child;
    } else {  // Leaf or ring
      leaf = true;
      if (rep->IsExternal()) {
        *os << "EXTERNAL [";
        if (include_data)
          *os << absl::CEscape(
              absl::string_view(rep->external()->base, rep->length));
        *os << "]\n";
      } else if (rep->IsFlat()) {
        *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
        if (include_data)
          *os << absl::CEscape(
              absl::string_view(rep->flat()->Data(), rep->length));
        *os << "]\n";
      } else {
        // Remaining tag: btree; delegate to its own dumper.
        CordRepBtree::Dump(rep, /*label=*/"", include_data, *os);
      }
    }
    if (leaf) {
      break;
    }
  }
}
// Builds a diagnostic message naming `node` followed by a full data dump of
// the tree rooted at `root`. Used as the failure message for the invariant
// checks in VerifyNode below.
static std::string ReportError(absl::Nonnull<CordRep*> root,
                               absl::Nonnull<CordRep*> node) {
  std::ostringstream buf;
  buf << "Error at node " << node << " in:";
  DumpNode(root, true, &buf);
  return buf.str();
}
// Validates structural invariants of the nodes reachable from `start_node`,
// CHECK-failing with a tree dump on any violation. Always returns true so it
// can be used inside assertion expressions.
static bool VerifyNode(absl::Nonnull<CordRep*> root,
                       absl::Nonnull<CordRep*> start_node) {
  absl::InlinedVector<absl::Nonnull<CordRep*>, 2> worklist;
  worklist.push_back(start_node);
  do {
    CordRep* node = worklist.back();
    worklist.pop_back();
    ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
    if (node != root) {
      // Only the root may be zero-length or carry a CRC wrapper.
      ABSL_INTERNAL_CHECK(node->length != 0, ReportError(root, node));
      ABSL_INTERNAL_CHECK(!node->IsCrc(), ReportError(root, node));
    }
    if (node->IsFlat()) {
      // A flat's logical length can never exceed its allocated capacity.
      ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
                          ReportError(root, node));
    } else if (node->IsExternal()) {
      ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
                          ReportError(root, node));
    } else if (node->IsSubstring()) {
      // A substring's window [start, start + length) must lie inside its
      // child.
      ABSL_INTERNAL_CHECK(
          node->substring()->start < node->substring()->child->length,
          ReportError(root, node));
      ABSL_INTERNAL_CHECK(node->substring()->start + node->length <=
                              node->substring()->child->length,
                          ReportError(root, node));
    } else if (node->IsCrc()) {
      // A CRC node may only have a null child when its length is zero;
      // otherwise its length must match its child's.
      ABSL_INTERNAL_CHECK(
          node->crc()->child != nullptr || node->crc()->length == 0,
          ReportError(root, node));
      if (node->crc()->child != nullptr) {
        ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length,
                            ReportError(root, node));
        worklist.push_back(node->crc()->child);
      }
    }
  } while (!worklist.empty());
  return true;
}
// Streams the cord's bytes to `out` chunk by chunk, without flattening.
std::ostream& operator<<(std::ostream& out, const Cord& cord) {
  for (absl::string_view chunk : cord.Chunks()) {
    out.write(chunk.data(), static_cast<std::streamsize>(chunk.size()));
  }
  return out;
}
namespace strings_internal {

// Accessors exposing cord-internal constants and encodings (flat overhead,
// flat tag <-> length mapping, rep struct sizes) for use by tests.
size_t CordTestAccess::FlatOverhead() { return cord_internal::kFlatOverhead; }
size_t CordTestAccess::MaxFlatLength() { return cord_internal::kMaxFlatLength; }
size_t CordTestAccess::FlatTagToLength(uint8_t tag) {
  return cord_internal::TagToLength(tag);
}
uint8_t CordTestAccess::LengthToTag(size_t s) {
  ABSL_INTERNAL_CHECK(s <= kMaxFlatLength, absl::StrCat("Invalid length ", s));
  // Tags encode the total allocated size, which includes the flat overhead.
  return cord_internal::AllocatedSizeToTag(s + cord_internal::kFlatOverhead);
}
size_t CordTestAccess::SizeofCordRepExternal() {
  return sizeof(CordRepExternal);
}
size_t CordTestAccess::SizeofCordRepSubstring() {
  return sizeof(CordRepSubstring);
}

}  // namespace strings_internal
  1412. ABSL_NAMESPACE_END
  1413. } // namespace absl