cord.cc 53 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576
  1. // Copyright 2020 The Abseil Authors.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // https://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. #include "absl/strings/cord.h"
  15. #include <algorithm>
  16. #include <cassert>
  17. #include <cstddef>
  18. #include <cstdint>
  19. #include <cstdio>
  20. #include <cstdlib>
  21. #include <cstring>
  22. #include <iomanip>
  23. #include <ios>
  24. #include <iostream>
  25. #include <limits>
  26. #include <memory>
  27. #include <ostream>
  28. #include <sstream>
  29. #include <string>
  30. #include <utility>
  31. #include "absl/base/attributes.h"
  32. #include "absl/base/config.h"
  33. #include "absl/base/internal/endian.h"
  34. #include "absl/base/internal/raw_logging.h"
  35. #include "absl/base/macros.h"
  36. #include "absl/base/optimization.h"
  37. #include "absl/base/nullability.h"
  38. #include "absl/container/inlined_vector.h"
  39. #include "absl/crc/crc32c.h"
  40. #include "absl/crc/internal/crc_cord_state.h"
  41. #include "absl/functional/function_ref.h"
  42. #include "absl/strings/cord_buffer.h"
  43. #include "absl/strings/escaping.h"
  44. #include "absl/strings/internal/cord_data_edge.h"
  45. #include "absl/strings/internal/cord_internal.h"
  46. #include "absl/strings/internal/cord_rep_btree.h"
  47. #include "absl/strings/internal/cord_rep_crc.h"
  48. #include "absl/strings/internal/cord_rep_flat.h"
  49. #include "absl/strings/internal/cordz_update_tracker.h"
  50. #include "absl/strings/internal/resize_uninitialized.h"
  51. #include "absl/strings/match.h"
  52. #include "absl/strings/str_cat.h"
  53. #include "absl/strings/string_view.h"
  54. #include "absl/strings/strip.h"
  55. #include "absl/types/optional.h"
  56. #include "absl/types/span.h"
  57. namespace absl {
  58. ABSL_NAMESPACE_BEGIN
  59. using ::absl::cord_internal::CordRep;
  60. using ::absl::cord_internal::CordRepBtree;
  61. using ::absl::cord_internal::CordRepCrc;
  62. using ::absl::cord_internal::CordRepExternal;
  63. using ::absl::cord_internal::CordRepFlat;
  64. using ::absl::cord_internal::CordRepSubstring;
  65. using ::absl::cord_internal::CordzUpdateTracker;
  66. using ::absl::cord_internal::InlineData;
  67. using ::absl::cord_internal::kMaxFlatLength;
  68. using ::absl::cord_internal::kMinFlatLength;
  69. using ::absl::cord_internal::kInlinedVectorSize;
  70. using ::absl::cord_internal::kMaxBytesToCopy;
// Forward declarations of the debug helpers defined later in this file.
static void DumpNode(absl::Nonnull<CordRep*> rep, bool include_data,
                     absl::Nonnull<std::ostream*> os, int indent = 0);
static bool VerifyNode(absl::Nonnull<CordRep*> root,
                       absl::Nonnull<CordRep*> start_node);

// Debug check: in debug builds, asserts that `node` (if non-null) is a valid
// tree per VerifyNode. Returns `node` unchanged so calls can be chained
// inside expressions.
static inline absl::Nullable<CordRep*> VerifyTree(
    absl::Nullable<CordRep*> node) {
  assert(node == nullptr || VerifyNode(node, node));
  // Keep VerifyNode referenced even in NDEBUG builds (where the assert above
  // compiles away) so it is not flagged as unused.
  static_cast<void>(&VerifyNode);
  return node;
}
  81. static absl::Nonnull<CordRepFlat*> CreateFlat(absl::Nonnull<const char*> data,
  82. size_t length,
  83. size_t alloc_hint) {
  84. CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
  85. flat->length = length;
  86. memcpy(flat->Data(), data, length);
  87. return flat;
  88. }
  89. // Creates a new flat or Btree out of the specified array.
  90. // The returned node has a refcount of 1.
  91. static absl::Nonnull<CordRep*> NewBtree(absl::Nonnull<const char*> data,
  92. size_t length, size_t alloc_hint) {
  93. if (length <= kMaxFlatLength) {
  94. return CreateFlat(data, length, alloc_hint);
  95. }
  96. CordRepFlat* flat = CreateFlat(data, kMaxFlatLength, 0);
  97. data += kMaxFlatLength;
  98. length -= kMaxFlatLength;
  99. auto* root = CordRepBtree::Create(flat);
  100. return CordRepBtree::Append(root, {data, length}, alloc_hint);
  101. }
  102. // Create a new tree out of the specified array.
  103. // The returned node has a refcount of 1.
  104. static absl::Nullable<CordRep*> NewTree(absl::Nullable<const char*> data,
  105. size_t length, size_t alloc_hint) {
  106. if (length == 0) return nullptr;
  107. return NewBtree(data, length, alloc_hint);
  108. }
namespace cord_internal {

// Fills in the fields of a freshly allocated external rep for `data`.
// `data` must be non-empty. The rep does not copy the bytes; it merely
// points at them, so the caller guarantees their lifetime.
void InitializeCordRepExternal(absl::string_view data,
                               absl::Nonnull<CordRepExternal*> rep) {
  assert(!data.empty());
  rep->length = data.size();
  rep->tag = EXTERNAL;
  rep->base = data.data();
  VerifyTree(rep);
}

}  // namespace cord_internal
// Creates a CordRep from the provided string. If the string is large enough,
// and not wasteful, we move the string into an external cord rep, preserving
// the already allocated string contents.
// Requires the provided string length to be larger than `kMaxInline`.
static absl::Nonnull<CordRep*> CordRepFromString(std::string&& src) {
  assert(src.length() > cord_internal::kMaxInline);
  if (
      // String is short: copy data to avoid external block overhead.
      src.size() <= kMaxBytesToCopy ||
      // String is wasteful: copy data to avoid pinning too much unused memory.
      src.size() < src.capacity() / 2
  ) {
    return NewTree(src.data(), src.size(), 0);
  }

  // Releaser that owns the moved-in string. The call operator is a no-op:
  // destroying the releaser (when the external rep dies) destroys `data`,
  // which frees the string's buffer.
  struct StringReleaser {
    void operator()(absl::string_view /* data */) {}
    std::string data;
  };
  const absl::string_view original_data = src;
  auto* rep =
      static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
          absl::cord_internal::NewExternalRep(original_data,
                                              StringReleaser{std::move(src)}));
  // Moving src may have invalidated its data pointer, so adjust it.
  rep->base = rep->template get<0>().data.data();
  return rep;
}
// --------------------------------------------------------------------
// Cord::InlineRep functions

#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
// Pre-C++17 toolchains require an out-of-line definition for the in-class
// constexpr static member.
constexpr unsigned char Cord::InlineRep::kMaxInline;
#endif
// Copies the first `n` bytes of `data` into the inline storage, replacing
// the current inline contents.
inline void Cord::InlineRep::set_data(absl::Nonnull<const char*> data,
                                      size_t n) {
  static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15");
  data_.set_inline_data(data, n);
}
// Resets the cord to an inline representation of size `n` and returns a
// pointer to the inline character buffer for the caller to fill in.
inline absl::Nonnull<char*> Cord::InlineRep::set_data(size_t n) {
  assert(n <= kMaxInline);
  ResetToEmpty();
  set_inline_size(n);
  return data_.as_chars();
}
// Removes the trailing `n` bytes from the inline representation.
// Requires an inline cord with inline_size() >= n.
inline void Cord::InlineRep::reduce_size(size_t n) {
  size_t tag = inline_size();
  assert(tag <= kMaxInline);
  assert(tag >= n);
  tag -= n;
  // Zero the vacated tail bytes before shrinking the size.
  memset(data_.as_chars() + tag, 0, n);
  set_inline_size(tag);
}
// Removes the leading `n` bytes from the inline representation by sliding
// the remaining bytes to the front, then shrinking the size.
inline void Cord::InlineRep::remove_prefix(size_t n) {
  cord_internal::SmallMemmove(data_.as_chars(), data_.as_chars() + n,
                              inline_size() - n);
  reduce_size(n);
}
  175. // Returns `rep` converted into a CordRepBtree.
  176. // Directly returns `rep` if `rep` is already a CordRepBtree.
  177. static absl::Nonnull<CordRepBtree*> ForceBtree(CordRep* rep) {
  178. return rep->IsBtree()
  179. ? rep->btree()
  180. : CordRepBtree::Create(cord_internal::RemoveCrcNode(rep));
  181. }
// Appends `tree` to a cord currently holding inline data. Any existing
// inline bytes are first copied into a flat node so the resulting tree is
// (old inline data, tree) in order.
void Cord::InlineRep::AppendTreeToInlined(absl::Nonnull<CordRep*> tree,
                                          MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Append(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
// Appends `tree` to a cord already holding a tree, forcing the current root
// into btree form first.
void Cord::InlineRep::AppendTreeToTree(absl::Nonnull<CordRep*> tree,
                                       MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Append(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
  198. void Cord::InlineRep::AppendTree(absl::Nonnull<CordRep*> tree,
  199. MethodIdentifier method) {
  200. assert(tree != nullptr);
  201. assert(tree->length != 0);
  202. assert(!tree->IsCrc());
  203. if (data_.is_tree()) {
  204. AppendTreeToTree(tree, method);
  205. } else {
  206. AppendTreeToInlined(tree, method);
  207. }
  208. }
// Prepends `tree` to a cord currently holding inline data. Any existing
// inline bytes are first copied into a flat node so the resulting tree is
// (tree, old inline data) in order.
void Cord::InlineRep::PrependTreeToInlined(absl::Nonnull<CordRep*> tree,
                                           MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Prepend(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
// Prepends `tree` to a cord already holding a tree, forcing the current root
// into btree form first.
void Cord::InlineRep::PrependTreeToTree(absl::Nonnull<CordRep*> tree,
                                        MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Prepend(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
  225. void Cord::InlineRep::PrependTree(absl::Nonnull<CordRep*> tree,
  226. MethodIdentifier method) {
  227. assert(tree != nullptr);
  228. assert(tree->length != 0);
  229. assert(!tree->IsCrc());
  230. if (data_.is_tree()) {
  231. PrependTreeToTree(tree, method);
  232. } else {
  233. PrependTreeToInlined(tree, method);
  234. }
  235. }
// Searches for a non-full flat node at the rightmost leaf of the tree. If a
// suitable leaf is found, the function will update the length field for all
// nodes to account for the size increase. The append region address will be
// written to region and the actual size increase will be written to size.
static inline bool PrepareAppendRegion(
    absl::Nonnull<CordRep*> root, absl::Nonnull<absl::Nullable<char*>*> region,
    absl::Nonnull<size_t*> size, size_t max_length) {
  // A privately owned btree can hand out its own tail append buffer.
  if (root->IsBtree() && root->refcount.IsOne()) {
    Span<char> span = root->btree()->GetAppendBuffer(max_length);
    if (!span.empty()) {
      *region = span.data();
      *size = span.size();
      return true;
    }
  }

  // Otherwise only a privately owned flat root can be extended in place.
  CordRep* dst = root;
  if (!dst->IsFlat() || !dst->refcount.IsOne()) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  const size_t in_use = dst->length;
  const size_t capacity = dst->flat()->Capacity();
  if (in_use == capacity) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  // Claim as much of the unused capacity as `max_length` allows; the caller
  // fills in the region, so the length is bumped up-front here.
  const size_t size_increase = std::min(capacity - in_use, max_length);
  dst->length += size_increase;

  *region = dst->flat()->Data() + in_use;
  *size = size_increase;
  return true;
}
// Slow-path assignment: at least one of `*this` / `src` holds a tree.
// Handles cordz (sampling) bookkeeping and reference counting; the ordering
// of operations below is significant and must be preserved.
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
  assert(&src != this);
  assert(is_tree() || src.is_tree());
  auto constexpr method = CordzUpdateTracker::kAssignCord;
  if (ABSL_PREDICT_TRUE(!is_tree())) {
    // We hold no tree: simply adopt a new reference on src's tree.
    EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
    return;
  }

  CordRep* tree = as_tree();
  if (CordRep* src_tree = src.tree()) {
    // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
    // decide if this cord should be (or remains to be) sampled or not.
    data_.set_tree(CordRep::Ref(src_tree));
    CordzInfo::MaybeTrackCord(data_, src.data_, method);
  } else {
    CordzInfo::MaybeUntrackCord(data_.cordz_info());
    data_ = src.data_;
  }
  // Release the old tree only after the new state is fully in place.
  CordRep::Unref(tree);
}
// Releases this cord's tree reference (and its cordz tracking) if it holds
// one; no-op for an inline cord.
void Cord::InlineRep::UnrefTree() {
  if (is_tree()) {
    CordzInfo::MaybeUntrackCord(data_.cordz_info());
    CordRep::Unref(tree());
  }
}
// --------------------------------------------------------------------
// Constructors and destructors

// Constructs a cord from `src`: the bytes are stored inline when they fit,
// otherwise a tree is built from them.
Cord::Cord(absl::string_view src, MethodIdentifier method)
    : contents_(InlineData::kDefaultInit) {
  const size_t n = src.size();
  if (n <= InlineRep::kMaxInline) {
    contents_.set_data(src.data(), n);
  } else {
    CordRep* rep = NewTree(src.data(), n, 0);
    contents_.EmplaceTree(rep, method);
  }
}
// Constructs a cord from a string-like object. Large strings may have their
// allocation adopted rather than copied (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
  if (src.size() <= InlineRep::kMaxInline) {
    contents_.set_data(src.data(), src.size());
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
  }
}

template Cord::Cord(std::string&& src);
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
  assert(contents_.is_tree());
  CordzInfo::MaybeUntrackCord(contents_.cordz_info());
  CordRep::Unref(VerifyTree(contents_.as_tree()));
}
  325. // --------------------------------------------------------------------
  326. // Mutators
  327. void Cord::Clear() {
  328. if (CordRep* tree = contents_.clear()) {
  329. CordRep::Unref(tree);
  330. }
  331. }
// Assigns a large (> kMaxBytesToCopy) string to this cord, reusing the
// string's allocation where profitable via CordRepFromString.
Cord& Cord::AssignLargeString(std::string&& src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  assert(src.size() > kMaxBytesToCopy);
  CordRep* rep = CordRepFromString(std::move(src));
  if (CordRep* tree = contents_.tree()) {
    // Replace the existing tree, then release it.
    CordzUpdateScope scope(contents_.cordz_info(), method);
    contents_.SetTree(rep, scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(rep, method);
  }
  return *this;
}
// Assigns `src` to this cord: small payloads become inline data; larger
// payloads reuse the existing flat node when it is privately owned and big
// enough, and otherwise get a freshly built tree.
Cord& Cord::operator=(absl::string_view src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  const char* data = src.data();
  size_t length = src.size();
  CordRep* tree = contents_.tree();
  if (length <= InlineRep::kMaxInline) {
    // Embed into this->contents_, which is somewhat subtle:
    // - MaybeUntrackCord must be called before Unref(tree).
    // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
    // - set_data() must be called before Unref(tree) as it may reference tree.
    if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
    contents_.set_data(data, length);
    if (tree != nullptr) CordRep::Unref(tree);
    return *this;
  }
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    if (tree->IsFlat() && tree->flat()->Capacity() >= length &&
        tree->refcount.IsOne()) {
      // Copy in place if the existing FLAT node is reusable.
      memmove(tree->flat()->Data(), data, length);
      tree->length = length;
      VerifyTree(tree);
      return *this;
    }
    contents_.SetTree(NewTree(data, length, 0), scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(NewTree(data, length, 0), method);
  }
  return *this;
}
// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
// we keep it here to make diffs easier.

// Appends the bytes of `src`, preferring (in order): spare capacity at the
// tail of an existing tree, the inline buffer, a new perfectly-sized flat
// node, and finally btree append for any remainder.
void Cord::InlineRep::AppendArray(absl::string_view src,
                                  MethodIdentifier method) {
  MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.

  size_t appended = 0;
  CordRep* rep = tree();
  const CordRep* const root = rep;
  CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
  if (root != nullptr) {
    // Copy as much as possible into existing tail capacity.
    rep = cord_internal::RemoveCrcNode(rep);
    char* region;
    if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
      memcpy(region, src.data(), appended);
    }
  } else {
    // Try to fit in the inline buffer if possible.
    size_t inline_length = inline_size();
    if (src.size() <= kMaxInline - inline_length) {
      // Append new data to embedded array
      set_inline_size(inline_length + src.size());
      memcpy(data_.as_chars() + inline_length, src.data(), src.size());
      return;
    }

    // Allocate flat to be a perfect fit on first append exceeding inlined size.
    // Subsequent growth will use amortized growth until we reach maximum flat
    // size.
    rep = CordRepFlat::New(inline_length + src.size());
    appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
    memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
    memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
    rep->length = inline_length + appended;
  }

  src.remove_prefix(appended);
  if (src.empty()) {
    CommitTree(root, rep, scope, method);
    return;
  }

  // TODO(b/192061034): keep legacy 10% growth rate: consider other rates.
  rep = ForceBtree(rep);
  const size_t min_growth = std::max<size_t>(rep->length / 10, src.size());
  rep = CordRepBtree::Append(rep->btree(), src, min_growth - src.size());

  CommitTree(root, rep, scope, method);
}
// Returns this cord's tree with an extra reference; the cord is unchanged.
inline absl::Nonnull<CordRep*> Cord::TakeRep() const& {
  return CordRep::Ref(contents_.tree());
}
// Moves this cord's tree out, leaving the cord empty; the caller assumes
// the existing reference.
inline absl::Nonnull<CordRep*> Cord::TakeRep() && {
  CordRep* rep = contents_.tree();
  contents_.clear();
  return rep;
}
// Shared implementation of Append(const Cord&) and Append(Cord&&). `C` is
// deduced as an lvalue or rvalue Cord reference; the value category decides
// whether TakeRep() shares or steals the source tree.
template <typename C>
inline void Cord::AppendImpl(C&& src) {
  auto constexpr method = CordzUpdateTracker::kAppendCord;
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;
  if (empty()) {
    // Since destination is empty, we can avoid allocating a node,
    if (src.contents_.is_tree()) {
      // by taking the tree directly
      CordRep* rep =
          cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
      contents_.EmplaceTree(rep, method);
    } else {
      // or copying over inline data
      contents_.data_ = src.contents_.data_;
    }
    return;
  }

  // For short cords, it is faster to copy data if there is room in dst.
  const size_t src_size = src.contents_.size();
  if (src_size <= kMaxBytesToCopy) {
    CordRep* src_tree = src.contents_.tree();
    if (src_tree == nullptr) {
      // src has embedded data.
      contents_.AppendArray({src.contents_.data(), src_size}, method);
      return;
    }
    if (src_tree->IsFlat()) {
      // src tree just has one flat node.
      contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
      return;
    }
    if (&src == this) {
      // ChunkIterator below assumes that src is not modified during traversal.
      Append(Cord(src));
      return;
    }
    // TODO(mec): Should we only do this if "dst" has space?
    for (absl::string_view chunk : src.Chunks()) {
      Append(chunk);
    }
    return;
  }

  // Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
  CordRep* rep = cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
  contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
  477. static CordRep::ExtractResult ExtractAppendBuffer(absl::Nonnull<CordRep*> rep,
  478. size_t min_capacity) {
  479. switch (rep->tag) {
  480. case cord_internal::BTREE:
  481. return CordRepBtree::ExtractAppendBuffer(rep->btree(), min_capacity);
  482. default:
  483. if (rep->IsFlat() && rep->refcount.IsOne() &&
  484. rep->flat()->Capacity() - rep->length >= min_capacity) {
  485. return {nullptr, rep};
  486. }
  487. return {rep, nullptr};
  488. }
  489. }
// Creates a CordBuffer sized for `capacity` plus the cord's current inline
// bytes, copies those inline bytes into it, and resets `data` to empty.
// A non-zero `block_size` selects the custom block-size limit policy.
static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size,
                                     size_t capacity) {
  // Watch out for overflow, people can ask for size_t::max().
  const size_t size = data.inline_size();
  const size_t max_capacity = std::numeric_limits<size_t>::max() - size;
  capacity = (std::min)(max_capacity, capacity) + size;
  CordBuffer buffer =
      block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                 : CordBuffer::CreateWithDefaultLimit(capacity);
  cord_internal::SmallMemmove(buffer.data(), data.as_chars(), size);
  buffer.SetLength(size);
  data = {};
  return buffer;
}
// Slow path for GetAppendBuffer: for tree-backed cords, tries to extract a
// reusable buffer from the tail of the tree before allocating a new one;
// for inline cords, builds a fresh buffer seeded with the inline bytes.
CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity,
                                         size_t min_capacity) {
  auto constexpr method = CordzUpdateTracker::kGetAppendBuffer;
  CordRep* tree = contents_.tree();
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    CordRep::ExtractResult result = ExtractAppendBuffer(tree, min_capacity);
    if (result.extracted != nullptr) {
      // The extracted flat becomes the buffer; the remainder (if any) stays
      // in the cord.
      contents_.SetTreeOrEmpty(result.tree, scope);
      return CordBuffer(result.extracted->flat());
    }
    return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                      : CordBuffer::CreateWithDefaultLimit(capacity);
  }
  return CreateAppendBuffer(contents_.data_, block_size, capacity);
}
// Appends a copy of `src` / moves `src` into this cord; the shared
// AppendImpl handles both value categories.
void Cord::Append(const Cord& src) { AppendImpl(src); }

void Cord::Append(Cord&& src) { AppendImpl(std::move(src)); }
// Appends a string-like object. Small strings are copied; large strings are
// converted to a tree, possibly adopting the string's allocation.
template <typename T, Cord::EnableIfString<T>>
void Cord::Append(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Append(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
  }
}

template void Cord::Append(std::string&& src);
// Prepends `src` to this cord. Tree-backed sources are shared by reference;
// inline sources are copied by value.
void Cord::Prepend(const Cord& src) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  CordRep* src_tree = src.contents_.tree();
  if (src_tree != nullptr) {
    CordRep::Ref(src_tree);
    contents_.PrependTree(cord_internal::RemoveCrcNode(src_tree),
                          CordzUpdateTracker::kPrependCord);
    return;
  }

  // `src` cord is inlined.
  absl::string_view src_contents(src.contents_.data(), src.contents_.size());
  return Prepend(src_contents);
}
// Prepends the bytes of `src`: uses the inline buffer when everything fits,
// otherwise builds a tree node for `src` and prepends it.
void Cord::PrependArray(absl::string_view src, MethodIdentifier method) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.

  if (!contents_.is_tree()) {
    size_t cur_size = contents_.inline_size();
    if (cur_size + src.size() <= InlineRep::kMaxInline) {
      // Use embedded storage.
      InlineData data;
      data.set_inline_size(cur_size + src.size());
      memcpy(data.as_chars(), src.data(), src.size());
      memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
      contents_.data_ = data;
      return;
    }
  }
  CordRep* rep = NewTree(src.data(), src.size(), 0);
  contents_.PrependTree(rep, method);
}
// Appends a non-empty `src` of at most kMaxFlatLength bytes, either into the
// remaining inline capacity or as a single new flat node.
void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t inline_length = contents_.inline_size();
    contents_.set_inline_size(inline_length + src.size());
    memcpy(contents_.data_.as_chars() + inline_length, src.data(), src.size());
  } else {
    contents_.AppendTree(CordRepFlat::Create(src), method);
  }
}
// Prepends a non-empty `src` of at most kMaxFlatLength bytes, either into
// the remaining inline capacity or as a single new flat node.
void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    // Build the combined (src + existing) inline value in a scratch
    // InlineData, then install it wholesale.
    const size_t cur_size = contents_.inline_size();
    InlineData data;
    data.set_inline_size(cur_size + src.size());
    memcpy(data.as_chars(), src.data(), src.size());
    memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
    contents_.data_ = data;
  } else {
    contents_.PrependTree(CordRepFlat::Create(src), method);
  }
}
// Prepends a string-like object. Small strings are copied; large strings are
// converted to a tree, possibly adopting the string's allocation.
template <typename T, Cord::EnableIfString<T>>
inline void Cord::Prepend(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Prepend(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
  }
}

template void Cord::Prepend(std::string&& src);
// Removes the first `n` bytes of the cord. Checks (in all build modes, via
// ABSL_INTERNAL_CHECK) that `n` does not exceed the cord's size.
void Cord::RemovePrefix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested prefix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.remove_prefix(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemovePrefix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed: drop the tree entirely.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      CordRep* old = tree;
      tree = tree->btree()->SubTree(n, tree->length - n);
      CordRep::Unref(old);
    } else if (tree->IsSubstring() && tree->refcount.IsOne()) {
      // Privately owned substring: adjust its window in place.
      tree->substring()->start += n;
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, n, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
// Removes the last `n` bytes of the cord. Checks (in all build modes, via
// ABSL_INTERNAL_CHECK) that `n` does not exceed the cord's size.
void Cord::RemoveSuffix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested suffix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.reduce_size(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed: drop the tree entirely.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      tree = CordRepBtree::RemoveSuffix(tree->btree(), n);
    } else if (!tree->IsExternal() && tree->refcount.IsOne()) {
      // Privately owned flat or substring: trim its length in place.
      assert(tree->IsFlat() || tree->IsSubstring());
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, 0, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
// Returns a cord holding `new_size` bytes starting at `pos`. Out-of-range
// arguments are clamped to the cord's bounds rather than being an error.
Cord Cord::Subcord(size_t pos, size_t new_size) const {
  Cord sub_cord;
  size_t length = size();
  // Clamp so that [pos, pos + new_size) lies within the cord.
  if (pos > length) pos = length;
  if (new_size > length - pos) new_size = length - pos;
  if (new_size == 0) return sub_cord;

  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    // Inline source: copy the requested bytes into the (inline) result.
    sub_cord.contents_.set_data(contents_.data() + pos, new_size);
    return sub_cord;
  }

  if (new_size <= InlineRep::kMaxInline) {
    // Result fits inline: gather bytes chunk by chunk into the inline buffer
    // rather than sharing tree nodes.
    sub_cord.contents_.set_inline_size(new_size);
    char* dest = sub_cord.contents_.data_.as_chars();
    Cord::ChunkIterator it = chunk_begin();
    it.AdvanceBytes(pos);
    size_t remaining_size = new_size;
    while (remaining_size > it->size()) {
      cord_internal::SmallMemmove(dest, it->data(), it->size());
      remaining_size -= it->size();
      dest += it->size();
      ++it;
    }
    cord_internal::SmallMemmove(dest, it->data(), remaining_size);
    return sub_cord;
  }

  // Large result: share the underlying tree instead of copying bytes.
  tree = cord_internal::SkipCrcNode(tree);
  if (tree->IsBtree()) {
    tree = tree->btree()->SubTree(pos, new_size);
  } else {
    tree = CordRepSubstring::Substring(tree, pos, new_size);
  }
  sub_cord.contents_.EmplaceTree(tree, contents_.data_,
                                 CordzUpdateTracker::kSubCord);
  return sub_cord;
}
  693. // --------------------------------------------------------------------
  694. // Comparators
  695. namespace {
  696. int ClampResult(int memcmp_res) {
  697. return static_cast<int>(memcmp_res > 0) - static_cast<int>(memcmp_res < 0);
  698. }
  699. int CompareChunks(absl::Nonnull<absl::string_view*> lhs,
  700. absl::Nonnull<absl::string_view*> rhs,
  701. absl::Nonnull<size_t*> size_to_compare) {
  702. size_t compared_size = std::min(lhs->size(), rhs->size());
  703. assert(*size_to_compare >= compared_size);
  704. *size_to_compare -= compared_size;
  705. int memcmp_res = ::memcmp(lhs->data(), rhs->data(), compared_size);
  706. if (memcmp_res != 0) return memcmp_res;
  707. lhs->remove_prefix(compared_size);
  708. rhs->remove_prefix(compared_size);
  709. return 0;
  710. }
  711. // This overload set computes comparison results from memcmp result. This
  712. // interface is used inside GenericCompare below. Different implementations
  713. // are specialized for int and bool. For int we clamp result to {-1, 0, 1}
  714. // set. For bool we just interested in "value == 0".
  715. template <typename ResultType>
  716. ResultType ComputeCompareResult(int memcmp_res) {
  717. return ClampResult(memcmp_res);
  718. }
  719. template <>
  720. bool ComputeCompareResult<bool>(int memcmp_res) {
  721. return memcmp_res == 0;
  722. }
  723. } // namespace
// Helper routine. Locates the first flat or external chunk of the Cord without
// initializing the iterator, and returns a string_view referencing the data.
inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
  if (!is_tree()) {
    // Inline bytes are themselves the first (and only) piece.
    return absl::string_view(data_.as_chars(), data_.inline_size());
  }

  CordRep* node = cord_internal::SkipCrcNode(tree());
  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data(), node->length);
  }

  if (node->IsExternal()) {
    return absl::string_view(node->external()->base, node->length);
  }

  if (node->IsBtree()) {
    // Descend along the front edge to the leftmost leaf, then return that
    // leaf's first data edge.
    CordRepBtree* tree = node->btree();
    int height = tree->height();
    while (--height >= 0) {
      tree = tree->Edge(CordRepBtree::kFront)->btree();
    }
    return tree->Data(tree->begin());
  }

  // Get the child node if we encounter a SUBSTRING.
  size_t offset = 0;
  size_t length = node->length;
  assert(length != 0);
  if (node->IsSubstring()) {
    offset = node->substring()->start;
    node = node->substring()->child;
  }

  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data() + offset, length);
  }

  assert(node->IsExternal() && "Expect FLAT or EXTERNAL node here");
  return absl::string_view(node->external()->base + offset, length);
}
// Attaches `state` (a full CRC cord state) to this cord by wrapping the
// current representation in a CordRepCrc node.
void Cord::SetCrcCordState(crc_internal::CrcCordState state) {
  auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum;
  if (empty()) {
    // Empty cord: drop any stale empty CRC node, then install a CRC node
    // with no child.
    contents_.MaybeRemoveEmptyCrcNode();
    CordRep* rep = CordRepCrc::New(nullptr, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else if (!contents_.is_tree()) {
    // Inline data cannot carry a CRC node; promote it to a flat rep first.
    CordRep* rep = contents_.MakeFlatWithExtraCapacity(0);
    rep = CordRepCrc::New(rep, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else {
    // Tree form: wrap the existing tree in a CRC node.
    const CordzUpdateScope scope(contents_.data_.cordz_info(), method);
    CordRep* rep = CordRepCrc::New(contents_.data_.as_tree(), std::move(state));
    contents_.SetTree(rep, scope);
  }
}
  775. void Cord::SetExpectedChecksum(uint32_t crc) {
  776. // Construct a CrcCordState with a single chunk.
  777. crc_internal::CrcCordState state;
  778. state.mutable_rep()->prefix_crc.push_back(
  779. crc_internal::CrcCordState::PrefixCrc(size(), absl::crc32c_t{crc}));
  780. SetCrcCordState(std::move(state));
  781. }
  782. absl::Nullable<const crc_internal::CrcCordState*> Cord::MaybeGetCrcCordState()
  783. const {
  784. if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
  785. return nullptr;
  786. }
  787. return &contents_.tree()->crc()->crc_cord_state;
  788. }
  789. absl::optional<uint32_t> Cord::ExpectedChecksum() const {
  790. if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
  791. return absl::nullopt;
  792. }
  793. return static_cast<uint32_t>(
  794. contents_.tree()->crc()->crc_cord_state.Checksum());
  795. }
// Continues a comparison against `rhs` after the first `compared_size` bytes
// (already known equal) were consumed, comparing at most `size_to_compare`
// bytes in total. Returns a memcmp-style negative/zero/positive result.
inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Advances `*it`/`*chunk` to the next non-empty chunk; false at end.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();

  // compared_size is inside first chunk.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && !rhs.empty()) {
    int comparison_result = CompareChunks(&lhs_chunk, &rhs, &size_to_compare);
    if (comparison_result != 0) return comparison_result;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out of bytes: the side that still has data compares greater.
  return static_cast<int>(rhs.empty()) - static_cast<int>(lhs_chunk.empty());
}
// Cord-vs-cord flavor of the comparison slow path; see the string_view
// overload above. Walks both cords chunk by chunk after the already-equal
// `compared_size` prefix, comparing at most `size_to_compare` bytes.
inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Advances `*it`/`*chunk` to the next non-empty chunk; false at end.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();
  Cord::ChunkIterator rhs_it = rhs.chunk_begin();

  // compared_size is inside both first chunks.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  absl::string_view rhs_chunk =
      (rhs_it.bytes_remaining_ != 0) ? *rhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs_chunk.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs_chunk.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && advance(&rhs_it, &rhs_chunk)) {
    int memcmp_res = CompareChunks(&lhs_chunk, &rhs_chunk, &size_to_compare);
    if (memcmp_res != 0) return memcmp_res;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out of bytes: the side that still has data compares greater.
  return static_cast<int>(rhs_chunk.empty()) -
         static_cast<int>(lhs_chunk.empty());
}
  852. inline absl::string_view Cord::GetFirstChunk(const Cord& c) {
  853. if (c.empty()) return {};
  854. return c.contents_.FindFlatStartPiece();
  855. }
// A string_view is its own single chunk.
inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) {
  return sv;
}
// Compares up to 'size_to_compare' bytes of 'lhs' with 'rhs'. It is assumed
// that 'size_to_compare' is at least the size of the smaller of the two
// first chunks.
template <typename ResultType, typename RHS>
ResultType GenericCompare(const Cord& lhs, const RHS& rhs,
                          size_t size_to_compare) {
  absl::string_view lhs_chunk = Cord::GetFirstChunk(lhs);
  absl::string_view rhs_chunk = Cord::GetFirstChunk(rhs);

  size_t compared_size = std::min(lhs_chunk.size(), rhs_chunk.size());
  assert(size_to_compare >= compared_size);

  int memcmp_res = ::memcmp(lhs_chunk.data(), rhs_chunk.data(), compared_size);
  if (compared_size == size_to_compare || memcmp_res != 0) {
    // Fast path: the first chunks alone decide the comparison.
    return ComputeCompareResult<ResultType>(memcmp_res);
  }

  return ComputeCompareResult<ResultType>(
      lhs.CompareSlowPath(rhs, compared_size, size_to_compare));
}
// Equality against a string_view, limited to `size_to_compare` bytes.
bool Cord::EqualsImpl(absl::string_view rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
// Equality against another cord, limited to `size_to_compare` bytes.
bool Cord::EqualsImpl(const Cord& rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
  881. template <typename RHS>
  882. inline int SharedCompareImpl(const Cord& lhs, const RHS& rhs) {
  883. size_t lhs_size = lhs.size();
  884. size_t rhs_size = rhs.size();
  885. if (lhs_size == rhs_size) {
  886. return GenericCompare<int>(lhs, rhs, lhs_size);
  887. }
  888. if (lhs_size < rhs_size) {
  889. auto data_comp_res = GenericCompare<int>(lhs, rhs, lhs_size);
  890. return data_comp_res == 0 ? -1 : data_comp_res;
  891. }
  892. auto data_comp_res = GenericCompare<int>(lhs, rhs, rhs_size);
  893. return data_comp_res == 0 ? +1 : data_comp_res;
  894. }
// Three-way comparison against a string_view; returns <0, 0, or >0.
int Cord::Compare(absl::string_view rhs) const {
  return SharedCompareImpl(*this, rhs);
}
// Three-way comparison against another cord; returns <0, 0, or >0.
int Cord::CompareImpl(const Cord& rhs) const {
  return SharedCompareImpl(*this, rhs);
}
  901. bool Cord::EndsWith(absl::string_view rhs) const {
  902. size_t my_size = size();
  903. size_t rhs_size = rhs.size();
  904. if (my_size < rhs_size) return false;
  905. Cord tmp(*this);
  906. tmp.RemovePrefix(my_size - rhs_size);
  907. return tmp.EqualsImpl(rhs, rhs_size);
  908. }
  909. bool Cord::EndsWith(const Cord& rhs) const {
  910. size_t my_size = size();
  911. size_t rhs_size = rhs.size();
  912. if (my_size < rhs_size) return false;
  913. Cord tmp(*this);
  914. tmp.RemovePrefix(my_size - rhs_size);
  915. return tmp.EqualsImpl(rhs, rhs_size);
  916. }
  917. // --------------------------------------------------------------------
  918. // Misc.
  919. Cord::operator std::string() const {
  920. std::string s;
  921. absl::CopyCordToString(*this, &s);
  922. return s;
  923. }
// Copies the full contents of `src` into `*dst`, resizing `*dst` to fit.
void CopyCordToString(const Cord& src, absl::Nonnull<std::string*> dst) {
  if (!src.contents_.is_tree()) {
    // Inline data: InlineRep handles the resize-and-copy itself.
    src.contents_.CopyTo(dst);
  } else {
    // Resize without zero-initializing, then fill directly from the tree.
    absl::strings_internal::STLStringResizeUninitialized(dst, src.size());
    src.CopyToArraySlowPath(&(*dst)[0]);
  }
}
  932. void Cord::CopyToArraySlowPath(absl::Nonnull<char*> dst) const {
  933. assert(contents_.is_tree());
  934. absl::string_view fragment;
  935. if (GetFlatAux(contents_.tree(), &fragment)) {
  936. memcpy(dst, fragment.data(), fragment.size());
  937. return;
  938. }
  939. for (absl::string_view chunk : Chunks()) {
  940. memcpy(dst, chunk.data(), chunk.size());
  941. dst += chunk.size();
  942. }
  943. }
// Returns a cord holding the next `n` bytes at the iterator's position and
// advances the iterator past them. Asserts that `n` bytes remain.
Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
  ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
                        "Attempted to iterate past `end()`");
  Cord subcord;
  auto constexpr method = CordzUpdateTracker::kCordReader;

  if (n <= InlineRep::kMaxInline) {
    // Range to read fits in inline data. Flatten it.
    char* data = subcord.contents_.set_data(n);
    while (n > current_chunk_.size()) {
      memcpy(data, current_chunk_.data(), current_chunk_.size());
      data += current_chunk_.size();
      n -= current_chunk_.size();
      ++*this;
    }
    memcpy(data, current_chunk_.data(), n);
    if (n < current_chunk_.size()) {
      // Partially consumed the current chunk: just trim it.
      RemoveChunkPrefix(n);
    } else if (n > 0) {
      // Exactly consumed the current chunk: step to the next one.
      ++*this;
    }
    return subcord;
  }

  if (btree_reader_) {
    size_t chunk_size = current_chunk_.size();
    if (n <= chunk_size && n <= kMaxBytesToCopy) {
      // Small read fully inside the current chunk: copy the bytes out.
      subcord = Cord(current_chunk_.substr(0, n), method);
      if (n < chunk_size) {
        current_chunk_.remove_prefix(n);
      } else {
        current_chunk_ = btree_reader_.Next();
      }
    } else {
      // Larger read: let the btree reader build a (shared) subtree.
      CordRep* rep;
      current_chunk_ = btree_reader_.Read(n, chunk_size, rep);
      subcord.contents_.EmplaceTree(rep, method);
    }
    bytes_remaining_ -= n;
    return subcord;
  }

  // Short circuit if reading the entire data edge.
  assert(current_leaf_ != nullptr);
  if (n == current_leaf_->length) {
    bytes_remaining_ = 0;
    current_chunk_ = {};
    CordRep* tree = CordRep::Ref(current_leaf_);
    subcord.contents_.EmplaceTree(VerifyTree(tree), method);
    return subcord;
  }

  // From this point on, we need a partial substring node.
  // Get pointer to the underlying flat or external data payload and
  // compute data pointer and offset into current flat or external.
  CordRep* payload = current_leaf_->IsSubstring()
                         ? current_leaf_->substring()->child
                         : current_leaf_;
  const char* data = payload->IsExternal() ? payload->external()->base
                                           : payload->flat()->Data();
  const size_t offset = static_cast<size_t>(current_chunk_.data() - data);

  auto* tree = CordRepSubstring::Substring(payload, offset, n);
  subcord.contents_.EmplaceTree(VerifyTree(tree), method);
  bytes_remaining_ -= n;
  current_chunk_.remove_prefix(n);
  return subcord;
}
  1007. char Cord::operator[](size_t i) const {
  1008. ABSL_HARDENING_ASSERT(i < size());
  1009. size_t offset = i;
  1010. const CordRep* rep = contents_.tree();
  1011. if (rep == nullptr) {
  1012. return contents_.data()[i];
  1013. }
  1014. rep = cord_internal::SkipCrcNode(rep);
  1015. while (true) {
  1016. assert(rep != nullptr);
  1017. assert(offset < rep->length);
  1018. if (rep->IsFlat()) {
  1019. // Get the "i"th character directly from the flat array.
  1020. return rep->flat()->Data()[offset];
  1021. } else if (rep->IsBtree()) {
  1022. return rep->btree()->GetCharacter(offset);
  1023. } else if (rep->IsExternal()) {
  1024. // Get the "i"th character from the external array.
  1025. return rep->external()->base[offset];
  1026. } else {
  1027. // This must be a substring a node, so bypass it to get to the child.
  1028. assert(rep->IsSubstring());
  1029. offset += rep->substring()->start;
  1030. rep = rep->substring()->child;
  1031. }
  1032. }
  1033. }
namespace {

// Tests whether the sequence of chunks beginning at `position` starts with
// `needle`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle.size()`.
bool IsSubstringInCordAt(absl::Cord::CharIterator position,
                         absl::string_view needle) {
  auto haystack_chunk = absl::Cord::ChunkRemaining(position);
  while (true) {
    // Precondition is that `absl::Cord::ChunkRemaining(position)` is not
    // empty. This assert will trigger if that is not true.
    assert(!haystack_chunk.empty());
    // Match as much of `needle` as fits in the current chunk; a mismatch
    // anywhere means no match at this position.
    auto min_length = std::min(haystack_chunk.size(), needle.size());
    if (!absl::ConsumePrefix(&needle, haystack_chunk.substr(0, min_length))) {
      return false;
    }
    if (needle.empty()) {
      return true;
    }
    // Needle continues into the next chunk; advance and keep matching.
    absl::Cord::Advance(&position, min_length);
    haystack_chunk = absl::Cord::ChunkRemaining(position);
  }
}

}  // namespace
// A few options how this could be implemented:
// (a) Flatten the Cord and find, i.e.
//       haystack.Flatten().find(needle)
//     For large 'haystack' (where Cord makes sense to be used), this copies
//     the whole 'haystack' and can be slow.
// (b) Use std::search, i.e.
//       std::search(haystack.char_begin(), haystack.char_end(),
//                   needle.begin(), needle.end())
//     This avoids the copy, but compares one byte at a time, and branches a
//     lot every time it has to advance. It is also not possible to use
//     std::search as is, because CharIterator is only an input iterator, not a
//     forward iterator.
// (c) Use string_view::find in each fragment, and specifically handle fragment
//     boundaries.
//
// This currently implements option (c): each fragment is scanned with
// string_view::find for the first byte of `needle`, and candidate matches
// spanning fragment boundaries are verified with IsSubstringInCordAt.
absl::Cord::CharIterator absl::Cord::FindImpl(CharIterator it,
                                              absl::string_view needle) const {
  // Ensure preconditions are met by callers first.

  // Needle must not be empty.
  assert(!needle.empty());
  // Haystack must be at least as large as needle.
  assert(it.chunk_iterator_.bytes_remaining_ >= needle.size());

  // Cord is a sequence of chunks. To find `needle` we go chunk by chunk looking
  // for the first char of needle, up until we have advanced `N` defined as
  // `haystack.size() - needle.size()`. If we find the first char of needle at
  // `P` and `P` is less than `N`, we then call `IsSubstringInCordAt` to
  // see if this is the needle. If not, we advance to `P + 1` and try again.
  while (it.chunk_iterator_.bytes_remaining_ >= needle.size()) {
    auto haystack_chunk = Cord::ChunkRemaining(it);
    assert(!haystack_chunk.empty());
    // Look for the first char of `needle` in the current chunk.
    auto idx = haystack_chunk.find(needle.front());
    if (idx == absl::string_view::npos) {
      // No potential match in this chunk, advance past it.
      Cord::Advance(&it, haystack_chunk.size());
      continue;
    }
    // We found the start of a potential match in the chunk. Advance the
    // iterator and haystack chunk to the match position.
    Cord::Advance(&it, idx);
    // Check if there is enough haystack remaining to actually have a match.
    if (it.chunk_iterator_.bytes_remaining_ < needle.size()) {
      break;
    }
    // Check if this is `needle`.
    if (IsSubstringInCordAt(it, needle)) {
      return it;
    }
    // No match, increment the iterator for the next attempt.
    Cord::Advance(&it, 1);
  }
  // If we got here, we did not find `needle`.
  return char_end();
}
  1114. absl::Cord::CharIterator absl::Cord::Find(absl::string_view needle) const {
  1115. if (needle.empty()) {
  1116. return char_begin();
  1117. }
  1118. if (needle.size() > size()) {
  1119. return char_end();
  1120. }
  1121. if (needle.size() == size()) {
  1122. return *this == needle ? char_begin() : char_end();
  1123. }
  1124. return FindImpl(char_begin(), needle);
  1125. }
namespace {

// Tests whether the sequence of chunks beginning at `haystack` starts with the
// sequence of chunks beginning at `needle_begin` and extending to `needle_end`.
//
// REQUIRES: remaining `absl::Cord` starting at `haystack` is greater than or
// equal to `needle_end - needle_begin`.
bool IsSubcordInCordAt(absl::Cord::CharIterator haystack,
                       absl::Cord::CharIterator needle_begin,
                       absl::Cord::CharIterator needle_end) {
  while (needle_begin != needle_end) {
    auto haystack_chunk = absl::Cord::ChunkRemaining(haystack);
    assert(!haystack_chunk.empty());
    auto needle_chunk = absl::Cord::ChunkRemaining(needle_begin);
    // Compare only the overlap of the two current chunks, then advance both
    // sides past it; chunk boundaries of the two cords need not align.
    auto min_length = std::min(haystack_chunk.size(), needle_chunk.size());
    if (haystack_chunk.substr(0, min_length) !=
        needle_chunk.substr(0, min_length)) {
      return false;
    }
    absl::Cord::Advance(&haystack, min_length);
    absl::Cord::Advance(&needle_begin, min_length);
  }
  return true;
}

// Tests whether the sequence of chunks beginning at `position` starts with the
// cord `needle`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle.size()`.
bool IsSubcordInCordAt(absl::Cord::CharIterator position,
                       const absl::Cord& needle) {
  return IsSubcordInCordAt(position, needle.char_begin(), needle.char_end());
}

}  // namespace
// Returns an iterator at the first occurrence of the cord `needle`, or
// char_end() if absent. Strategy: find candidates matching needle's first
// chunk via FindImpl, then verify the rest of the needle chunk by chunk.
absl::Cord::CharIterator absl::Cord::Find(const absl::Cord& needle) const {
  if (needle.empty()) {
    return char_begin();
  }
  const auto needle_size = needle.size();
  if (needle_size > size()) {
    return char_end();
  }
  if (needle_size == size()) {
    // Same length: either the whole cord matches or nothing does.
    return *this == needle ? char_begin() : char_end();
  }
  const auto needle_chunk = Cord::ChunkRemaining(needle.char_begin());
  auto haystack_it = char_begin();
  while (true) {
    haystack_it = FindImpl(haystack_it, needle_chunk);
    if (haystack_it == char_end() ||
        haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    // We found the first chunk of `needle` at `haystack_it` but not the entire
    // subcord. Advance past the first chunk and check for the remainder.
    auto haystack_advanced_it = haystack_it;
    auto needle_it = needle.char_begin();
    Cord::Advance(&haystack_advanced_it, needle_chunk.size());
    Cord::Advance(&needle_it, needle_chunk.size());
    if (IsSubcordInCordAt(haystack_advanced_it, needle_it, needle.char_end())) {
      return haystack_it;
    }
    // No match, increment the iterator for the next attempt.
    Cord::Advance(&haystack_it, 1);
    if (haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    if (haystack_it.chunk_iterator_.bytes_remaining_ == needle_size) {
      // Special case, if there is exactly `needle_size` bytes remaining, the
      // subcord is either at `haystack_it` or not at all.
      if (IsSubcordInCordAt(haystack_it, needle)) {
        return haystack_it;
      }
      break;
    }
  }
  return char_end();
}
  1202. bool Cord::Contains(absl::string_view rhs) const {
  1203. return rhs.empty() || Find(rhs) != char_end();
  1204. }
  1205. bool Cord::Contains(const absl::Cord& rhs) const {
  1206. return rhs.empty() || Find(rhs) != char_end();
  1207. }
// Flattens a tree-backed cord into one contiguous buffer, replaces the
// current representation with it, and returns a view of the flattened data.
absl::string_view Cord::FlattenSlowPath() {
  assert(contents_.is_tree());
  size_t total_size = size();
  CordRep* new_rep;
  char* new_buffer;

  // Try to put the contents into a new flat rep. If they won't fit in the
  // biggest possible flat node, use an external rep instead.
  if (total_size <= kMaxFlatLength) {
    new_rep = CordRepFlat::New(total_size);
    new_rep->length = total_size;
    new_buffer = new_rep->flat()->Data();
    CopyToArraySlowPath(new_buffer);
  } else {
    new_buffer = std::allocator<char>().allocate(total_size);
    CopyToArraySlowPath(new_buffer);
    new_rep = absl::cord_internal::NewExternalRep(
        // Releaser frees the heap buffer when the external rep is destroyed.
        absl::string_view(new_buffer, total_size), [](absl::string_view s) {
          std::allocator<char>().deallocate(const_cast<char*>(s.data()),
                                            s.size());
        });
  }
  CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
  CordRep::Unref(contents_.as_tree());
  contents_.SetTree(new_rep, scope);
  return absl::string_view(new_buffer, total_size);
}
  1234. /* static */ bool Cord::GetFlatAux(absl::Nonnull<CordRep*> rep,
  1235. absl::Nonnull<absl::string_view*> fragment) {
  1236. assert(rep != nullptr);
  1237. if (rep->length == 0) {
  1238. *fragment = absl::string_view();
  1239. return true;
  1240. }
  1241. rep = cord_internal::SkipCrcNode(rep);
  1242. if (rep->IsFlat()) {
  1243. *fragment = absl::string_view(rep->flat()->Data(), rep->length);
  1244. return true;
  1245. } else if (rep->IsExternal()) {
  1246. *fragment = absl::string_view(rep->external()->base, rep->length);
  1247. return true;
  1248. } else if (rep->IsBtree()) {
  1249. return rep->btree()->IsFlat(fragment);
  1250. } else if (rep->IsSubstring()) {
  1251. CordRep* child = rep->substring()->child;
  1252. if (child->IsFlat()) {
  1253. *fragment = absl::string_view(
  1254. child->flat()->Data() + rep->substring()->start, rep->length);
  1255. return true;
  1256. } else if (child->IsExternal()) {
  1257. *fragment = absl::string_view(
  1258. child->external()->base + rep->substring()->start, rep->length);
  1259. return true;
  1260. } else if (child->IsBtree()) {
  1261. return child->btree()->IsFlat(rep->substring()->start, rep->length,
  1262. fragment);
  1263. }
  1264. }
  1265. return false;
  1266. }
// Invokes `callback` once per contiguous chunk of `rep`, in order.
/* static */ void Cord::ForEachChunkAux(
    absl::Nonnull<absl::cord_internal::CordRep*> rep,
    absl::FunctionRef<void(absl::string_view)> callback) {
  assert(rep != nullptr);
  if (rep->length == 0) return;
  rep = cord_internal::SkipCrcNode(rep);

  if (rep->IsBtree()) {
    // Btree: iterate chunk by chunk.
    ChunkIterator it(rep), end;
    while (it != end) {
      callback(*it);
      ++it;
    }
    return;
  }

  // This is a leaf node, so invoke our callback.
  // NOTE(review): SkipCrcNode was already applied above, so this second call
  // appears redundant — presumably a no-op on a non-CRC rep; confirm.
  absl::cord_internal::CordRep* current_node = cord_internal::SkipCrcNode(rep);
  absl::string_view chunk;
  bool success = GetFlatAux(current_node, &chunk);
  assert(success);
  if (success) {
    callback(chunk);
  }
}
// Debug helper: prints a textual representation of the tree rooted at `rep`
// to `*os`, one line per node, indenting by depth. With `include_data`, node
// addresses and (escaped) payload bytes are printed too.
static void DumpNode(absl::Nonnull<CordRep*> rep, bool include_data,
                     absl::Nonnull<std::ostream*> os, int indent) {
  const int kIndentStep = 1;
  absl::InlinedVector<CordRep*, kInlinedVectorSize> stack;
  absl::InlinedVector<int, kInlinedVectorSize> indents;
  for (;;) {
    *os << std::setw(3) << rep->refcount.Get();
    *os << " " << std::setw(7) << rep->length;
    *os << " [";
    if (include_data) *os << static_cast<void*>(rep);
    *os << "]";
    *os << " " << std::setw(indent) << "";
    bool leaf = false;
    // NOTE(review): `rep` is dereferenced above before this null check; given
    // the Nonnull parameter contract this branch looks unreachable/defensive
    // — confirm before relying on it.
    if (rep == nullptr) {
      *os << "NULL\n";
      leaf = true;
    } else if (rep->IsCrc()) {
      // CRC and SUBSTRING are unary wrappers: print and descend.
      *os << "CRC crc=" << rep->crc()->crc_cord_state.Checksum() << "\n";
      indent += kIndentStep;
      rep = rep->crc()->child;
    } else if (rep->IsSubstring()) {
      *os << "SUBSTRING @ " << rep->substring()->start << "\n";
      indent += kIndentStep;
      rep = rep->substring()->child;
    } else {  // Leaf or ring
      leaf = true;
      if (rep->IsExternal()) {
        *os << "EXTERNAL [";
        if (include_data)
          *os << absl::CEscape(std::string(rep->external()->base, rep->length));
        *os << "]\n";
      } else if (rep->IsFlat()) {
        *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
        if (include_data)
          *os << absl::CEscape(std::string(rep->flat()->Data(), rep->length));
        *os << "]\n";
      } else {
        // Btree nodes dump their own subtree.
        CordRepBtree::Dump(rep, /*label=*/"", include_data, *os);
      }
    }
    if (leaf) {
      // Resume a previously saved sibling, if any; otherwise we are done.
      if (stack.empty()) break;
      rep = stack.back();
      stack.pop_back();
      indent = indents.back();
      indents.pop_back();
    }
  }
  ABSL_INTERNAL_CHECK(indents.empty(), "");
}
// Builds a diagnostic message naming `node` and dumping the whole tree at
// `root`, for use in consistency-check failures.
static std::string ReportError(absl::Nonnull<CordRep*> root,
                               absl::Nonnull<CordRep*> node) {
  std::ostringstream buf;
  buf << "Error at node " << node << " in:";
  // NOTE(review): this 3-argument call relies on DumpNode being declared
  // earlier with a defaulted `indent` parameter — confirm in the header
  // portion of this file.
  DumpNode(root, true, &buf);
  return buf.str();
}
// Checks structural invariants of the (sub)tree rooted at `start_node`,
// CHECK-failing with a tree dump on violation. Returns true on success.
static bool VerifyNode(absl::Nonnull<CordRep*> root,
                       absl::Nonnull<CordRep*> start_node) {
  absl::InlinedVector<absl::Nonnull<CordRep*>, 2> worklist;
  worklist.push_back(start_node);
  do {
    CordRep* node = worklist.back();
    worklist.pop_back();

    ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
    if (node != root) {
      // Non-root nodes must be non-empty, and a CRC node may only be the root.
      ABSL_INTERNAL_CHECK(node->length != 0, ReportError(root, node));
      ABSL_INTERNAL_CHECK(!node->IsCrc(), ReportError(root, node));
    }

    if (node->IsFlat()) {
      // Length must fit in the flat node's allocated capacity.
      ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
                          ReportError(root, node));
    } else if (node->IsExternal()) {
      ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
                          ReportError(root, node));
    } else if (node->IsSubstring()) {
      // Substring range [start, start + length) must lie inside the child.
      ABSL_INTERNAL_CHECK(
          node->substring()->start < node->substring()->child->length,
          ReportError(root, node));
      ABSL_INTERNAL_CHECK(node->substring()->start + node->length <=
                              node->substring()->child->length,
                          ReportError(root, node));
    } else if (node->IsCrc()) {
      // A CRC node is either childless (empty cord) or exactly covers its
      // child, which is verified in turn.
      ABSL_INTERNAL_CHECK(
          node->crc()->child != nullptr || node->crc()->length == 0,
          ReportError(root, node));
      if (node->crc()->child != nullptr) {
        ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length,
                            ReportError(root, node));
        worklist.push_back(node->crc()->child);
      }
    }
  } while (!worklist.empty());
  return true;
}
  1385. std::ostream& operator<<(std::ostream& out, const Cord& cord) {
  1386. for (absl::string_view chunk : cord.Chunks()) {
  1387. out.write(chunk.data(), static_cast<std::streamsize>(chunk.size()));
  1388. }
  1389. return out;
  1390. }
namespace strings_internal {

// Test-only accessors exposing cord_internal constants and conversions.
size_t CordTestAccess::FlatOverhead() { return cord_internal::kFlatOverhead; }
size_t CordTestAccess::MaxFlatLength() { return cord_internal::kMaxFlatLength; }
size_t CordTestAccess::FlatTagToLength(uint8_t tag) {
  return cord_internal::TagToLength(tag);
}
uint8_t CordTestAccess::LengthToTag(size_t s) {
  ABSL_INTERNAL_CHECK(s <= kMaxFlatLength, absl::StrCat("Invalid length ", s));
  // Tags are computed from the total allocated size, which includes overhead.
  return cord_internal::AllocatedSizeToTag(s + cord_internal::kFlatOverhead);
}
size_t CordTestAccess::SizeofCordRepExternal() {
  return sizeof(CordRepExternal);
}
size_t CordTestAccess::SizeofCordRepSubstring() {
  return sizeof(CordRepSubstring);
}
}  // namespace strings_internal
  1408. ABSL_NAMESPACE_END
  1409. } // namespace absl