1 // Copyright 2020 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "absl/strings/cord.h"
16
17 #include <algorithm>
18 #include <cassert>
19 #include <cstddef>
20 #include <cstdint>
21 #include <cstdio>
22 #include <cstdlib>
23 #include <cstring>
24 #include <iomanip>
25 #include <ios>
26 #include <iostream>
27 #include <limits>
28 #include <memory>
29 #include <ostream>
30 #include <sstream>
31 #include <string>
32 #include <utility>
33
34 #include "absl/base/attributes.h"
35 #include "absl/base/config.h"
36 #include "absl/base/internal/endian.h"
37 #include "absl/base/internal/raw_logging.h"
38 #include "absl/base/macros.h"
39 #include "absl/base/optimization.h"
40 #include "absl/base/nullability.h"
41 #include "absl/container/inlined_vector.h"
42 #include "absl/crc/crc32c.h"
43 #include "absl/crc/internal/crc_cord_state.h"
44 #include "absl/functional/function_ref.h"
45 #include "absl/strings/cord_buffer.h"
46 #include "absl/strings/escaping.h"
47 #include "absl/strings/internal/cord_data_edge.h"
48 #include "absl/strings/internal/cord_internal.h"
49 #include "absl/strings/internal/cord_rep_btree.h"
50 #include "absl/strings/internal/cord_rep_crc.h"
51 #include "absl/strings/internal/cord_rep_flat.h"
52 #include "absl/strings/internal/cordz_update_tracker.h"
53 #include "absl/strings/internal/resize_uninitialized.h"
54 #include "absl/strings/match.h"
55 #include "absl/strings/str_cat.h"
56 #include "absl/strings/string_view.h"
57 #include "absl/strings/strip.h"
58 #include "absl/types/optional.h"
59 #include "absl/types/span.h"
60
61 namespace absl {
62 ABSL_NAMESPACE_BEGIN
63
64 using ::absl::cord_internal::CordRep;
65 using ::absl::cord_internal::CordRepBtree;
66 using ::absl::cord_internal::CordRepCrc;
67 using ::absl::cord_internal::CordRepExternal;
68 using ::absl::cord_internal::CordRepFlat;
69 using ::absl::cord_internal::CordRepSubstring;
70 using ::absl::cord_internal::CordzUpdateTracker;
71 using ::absl::cord_internal::InlineData;
72 using ::absl::cord_internal::kMaxFlatLength;
73 using ::absl::cord_internal::kMinFlatLength;
74
75 using ::absl::cord_internal::kInlinedVectorSize;
76 using ::absl::cord_internal::kMaxBytesToCopy;
77
78 static void DumpNode(absl::Nonnull<CordRep*> nonnull_rep, bool include_data,
79 absl::Nonnull<std::ostream*> os, int indent = 0);
80 static bool VerifyNode(absl::Nonnull<CordRep*> root,
81 absl::Nonnull<CordRep*> start_node);
82
// Debug-checks the tree rooted at `node` (no-op for nullptr) and returns it
// unchanged. VerifyNode only executes under assertions; the cast below marks
// the function as "used" so NDEBUG builds do not emit unused-function
// warnings.
static inline absl::Nullable<CordRep*> VerifyTree(
    absl::Nullable<CordRep*> node) {
  assert(node == nullptr || VerifyNode(node, node));
  static_cast<void>(&VerifyNode);
  return node;
}
89
CreateFlat(absl::Nonnull<const char * > data,size_t length,size_t alloc_hint)90 static absl::Nonnull<CordRepFlat*> CreateFlat(absl::Nonnull<const char*> data,
91 size_t length,
92 size_t alloc_hint) {
93 CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
94 flat->length = length;
95 memcpy(flat->Data(), data, length);
96 return flat;
97 }
98
99 // Creates a new flat or Btree out of the specified array.
100 // The returned node has a refcount of 1.
// Creates a new flat or Btree out of the specified array.
// The returned node has a refcount of 1.
static absl::Nonnull<CordRep*> NewBtree(absl::Nonnull<const char*> data,
                                        size_t length, size_t alloc_hint) {
  if (length <= kMaxFlatLength) {
    // Fits in a single flat node; no btree structure needed.
    return CreateFlat(data, length, alloc_hint);
  }
  // Fill one maximum-size flat first, then let the btree Append() consume the
  // remainder (it chops the remaining bytes into further flats internally).
  CordRepFlat* flat = CreateFlat(data, kMaxFlatLength, 0);
  data += kMaxFlatLength;
  length -= kMaxFlatLength;
  auto* root = CordRepBtree::Create(flat);
  return CordRepBtree::Append(root, {data, length}, alloc_hint);
}
112
113 // Create a new tree out of the specified array.
114 // The returned node has a refcount of 1.
NewTree(absl::Nullable<const char * > data,size_t length,size_t alloc_hint)115 static absl::Nullable<CordRep*> NewTree(absl::Nullable<const char*> data,
116 size_t length, size_t alloc_hint) {
117 if (length == 0) return nullptr;
118 return NewBtree(data, length, alloc_hint);
119 }
120
121 namespace cord_internal {
122
// Initializes the payload fields of an already-allocated external rep so it
// points at `data`. Requires non-empty data; callers handle empty input
// before allocating an external rep.
void InitializeCordRepExternal(absl::string_view data,
                               absl::Nonnull<CordRepExternal*> rep) {
  assert(!data.empty());
  rep->length = data.size();
  rep->tag = EXTERNAL;
  rep->base = data.data();
  VerifyTree(rep);
}
131
132 } // namespace cord_internal
133
134 // Creates a CordRep from the provided string. If the string is large enough,
135 // and not wasteful, we move the string into an external cord rep, preserving
136 // the already allocated string contents.
137 // Requires the provided string length to be larger than `kMaxInline`.
// Creates a CordRep from the provided string. If the string is large enough,
// and not wasteful, we move the string into an external cord rep, preserving
// the already allocated string contents.
// Requires the provided string length to be larger than `kMaxInline`.
static absl::Nonnull<CordRep*> CordRepFromString(std::string&& src) {
  assert(src.length() > cord_internal::kMaxInline);
  if (
      // String is short: copy data to avoid external block overhead.
      src.size() <= kMaxBytesToCopy ||
      // String is wasteful: copy data to avoid pinning too much unused memory.
      src.size() < src.capacity() / 2
  ) {
    return NewTree(src.data(), src.size(), 0);
  }

  // The releaser owns the moved-in std::string; its call operator has nothing
  // to do because destroying the releaser destroys the string, which releases
  // the memory the external rep pointed at.
  struct StringReleaser {
    void operator()(absl::string_view /* data */) {}
    std::string data;
  };
  const absl::string_view original_data = src;
  auto* rep =
      static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
          absl::cord_internal::NewExternalRep(original_data,
                                              StringReleaser{std::move(src)}));
  // Moving src may have invalidated its data pointer, so adjust it.
  rep->base = rep->template get<0>().data.data();
  return rep;
}
162
163 // --------------------------------------------------------------------
164 // Cord::InlineRep functions
165
166 #ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
167 constexpr unsigned char Cord::InlineRep::kMaxInline;
168 #endif
169
// Copies `n` bytes from `data` into the inline storage, replacing any
// previous inline contents. Requires n <= kMaxInline.
inline void Cord::InlineRep::set_data(absl::Nonnull<const char*> data,
                                      size_t n) {
  static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15");
  data_.set_inline_data(data, n);
}
175
// Resets this rep to empty, sets the inline size to `n`, and returns a
// pointer to the inline buffer for the caller to fill in.
inline absl::Nonnull<char*> Cord::InlineRep::set_data(size_t n) {
  assert(n <= kMaxInline);
  ResetToEmpty();
  set_inline_size(n);
  return data_.as_chars();
}
182
// Shrinks the inline representation by `n` bytes. Requires inline data with
// size >= n.
inline void Cord::InlineRep::reduce_size(size_t n) {
  size_t tag = inline_size();
  assert(tag <= kMaxInline);
  assert(tag >= n);
  tag -= n;
  // Zero the vacated tail so unused inline bytes stay zeroed.
  memset(data_.as_chars() + tag, 0, n);
  set_inline_size(tag);
}
191
// Drops the first `n` bytes of the inline representation by shifting the
// remaining bytes to the front, then shrinking. Requires inline size >= n.
inline void Cord::InlineRep::remove_prefix(size_t n) {
  cord_internal::SmallMemmove(data_.as_chars(), data_.as_chars() + n,
                              inline_size() - n);
  reduce_size(n);
}
197
198 // Returns `rep` converted into a CordRepBtree.
199 // Directly returns `rep` if `rep` is already a CordRepBtree.
ForceBtree(CordRep * rep)200 static absl::Nonnull<CordRepBtree*> ForceBtree(CordRep* rep) {
201 return rep->IsBtree()
202 ? rep->btree()
203 : CordRepBtree::Create(cord_internal::RemoveCrcNode(rep));
204 }
205
// Appends `tree` to a cord currently holding inline data, converting the cord
// to tree form. Takes ownership of `tree`.
void Cord::InlineRep::AppendTreeToInlined(absl::Nonnull<CordRep*> tree,
                                          MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    // Lift existing inline bytes into a flat node so they precede `tree`.
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Append(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
215
// Appends `tree` to a cord already in tree form. Takes ownership of `tree`;
// `method` attributes the update for cordz sampling.
void Cord::InlineRep::AppendTreeToTree(absl::Nonnull<CordRep*> tree,
                                       MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Append(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
223
AppendTree(absl::Nonnull<CordRep * > tree,MethodIdentifier method)224 void Cord::InlineRep::AppendTree(absl::Nonnull<CordRep*> tree,
225 MethodIdentifier method) {
226 assert(tree != nullptr);
227 assert(tree->length != 0);
228 assert(!tree->IsCrc());
229 if (data_.is_tree()) {
230 AppendTreeToTree(tree, method);
231 } else {
232 AppendTreeToInlined(tree, method);
233 }
234 }
235
// Prepends `tree` to a cord currently holding inline data, converting the
// cord to tree form. Takes ownership of `tree`.
void Cord::InlineRep::PrependTreeToInlined(absl::Nonnull<CordRep*> tree,
                                           MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    // Lift existing inline bytes into a flat node so they follow `tree`.
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Prepend(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
245
// Prepends `tree` to a cord already in tree form. Takes ownership of `tree`;
// `method` attributes the update for cordz sampling.
void Cord::InlineRep::PrependTreeToTree(absl::Nonnull<CordRep*> tree,
                                        MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Prepend(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
253
PrependTree(absl::Nonnull<CordRep * > tree,MethodIdentifier method)254 void Cord::InlineRep::PrependTree(absl::Nonnull<CordRep*> tree,
255 MethodIdentifier method) {
256 assert(tree != nullptr);
257 assert(tree->length != 0);
258 assert(!tree->IsCrc());
259 if (data_.is_tree()) {
260 PrependTreeToTree(tree, method);
261 } else {
262 PrependTreeToInlined(tree, method);
263 }
264 }
265
266 // Searches for a non-full flat node at the rightmost leaf of the tree. If a
267 // suitable leaf is found, the function will update the length field for all
268 // nodes to account for the size increase. The append region address will be
269 // written to region and the actual size increase will be written to size.
// Searches for a non-full flat node at the rightmost leaf of the tree. If a
// suitable leaf is found, the function will update the length field for all
// nodes to account for the size increase. The append region address will be
// written to region and the actual size increase will be written to size.
static inline bool PrepareAppendRegion(
    absl::Nonnull<CordRep*> root, absl::Nonnull<absl::Nullable<char*>*> region,
    absl::Nonnull<size_t*> size, size_t max_length) {
  // Btree roots hand out their own append buffer. Only safe when we hold the
  // sole reference; otherwise writes would be visible through shared nodes.
  if (root->IsBtree() && root->refcount.IsOne()) {
    Span<char> span = root->btree()->GetAppendBuffer(max_length);
    if (!span.empty()) {
      *region = span.data();
      *size = span.size();
      return true;
    }
  }

  // Otherwise only a privately owned flat root can be extended in place.
  CordRep* dst = root;
  if (!dst->IsFlat() || !dst->refcount.IsOne()) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  const size_t in_use = dst->length;
  const size_t capacity = dst->flat()->Capacity();
  if (in_use == capacity) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  // Claim the free tail (up to max_length) by growing `length` now; the
  // caller fills the returned region immediately afterwards.
  const size_t size_increase = std::min(capacity - in_use, max_length);
  dst->length += size_increase;

  *region = dst->flat()->Data() + in_use;
  *size = size_increase;
  return true;
}
304
// Slow-path assignment used when at least one of the two reps holds a tree.
// Manages cordz (un)tracking and refcounts for the tree being replaced.
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
  assert(&src != this);
  assert(is_tree() || src.is_tree());
  auto constexpr method = CordzUpdateTracker::kAssignCord;
  if (ABSL_PREDICT_TRUE(!is_tree())) {
    // Destination is inline: nothing to release, just share src's tree.
    EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
    return;
  }

  CordRep* tree = as_tree();
  if (CordRep* src_tree = src.tree()) {
    // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
    // decide if this cord should be (or remains to be) sampled or not.
    data_.set_tree(CordRep::Ref(src_tree));
    CordzInfo::MaybeTrackCord(data_, src.data_, method);
  } else {
    CordzInfo::MaybeUntrackCord(data_.cordz_info());
    data_ = src.data_;
  }
  // Release the old tree only after src's tree was Ref'd above, so any nodes
  // shared between the two stay alive.
  CordRep::Unref(tree);
}
326
UnrefTree()327 void Cord::InlineRep::UnrefTree() {
328 if (is_tree()) {
329 CordzInfo::MaybeUntrackCord(data_.cordz_info());
330 CordRep::Unref(tree());
331 }
332 }
333
334 // --------------------------------------------------------------------
335 // Constructors and destructors
336
Cord(absl::string_view src,MethodIdentifier method)337 Cord::Cord(absl::string_view src, MethodIdentifier method)
338 : contents_(InlineData::kDefaultInit) {
339 const size_t n = src.size();
340 if (n <= InlineRep::kMaxInline) {
341 contents_.set_data(src.data(), n);
342 } else {
343 CordRep* rep = NewTree(src.data(), n, 0);
344 contents_.EmplaceTree(rep, method);
345 }
346 }
347
// Constructs a Cord from a std::string, moving the string's buffer into an
// external rep when large enough (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
  if (src.size() <= InlineRep::kMaxInline) {
    // Small enough for the inline buffer; `src` is left untouched.
    contents_.set_data(src.data(), src.size());
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
  }
}

// Explicit instantiation for std::string&&.
template Cord::Cord(std::string&& src);
359
360 // The destruction code is separate so that the compiler can determine
361 // that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
  // Only reached when a tree is present; inline cords need no destruction.
  assert(contents_.is_tree());
  CordzInfo::MaybeUntrackCord(contents_.cordz_info());
  CordRep::Unref(VerifyTree(contents_.as_tree()));
}
367
368 // --------------------------------------------------------------------
369 // Mutators
370
Clear()371 void Cord::Clear() {
372 if (CordRep* tree = contents_.clear()) {
373 CordRep::Unref(tree);
374 }
375 }
376
// Assigns a large (> kMaxBytesToCopy) string, allowing its buffer to be moved
// into the cord. Replaces any existing tree, preserving cordz attribution.
Cord& Cord::AssignLargeString(std::string&& src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  assert(src.size() > kMaxBytesToCopy);
  CordRep* rep = CordRepFromString(std::move(src));
  if (CordRep* tree = contents_.tree()) {
    // Install the new tree before releasing the old one.
    CordzUpdateScope scope(contents_.cordz_info(), method);
    contents_.SetTree(rep, scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(rep, method);
  }
  return *this;
}
390
// Assigns from a string_view: stores inline when it fits, reuses a privately
// owned flat node in place when possible, otherwise builds a new tree.
Cord& Cord::operator=(absl::string_view src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  const char* data = src.data();
  size_t length = src.size();
  CordRep* tree = contents_.tree();
  if (length <= InlineRep::kMaxInline) {
    // Embed into this->contents_, which is somewhat subtle:
    // - MaybeUntrackCord must be called before Unref(tree).
    // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
    // - set_data() must be called before Unref(tree) as it may reference tree.
    if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
    contents_.set_data(data, length);
    if (tree != nullptr) CordRep::Unref(tree);
    return *this;
  }
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    if (tree->IsFlat() && tree->flat()->Capacity() >= length &&
        tree->refcount.IsOne()) {
      // Copy in place if the existing FLAT node is reusable.
      // memmove (not memcpy): `src` may point into this same flat's buffer.
      memmove(tree->flat()->Data(), data, length);
      tree->length = length;
      VerifyTree(tree);
      return *this;
    }
    // Build the replacement tree before releasing the old one, since `src`
    // may alias data owned by the old tree.
    contents_.SetTree(NewTree(data, length, 0), scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(NewTree(data, length, 0), method);
  }
  return *this;
}
423
424 // TODO(sanjay): Move to Cord::InlineRep section of file. For now,
425 // we keep it here to make diffs easier.
// Appends `src` bytes: first tries the inline buffer or an in-place append
// region of the existing tree, then falls back to growing a btree.
void Cord::InlineRep::AppendArray(absl::string_view src,
                                  MethodIdentifier method) {
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.
  MaybeRemoveEmptyCrcNode();

  size_t appended = 0;
  CordRep* rep = tree();
  const CordRep* const root = rep;
  CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
  if (root != nullptr) {
    // Existing tree: copy as much as possible into free tail space of the
    // rightmost leaf (see PrepareAppendRegion).
    rep = cord_internal::RemoveCrcNode(rep);
    char* region;
    if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
      memcpy(region, src.data(), appended);
    }
  } else {
    // Try to fit in the inline buffer if possible.
    size_t inline_length = inline_size();
    if (src.size() <= kMaxInline - inline_length) {
      // Append new data to embedded array
      set_inline_size(inline_length + src.size());
      memcpy(data_.as_chars() + inline_length, src.data(), src.size());
      return;
    }

    // Allocate flat to be a perfect fit on first append exceeding inlined size.
    // Subsequent growth will use amortized growth until we reach maximum flat
    // size.
    rep = CordRepFlat::New(inline_length + src.size());
    appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
    memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
    memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
    rep->length = inline_length + appended;
  }

  src.remove_prefix(appended);
  if (src.empty()) {
    CommitTree(root, rep, scope, method);
    return;
  }

  // TODO(b/192061034): keep legacy 10% growth rate: consider other rates.
  rep = ForceBtree(rep);
  const size_t min_growth = std::max<size_t>(rep->length / 10, src.size());
  rep = CordRepBtree::Append(rep->btree(), src, min_growth - src.size());

  CommitTree(root, rep, scope, method);
}
474
// Returns this cord's tree with an added reference (copying overload).
inline absl::Nonnull<CordRep*> Cord::TakeRep() const& {
  return CordRep::Ref(contents_.tree());
}

// Steals this cord's tree, leaving the cord cleared (moving overload).
inline absl::Nonnull<CordRep*> Cord::TakeRep() && {
  CordRep* rep = contents_.tree();
  contents_.clear();
  return rep;
}
484
// Shared implementation behind Append(const Cord&) and Append(Cord&&); `C` is
// `const Cord&` or `Cord`, so TakeRep() either refs or steals src's tree.
template <typename C>
inline void Cord::AppendImpl(C&& src) {
  auto constexpr method = CordzUpdateTracker::kAppendCord;

  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  if (empty()) {
    // Since destination is empty, we can avoid allocating a node,
    if (src.contents_.is_tree()) {
      // by taking the tree directly
      CordRep* rep =
          cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
      contents_.EmplaceTree(rep, method);
    } else {
      // or copying over inline data
      contents_.data_ = src.contents_.data_;
    }
    return;
  }

  // For short cords, it is faster to copy data if there is room in dst.
  const size_t src_size = src.contents_.size();
  if (src_size <= kMaxBytesToCopy) {
    CordRep* src_tree = src.contents_.tree();
    if (src_tree == nullptr) {
      // src has embedded data.
      contents_.AppendArray({src.contents_.data(), src_size}, method);
      return;
    }
    if (src_tree->IsFlat()) {
      // src tree just has one flat node.
      contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
      return;
    }
    if (&src == this) {
      // ChunkIterator below assumes that src is not modified during traversal.
      Append(Cord(src));
      return;
    }
    // TODO(mec): Should we only do this if "dst" has space?
    for (absl::string_view chunk : src.Chunks()) {
      Append(chunk);
    }
    return;
  }

  // Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
  CordRep* rep = cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
  contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
536
ExtractAppendBuffer(absl::Nonnull<CordRep * > rep,size_t min_capacity)537 static CordRep::ExtractResult ExtractAppendBuffer(absl::Nonnull<CordRep*> rep,
538 size_t min_capacity) {
539 switch (rep->tag) {
540 case cord_internal::BTREE:
541 return CordRepBtree::ExtractAppendBuffer(rep->btree(), min_capacity);
542 default:
543 if (rep->IsFlat() && rep->refcount.IsOne() &&
544 rep->flat()->Capacity() - rep->length >= min_capacity) {
545 return {nullptr, rep};
546 }
547 return {rep, nullptr};
548 }
549 }
550
// Creates a CordBuffer seeded with the current inline contents of `data`,
// leaving `data` empty afterwards.
static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size,
                                     size_t capacity) {
  // Watch out for overflow, people can ask for size_t::max().
  const size_t size = data.inline_size();
  const size_t max_capacity = std::numeric_limits<size_t>::max() - size;
  capacity = (std::min)(max_capacity, capacity) + size;
  // block_size == 0 selects the default sizing policy.
  CordBuffer buffer =
      block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                 : CordBuffer::CreateWithDefaultLimit(capacity);
  cord_internal::SmallMemmove(buffer.data(), data.as_chars(), size);
  buffer.SetLength(size);
  data = {};
  return buffer;
}
565
// Slow path for GetAppendBuffer: either extracts a reusable flat node from
// the current tree, or allocates a fresh buffer (seeded with the inline data
// when the cord is not a tree).
CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity,
                                         size_t min_capacity) {
  auto constexpr method = CordzUpdateTracker::kGetAppendBuffer;
  CordRep* tree = contents_.tree();
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    CordRep::ExtractResult result = ExtractAppendBuffer(tree, min_capacity);
    if (result.extracted != nullptr) {
      // The tail flat was split off the cord; hand it out as the buffer.
      contents_.SetTreeOrEmpty(result.tree, scope);
      return CordBuffer(result.extracted->flat());
    }
    return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                      : CordBuffer::CreateWithDefaultLimit(capacity);
  }
  return CreateAppendBuffer(contents_.data_, block_size, capacity);
}
582
// Appends a copy of `src`.
void Cord::Append(const Cord& src) { AppendImpl(src); }

// Appends `src`, allowed to steal its rep; `src` is left moved-from.
void Cord::Append(Cord&& src) { AppendImpl(std::move(src)); }

// Appends a std::string, moving its buffer into the cord when large enough
// for the move to pay off (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
void Cord::Append(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Append(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
  }
}

// Explicit instantiation for std::string&&.
template void Cord::Append(std::string&& src);
598
// Prepends a copy of `src`: shares src's tree when it has one, otherwise
// copies its inline bytes.
void Cord::Prepend(const Cord& src) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  CordRep* src_tree = src.contents_.tree();
  if (src_tree != nullptr) {
    // Take a reference before stripping any CRC wrapper node.
    CordRep::Ref(src_tree);
    contents_.PrependTree(cord_internal::RemoveCrcNode(src_tree),
                          CordzUpdateTracker::kPrependCord);
    return;
  }

  // `src` cord is inlined.
  absl::string_view src_contents(src.contents_.data(), src.contents_.size());
  return Prepend(src_contents);
}
615
// Prepends `src` bytes: combined inline storage when everything fits,
// otherwise a new tree node prepended to the existing contents.
void Cord::PrependArray(absl::string_view src, MethodIdentifier method) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.

  if (!contents_.is_tree()) {
    size_t cur_size = contents_.inline_size();
    if (cur_size + src.size() <= InlineRep::kMaxInline) {
      // Use embedded storage.
      // Assemble in a scratch InlineData because the new bytes go in front of
      // the existing ones.
      InlineData data;
      data.set_inline_size(cur_size + src.size());
      memcpy(data.as_chars(), src.data(), src.size());
      memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
      contents_.data_ = data;
      return;
    }
  }
  CordRep* rep = NewTree(src.data(), src.size(), 0);
  contents_.PrependTree(rep, method);
}
635
// Appends non-empty `src` (at most one flat's worth) without amortized
// over-allocation: into remaining inline capacity or as exactly one flat.
void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t inline_length = contents_.inline_size();
    contents_.set_inline_size(inline_length + src.size());
    memcpy(contents_.data_.as_chars() + inline_length, src.data(), src.size());
  } else {
    contents_.AppendTree(CordRepFlat::Create(src), method);
  }
}
647
// Prepends non-empty `src` (at most one flat's worth) without amortized
// over-allocation: into remaining inline capacity or as exactly one flat.
void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t cur_size = contents_.inline_size();
    // Assemble in a scratch InlineData because the new bytes go in front of
    // the existing ones.
    InlineData data;
    data.set_inline_size(cur_size + src.size());
    memcpy(data.as_chars(), src.data(), src.size());
    memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
    contents_.data_ = data;
  } else {
    contents_.PrependTree(CordRepFlat::Create(src), method);
  }
}
662
// Prepends a std::string, moving its buffer into the cord when large enough
// for the move to pay off (see CordRepFromString).
template <typename T, Cord::EnableIfString<T>>
inline void Cord::Prepend(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Prepend(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
  }
}

// Explicit instantiation for std::string&&.
template void Cord::Prepend(std::string&& src);
674
// Removes the first `n` bytes of the cord. CHECK-fails when n > size().
void Cord::RemovePrefix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested prefix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.remove_prefix(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemovePrefix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    // Strip any CRC node; its checksum would not match the trimmed data.
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed; the cord becomes empty.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      CordRep* old = tree;
      tree = tree->btree()->SubTree(n, tree->length - n);
      CordRep::Unref(old);
    } else if (tree->IsSubstring() && tree->refcount.IsOne()) {
      // Privately owned substring: slide its window in place.
      tree->substring()->start += n;
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, n, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
705
// Removes the last `n` bytes of the cord. CHECK-fails when n > size().
void Cord::RemoveSuffix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested suffix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.reduce_size(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    // Strip any CRC node; its checksum would not match the trimmed data.
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Everything is removed; the cord becomes empty.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      tree = CordRepBtree::RemoveSuffix(tree->btree(), n);
    } else if (!tree->IsExternal() && tree->refcount.IsOne()) {
      // Privately owned flat/substring: truncation is just a length change.
      assert(tree->IsFlat() || tree->IsSubstring());
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, 0, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
734
// Returns a cord of `new_size` bytes starting at `pos`. Out-of-range `pos`
// and `new_size` are clamped to the cord's bounds rather than erroring.
Cord Cord::Subcord(size_t pos, size_t new_size) const {
  Cord sub_cord;
  size_t length = size();
  if (pos > length) pos = length;
  if (new_size > length - pos) new_size = length - pos;
  if (new_size == 0) return sub_cord;

  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    // Source is inline, so the substring is inline too.
    sub_cord.contents_.set_data(contents_.data() + pos, new_size);
    return sub_cord;
  }

  if (new_size <= InlineRep::kMaxInline) {
    // Small result: copy the bytes chunk by chunk into the inline buffer
    // instead of sharing tree nodes.
    sub_cord.contents_.set_inline_size(new_size);
    char* dest = sub_cord.contents_.data_.as_chars();
    Cord::ChunkIterator it = chunk_begin();
    it.AdvanceBytes(pos);
    size_t remaining_size = new_size;
    while (remaining_size > it->size()) {
      cord_internal::SmallMemmove(dest, it->data(), it->size());
      remaining_size -= it->size();
      dest += it->size();
      ++it;
    }
    cord_internal::SmallMemmove(dest, it->data(), remaining_size);
    return sub_cord;
  }

  // Large result: share the source nodes via a SubTree / Substring view.
  tree = cord_internal::SkipCrcNode(tree);
  if (tree->IsBtree()) {
    tree = tree->btree()->SubTree(pos, new_size);
  } else {
    tree = CordRepSubstring::Substring(tree, pos, new_size);
  }
  sub_cord.contents_.EmplaceTree(tree, contents_.data_,
                                 CordzUpdateTracker::kSubCord);
  return sub_cord;
}
774
775 // --------------------------------------------------------------------
776 // Comparators
777
778 namespace {
779
// Maps an arbitrary memcmp()-style result onto the canonical {-1, 0, 1} set.
int ClampResult(int memcmp_res) {
  if (memcmp_res < 0) return -1;
  if (memcmp_res > 0) return 1;
  return 0;
}
783
// Compares the leading min(lhs->size(), rhs->size()) bytes of both chunks,
// always deducting that amount from *size_to_compare. On equality the
// compared bytes are consumed from both views and 0 is returned; on mismatch
// the raw (unclamped) memcmp result is returned and the views are untouched.
int CompareChunks(absl::Nonnull<absl::string_view*> lhs,
                  absl::Nonnull<absl::string_view*> rhs,
                  absl::Nonnull<size_t*> size_to_compare) {
  size_t compared_size = std::min(lhs->size(), rhs->size());
  assert(*size_to_compare >= compared_size);
  *size_to_compare -= compared_size;

  int memcmp_res = ::memcmp(lhs->data(), rhs->data(), compared_size);
  if (memcmp_res != 0) return memcmp_res;

  lhs->remove_prefix(compared_size);
  rhs->remove_prefix(compared_size);

  return 0;
}
799
800 // This overload set computes comparison results from memcmp result. This
801 // interface is used inside GenericCompare below. Different implementations
802 // are specialized for int and bool. For int we clamp result to {-1, 0, 1}
803 // set. For bool we just interested in "value == 0".
// Generic case (int): clamp the memcmp result to {-1, 0, 1}.
template <typename ResultType>
ResultType ComputeCompareResult(int memcmp_res) {
  return ClampResult(memcmp_res);
}
// bool specialization: equality checks only need "was it equal".
template <>
bool ComputeCompareResult<bool>(int memcmp_res) {
  return memcmp_res == 0;
}
812
813 } // namespace
814
815 // Helper routine. Locates the first flat or external chunk of the Cord without
816 // initializing the iterator, and returns a string_view referencing the data.
// Returns a view of the first contiguous piece of data in this rep, without
// allocating or constructing an iterator.
inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
  if (!is_tree()) {
    // Inline data is itself a single contiguous piece.
    return absl::string_view(data_.as_chars(), data_.inline_size());
  }

  CordRep* node = cord_internal::SkipCrcNode(tree());
  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data(), node->length);
  }

  if (node->IsExternal()) {
    return absl::string_view(node->external()->base, node->length);
  }

  if (node->IsBtree()) {
    // Descend along the leftmost edge down to the first leaf.
    CordRepBtree* tree = node->btree();
    int height = tree->height();
    while (--height >= 0) {
      tree = tree->Edge(CordRepBtree::kFront)->btree();
    }
    return tree->Data(tree->begin());
  }

  // Get the child node if we encounter a SUBSTRING.
  size_t offset = 0;
  size_t length = node->length;
  assert(length != 0);

  if (node->IsSubstring()) {
    offset = node->substring()->start;
    node = node->substring()->child;
  }

  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data() + offset, length);
  }

  assert(node->IsExternal() && "Expect FLAT or EXTERNAL node here");

  return absl::string_view(node->external()->base + offset, length);
}
858
// Attaches checksum metadata `state` to this cord by wrapping its root in a
// CordRepCrc node, first converting inline data to tree form if necessary.
void Cord::SetCrcCordState(crc_internal::CrcCordState state) {
  auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum;
  if (empty()) {
    // An empty cord is represented as a CRC node with a null child.
    contents_.MaybeRemoveEmptyCrcNode();
    CordRep* rep = CordRepCrc::New(nullptr, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else if (!contents_.is_tree()) {
    // Inline data must be lifted into a flat node to carry the CRC wrapper.
    CordRep* rep = contents_.MakeFlatWithExtraCapacity(0);
    rep = CordRepCrc::New(rep, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else {
    const CordzUpdateScope scope(contents_.data_.cordz_info(), method);
    CordRep* rep = CordRepCrc::New(contents_.data_.as_tree(), std::move(state));
    contents_.SetTree(rep, scope);
  }
}
875
// Records `crc` as the expected CRC32C checksum for this cord's full
// contents by installing a single-chunk CrcCordState.
void Cord::SetExpectedChecksum(uint32_t crc) {
  // Construct a CrcCordState with a single chunk.
  crc_internal::CrcCordState state;
  state.mutable_rep()->prefix_crc.push_back(
      crc_internal::CrcCordState::PrefixCrc(size(), absl::crc32c_t{crc}));
  SetCrcCordState(std::move(state));
}
883
MaybeGetCrcCordState() const884 absl::Nullable<const crc_internal::CrcCordState*> Cord::MaybeGetCrcCordState()
885 const {
886 if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
887 return nullptr;
888 }
889 return &contents_.tree()->crc()->crc_cord_state;
890 }
891
ExpectedChecksum() const892 absl::optional<uint32_t> Cord::ExpectedChecksum() const {
893 if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
894 return absl::nullopt;
895 }
896 return static_cast<uint32_t>(
897 contents_.tree()->crc()->crc_cord_state.Checksum());
898 }
899
// Continues a comparison against `rhs` after the first `compared_size` bytes
// (already known equal) have been consumed. Returns memcmp-style ordering
// over at most `size_to_compare` total bytes.
inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Moves `it` to the next non-empty chunk when `chunk` is exhausted;
  // returns false once the iterator itself has no bytes remaining.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();

  // compared_size is inside first chunk.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && !rhs.empty()) {
    int comparison_result = CompareChunks(&lhs_chunk, &rhs, &size_to_compare);
    if (comparison_result != 0) return comparison_result;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out first: the side with bytes remaining orders greater.
  return static_cast<int>(rhs.empty()) - static_cast<int>(lhs_chunk.empty());
}
930
// Cord-vs-Cord counterpart of the string_view overload above: continues the
// comparison after `compared_size` equal bytes, walking both chunk streams.
inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Moves `it` to the next non-empty chunk when `chunk` is exhausted;
  // returns false once the iterator itself has no bytes remaining.
  auto advance = [](absl::Nonnull<Cord::ChunkIterator*> it,
                    absl::Nonnull<absl::string_view*> chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();
  Cord::ChunkIterator rhs_it = rhs.chunk_begin();

  // compared_size is inside both first chunks.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  absl::string_view rhs_chunk =
      (rhs_it.bytes_remaining_ != 0) ? *rhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs_chunk.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs_chunk.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && advance(&rhs_it, &rhs_chunk)) {
    int memcmp_res = CompareChunks(&lhs_chunk, &rhs_chunk, &size_to_compare);
    if (memcmp_res != 0) return memcmp_res;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out first: the side with bytes remaining orders greater.
  return static_cast<int>(rhs_chunk.empty()) -
         static_cast<int>(lhs_chunk.empty());
}
965
GetFirstChunk(const Cord & c)966 inline absl::string_view Cord::GetFirstChunk(const Cord& c) {
967 if (c.empty()) return {};
968 return c.contents_.FindFlatStartPiece();
969 }
// A string_view is its own single chunk.
inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) {
  return sv;
}
973
// Compares up to 'size_to_compare' bytes of 'lhs' with 'rhs'. It is assumed
// that 'size_to_compare' is greater that size of smallest of first chunks.
// ResultType selects the caller's result shape: bool (equality) or int
// (three-way ordering), via ComputeCompareResult<>.
template <typename ResultType, typename RHS>
ResultType GenericCompare(const Cord& lhs, const RHS& rhs,
                          size_t size_to_compare) {
  absl::string_view lhs_chunk = Cord::GetFirstChunk(lhs);
  absl::string_view rhs_chunk = Cord::GetFirstChunk(rhs);

  // Fast path: memcmp the overlap of the two first chunks. Only fall back to
  // chunk-by-chunk iteration when this prefix is equal but inconclusive.
  size_t compared_size = std::min(lhs_chunk.size(), rhs_chunk.size());
  assert(size_to_compare >= compared_size);
  int memcmp_res = ::memcmp(lhs_chunk.data(), rhs_chunk.data(), compared_size);
  if (compared_size == size_to_compare || memcmp_res != 0) {
    return ComputeCompareResult<ResultType>(memcmp_res);
  }

  return ComputeCompareResult<ResultType>(
      lhs.CompareSlowPath(rhs, compared_size, size_to_compare));
}
992
// Returns true if the first `size_to_compare` bytes of `*this` and `rhs`
// compare equal. Callers guarantee both hold at least that many bytes.
bool Cord::EqualsImpl(absl::string_view rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
996
// Cord-vs-Cord equality over the first `size_to_compare` bytes.
bool Cord::EqualsImpl(const Cord& rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
1000
1001 template <typename RHS>
SharedCompareImpl(const Cord & lhs,const RHS & rhs)1002 inline int SharedCompareImpl(const Cord& lhs, const RHS& rhs) {
1003 size_t lhs_size = lhs.size();
1004 size_t rhs_size = rhs.size();
1005 if (lhs_size == rhs_size) {
1006 return GenericCompare<int>(lhs, rhs, lhs_size);
1007 }
1008 if (lhs_size < rhs_size) {
1009 auto data_comp_res = GenericCompare<int>(lhs, rhs, lhs_size);
1010 return data_comp_res == 0 ? -1 : data_comp_res;
1011 }
1012
1013 auto data_comp_res = GenericCompare<int>(lhs, rhs, rhs_size);
1014 return data_comp_res == 0 ? +1 : data_comp_res;
1015 }
1016
// Lexicographic three-way comparison against `rhs` (memcmp semantics).
int Cord::Compare(absl::string_view rhs) const {
  return SharedCompareImpl(*this, rhs);
}
1020
// Lexicographic three-way comparison against another Cord.
int Cord::CompareImpl(const Cord& rhs) const {
  return SharedCompareImpl(*this, rhs);
}
1024
EndsWith(absl::string_view rhs) const1025 bool Cord::EndsWith(absl::string_view rhs) const {
1026 size_t my_size = size();
1027 size_t rhs_size = rhs.size();
1028
1029 if (my_size < rhs_size) return false;
1030
1031 Cord tmp(*this);
1032 tmp.RemovePrefix(my_size - rhs_size);
1033 return tmp.EqualsImpl(rhs, rhs_size);
1034 }
1035
EndsWith(const Cord & rhs) const1036 bool Cord::EndsWith(const Cord& rhs) const {
1037 size_t my_size = size();
1038 size_t rhs_size = rhs.size();
1039
1040 if (my_size < rhs_size) return false;
1041
1042 Cord tmp(*this);
1043 tmp.RemovePrefix(my_size - rhs_size);
1044 return tmp.EqualsImpl(rhs, rhs_size);
1045 }
1046
1047 // --------------------------------------------------------------------
1048 // Misc.
1049
// Explicit conversion: flattens the cord into a freshly allocated string.
Cord::operator std::string() const {
  std::string s;
  absl::CopyCordToString(*this, &s);
  return s;
}
1055
// Replaces `*dst` with the contents of `src`. Tree-backed cords are copied
// into an uninitialized-resized string to avoid zero-filling first.
void CopyCordToString(const Cord& src, absl::Nonnull<std::string*> dst) {
  if (!src.contents_.is_tree()) {
    // Inlined contents: InlineRep copies its own bytes directly.
    src.contents_.CopyTo(dst);
  } else {
    absl::strings_internal::STLStringResizeUninitialized(dst, src.size());
    src.CopyToArraySlowPath(&(*dst)[0]);
  }
}
1064
// Appends the contents of `src` to `*dst`. The amortized resize keeps a
// sequence of repeated appends linear overall.
void AppendCordToString(const Cord& src, absl::Nonnull<std::string*> dst) {
  const size_t cur_dst_size = dst->size();
  const size_t new_dst_size = cur_dst_size + src.size();
  absl::strings_internal::STLStringResizeUninitializedAmortized(dst,
                                                                new_dst_size);
  char* append_ptr = &(*dst)[cur_dst_size];
  src.CopyToArrayImpl(append_ptr);
}
1073
// Copies every byte of this (tree-backed) cord into `dst`, which must have
// room for size() bytes.
void Cord::CopyToArraySlowPath(absl::Nonnull<char*> dst) const {
  assert(contents_.is_tree());
  absl::string_view fragment;
  // A tree consisting of a single contiguous fragment needs only one memcpy.
  if (GetFlatAux(contents_.tree(), &fragment)) {
    memcpy(dst, fragment.data(), fragment.size());
    return;
  }
  for (absl::string_view chunk : Chunks()) {
    memcpy(dst, chunk.data(), chunk.size());
    dst += chunk.size();
  }
}
1086
// Reads the next `n` bytes into a new Cord and advances this iterator past
// them. Chooses among inline copy, btree read, full-edge ref-sharing, or a
// substring node depending on `n` and the current position.
Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
  ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
                        "Attempted to iterate past `end()`");
  Cord subcord;
  auto constexpr method = CordzUpdateTracker::kCordReader;

  if (n <= InlineRep::kMaxInline) {
    // Range to read fits in inline data. Flatten it.
    char* data = subcord.contents_.set_data(n);
    // Consume whole chunks while they are smaller than the remaining `n`.
    while (n > current_chunk_.size()) {
      memcpy(data, current_chunk_.data(), current_chunk_.size());
      data += current_chunk_.size();
      n -= current_chunk_.size();
      ++*this;
    }
    memcpy(data, current_chunk_.data(), n);
    if (n < current_chunk_.size()) {
      RemoveChunkPrefix(n);
    } else if (n > 0) {
      // Exactly consumed the current chunk; step to the next one.
      ++*this;
    }
    return subcord;
  }

  if (btree_reader_) {
    size_t chunk_size = current_chunk_.size();
    if (n <= chunk_size && n <= kMaxBytesToCopy) {
      // Small read within the current chunk: copy it out directly.
      subcord = Cord(current_chunk_.substr(0, n), method);
      if (n < chunk_size) {
        current_chunk_.remove_prefix(n);
      } else {
        current_chunk_ = btree_reader_.Next();
      }
    } else {
      // Larger read: let the btree reader build a shared sub-tree.
      CordRep* rep;
      current_chunk_ = btree_reader_.Read(n, chunk_size, rep);
      subcord.contents_.EmplaceTree(rep, method);
    }
    bytes_remaining_ -= n;
    return subcord;
  }

  // Short circuit if reading the entire data edge.
  assert(current_leaf_ != nullptr);
  if (n == current_leaf_->length) {
    bytes_remaining_ = 0;
    current_chunk_ = {};
    CordRep* tree = CordRep::Ref(current_leaf_);
    subcord.contents_.EmplaceTree(VerifyTree(tree), method);
    return subcord;
  }

  // From this point on, we need a partial substring node.
  // Get pointer to the underlying flat or external data payload and
  // compute data pointer and offset into current flat or external.
  CordRep* payload = current_leaf_->IsSubstring()
                         ? current_leaf_->substring()->child
                         : current_leaf_;
  const char* data = payload->IsExternal() ? payload->external()->base
                                           : payload->flat()->Data();
  const size_t offset = static_cast<size_t>(current_chunk_.data() - data);

  auto* tree = CordRepSubstring::Substring(payload, offset, n);
  subcord.contents_.EmplaceTree(VerifyTree(tree), method);
  bytes_remaining_ -= n;
  current_chunk_.remove_prefix(n);
  return subcord;
}
1155
// Returns the byte at index `i`, descending the tree directly without
// materializing a chunk iterator.
char Cord::operator[](size_t i) const {
  ABSL_HARDENING_ASSERT(i < size());
  size_t offset = i;
  const CordRep* rep = contents_.tree();
  if (rep == nullptr) {
    // Inlined representation: index straight into the inline buffer.
    return contents_.data()[i];
  }
  rep = cord_internal::SkipCrcNode(rep);
  while (true) {
    assert(rep != nullptr);
    assert(offset < rep->length);
    if (rep->IsFlat()) {
      // Get the "i"th character directly from the flat array.
      return rep->flat()->Data()[offset];
    } else if (rep->IsBtree()) {
      return rep->btree()->GetCharacter(offset);
    } else if (rep->IsExternal()) {
      // Get the "i"th character from the external array.
      return rep->external()->base[offset];
    } else {
      // This must be a substring node, so bypass it to get to the child.
      assert(rep->IsSubstring());
      offset += rep->substring()->start;
      rep = rep->substring()->child;
    }
  }
}
1183
1184 namespace {
1185
1186 // Tests whether the sequence of chunks beginning at `position` starts with
1187 // `needle`.
1188 //
1189 // REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
1190 // equal to `needle.size()`.
IsSubstringInCordAt(absl::Cord::CharIterator position,absl::string_view needle)1191 bool IsSubstringInCordAt(absl::Cord::CharIterator position,
1192 absl::string_view needle) {
1193 auto haystack_chunk = absl::Cord::ChunkRemaining(position);
1194 while (true) {
1195 // Precondition is that `absl::Cord::ChunkRemaining(position)` is not
1196 // empty. This assert will trigger if that is not true.
1197 assert(!haystack_chunk.empty());
1198 auto min_length = std::min(haystack_chunk.size(), needle.size());
1199 if (!absl::ConsumePrefix(&needle, haystack_chunk.substr(0, min_length))) {
1200 return false;
1201 }
1202 if (needle.empty()) {
1203 return true;
1204 }
1205 absl::Cord::Advance(&position, min_length);
1206 haystack_chunk = absl::Cord::ChunkRemaining(position);
1207 }
1208 }
1209
1210 } // namespace
1211
// A few options how this could be implemented:
// (a) Flatten the Cord and find, i.e.
//       haystack.Flatten().find(needle)
//     For large 'haystack' (where Cord makes sense to be used), this copies
//     the whole 'haystack' and can be slow.
// (b) Use std::search, i.e.
//       std::search(haystack.char_begin(), haystack.char_end(),
//                   needle.begin(), needle.end())
//     This avoids the copy, but compares one byte at a time, and branches a
//     lot every time it has to advance. It is also not possible to use
//     std::search as is, because CharIterator is only an input iterator, not a
//     forward iterator.
// (c) Use string_view::find in each fragment, and specifically handle fragment
//     boundaries.
//
// This currently implements option (b).
absl::Cord::CharIterator absl::Cord::FindImpl(CharIterator it,
                                              absl::string_view needle) const {
  // Ensure preconditions are met by callers first.

  // Needle must not be empty.
  assert(!needle.empty());
  // Haystack must be at least as large as needle.
  assert(it.chunk_iterator_.bytes_remaining_ >= needle.size());

  // Cord is a sequence of chunks. To find `needle` we go chunk by chunk looking
  // for the first char of needle, up until we have advanced `N` defined as
  // `haystack.size() - needle.size()`. If we find the first char of needle at
  // `P` and `P` is less than `N`, we then call `IsSubstringInCordAt` to
  // see if this is the needle. If not, we advance to `P + 1` and try again.
  while (it.chunk_iterator_.bytes_remaining_ >= needle.size()) {
    auto haystack_chunk = Cord::ChunkRemaining(it);
    assert(!haystack_chunk.empty());
    // Look for the first char of `needle` in the current chunk.
    auto idx = haystack_chunk.find(needle.front());
    if (idx == absl::string_view::npos) {
      // No potential match in this chunk, advance past it.
      Cord::Advance(&it, haystack_chunk.size());
      continue;
    }
    // We found the start of a potential match in the chunk. Advance the
    // iterator and haystack chunk to the match position.
    Cord::Advance(&it, idx);
    // Check if there is enough haystack remaining to actually have a match.
    if (it.chunk_iterator_.bytes_remaining_ < needle.size()) {
      break;
    }
    // Check if this is `needle`.
    if (IsSubstringInCordAt(it, needle)) {
      return it;
    }
    // No match, increment the iterator for the next attempt.
    Cord::Advance(&it, 1);
  }
  // If we got here, we did not find `needle`.
  return char_end();
}
1269
Find(absl::string_view needle) const1270 absl::Cord::CharIterator absl::Cord::Find(absl::string_view needle) const {
1271 if (needle.empty()) {
1272 return char_begin();
1273 }
1274 if (needle.size() > size()) {
1275 return char_end();
1276 }
1277 if (needle.size() == size()) {
1278 return *this == needle ? char_begin() : char_end();
1279 }
1280 return FindImpl(char_begin(), needle);
1281 }
1282
1283 namespace {
1284
// Tests whether the sequence of chunks beginning at `haystack` starts with the
// sequence of chunks beginning at `needle_begin` and extending to `needle_end`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle_end - needle_begin` and `advance`.
bool IsSubcordInCordAt(absl::Cord::CharIterator haystack,
                       absl::Cord::CharIterator needle_begin,
                       absl::Cord::CharIterator needle_end) {
  while (needle_begin != needle_end) {
    auto haystack_chunk = absl::Cord::ChunkRemaining(haystack);
    assert(!haystack_chunk.empty());
    auto needle_chunk = absl::Cord::ChunkRemaining(needle_begin);
    // Compare only the overlap of the two current chunks, then advance both
    // iterators by that amount so chunk boundaries need not line up.
    auto min_length = std::min(haystack_chunk.size(), needle_chunk.size());
    if (haystack_chunk.substr(0, min_length) !=
        needle_chunk.substr(0, min_length)) {
      return false;
    }
    absl::Cord::Advance(&haystack, min_length);
    absl::Cord::Advance(&needle_begin, min_length);
  }
  return true;
}
1307
// Tests whether the sequence of chunks beginning at `position` starts with the
// cord `needle`.
//
// REQUIRES: remaining `absl::Cord` starting at `position` is greater than or
// equal to `needle.size()`.
bool IsSubcordInCordAt(absl::Cord::CharIterator position,
                       const absl::Cord& needle) {
  // Delegates to the iterator-pair overload over needle's full char range.
  return IsSubcordInCordAt(position, needle.char_begin(), needle.char_end());
}
1317
1318 } // namespace
1319
// Returns an iterator to the first occurrence of the cord `needle`, or
// char_end() when absent. Searches for needle's first chunk via FindImpl,
// then verifies the remainder of the subcord at each candidate position.
absl::Cord::CharIterator absl::Cord::Find(const absl::Cord& needle) const {
  if (needle.empty()) {
    return char_begin();
  }
  const auto needle_size = needle.size();
  if (needle_size > size()) {
    return char_end();
  }
  if (needle_size == size()) {
    // Equal lengths: containment degenerates to whole-cord equality.
    return *this == needle ? char_begin() : char_end();
  }
  const auto needle_chunk = Cord::ChunkRemaining(needle.char_begin());
  auto haystack_it = char_begin();
  while (true) {
    haystack_it = FindImpl(haystack_it, needle_chunk);
    if (haystack_it == char_end() ||
        haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    // We found the first chunk of `needle` at `haystack_it` but not the entire
    // subcord. Advance past the first chunk and check for the remainder.
    auto haystack_advanced_it = haystack_it;
    auto needle_it = needle.char_begin();
    Cord::Advance(&haystack_advanced_it, needle_chunk.size());
    Cord::Advance(&needle_it, needle_chunk.size());
    if (IsSubcordInCordAt(haystack_advanced_it, needle_it, needle.char_end())) {
      return haystack_it;
    }
    Cord::Advance(&haystack_it, 1);
    if (haystack_it.chunk_iterator_.bytes_remaining_ < needle_size) {
      break;
    }
    if (haystack_it.chunk_iterator_.bytes_remaining_ == needle_size) {
      // Special case, if there is exactly `needle_size` bytes remaining, the
      // subcord is either at `haystack_it` or not at all.
      if (IsSubcordInCordAt(haystack_it, needle)) {
        return haystack_it;
      }
      break;
    }
  }
  return char_end();
}
1363
Contains(absl::string_view rhs) const1364 bool Cord::Contains(absl::string_view rhs) const {
1365 return rhs.empty() || Find(rhs) != char_end();
1366 }
1367
Contains(const absl::Cord & rhs) const1368 bool Cord::Contains(const absl::Cord& rhs) const {
1369 return rhs.empty() || Find(rhs) != char_end();
1370 }
1371
// Flattens a tree-backed cord into one contiguous buffer, installs that
// buffer as the new single-node tree, and returns a view of the data.
absl::string_view Cord::FlattenSlowPath() {
  assert(contents_.is_tree());
  size_t total_size = size();
  CordRep* new_rep;
  char* new_buffer;

  // Try to put the contents into a new flat rep. If they won't fit in the
  // biggest possible flat node, use an external rep instead.
  if (total_size <= kMaxFlatLength) {
    new_rep = CordRepFlat::New(total_size);
    new_rep->length = total_size;
    new_buffer = new_rep->flat()->Data();
    CopyToArraySlowPath(new_buffer);
  } else {
    // External rep: the releaser deallocates the buffer we allocate here.
    new_buffer = std::allocator<char>().allocate(total_size);
    CopyToArraySlowPath(new_buffer);
    new_rep = absl::cord_internal::NewExternalRep(
        absl::string_view(new_buffer, total_size), [](absl::string_view s) {
          std::allocator<char>().deallocate(const_cast<char*>(s.data()),
                                            s.size());
        });
  }
  // Swap in the new rep, releasing the old tree.
  CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
  CordRep::Unref(contents_.as_tree());
  contents_.SetTree(new_rep, scope);
  return absl::string_view(new_buffer, total_size);
}
1399
// If `rep` consists of a single contiguous data fragment, stores a view of
// that fragment in `*fragment` and returns true; otherwise returns false.
/* static */ bool Cord::GetFlatAux(absl::Nonnull<CordRep*> rep,
                                   absl::Nonnull<absl::string_view*> fragment) {
  assert(rep != nullptr);
  if (rep->length == 0) {
    // Zero-length cords are trivially "flat" with an empty fragment.
    *fragment = absl::string_view();
    return true;
  }
  rep = cord_internal::SkipCrcNode(rep);
  if (rep->IsFlat()) {
    *fragment = absl::string_view(rep->flat()->Data(), rep->length);
    return true;
  } else if (rep->IsExternal()) {
    *fragment = absl::string_view(rep->external()->base, rep->length);
    return true;
  } else if (rep->IsBtree()) {
    return rep->btree()->IsFlat(fragment);
  } else if (rep->IsSubstring()) {
    // A substring is flat when its child is; apply the substring's offset.
    CordRep* child = rep->substring()->child;
    if (child->IsFlat()) {
      *fragment = absl::string_view(
          child->flat()->Data() + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsExternal()) {
      *fragment = absl::string_view(
          child->external()->base + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsBtree()) {
      return child->btree()->IsFlat(rep->substring()->start, rep->length,
                                    fragment);
    }
  }
  return false;
}
1433
ForEachChunkAux(absl::Nonnull<absl::cord_internal::CordRep * > rep,absl::FunctionRef<void (absl::string_view)> callback)1434 /* static */ void Cord::ForEachChunkAux(
1435 absl::Nonnull<absl::cord_internal::CordRep*> rep,
1436 absl::FunctionRef<void(absl::string_view)> callback) {
1437 assert(rep != nullptr);
1438 if (rep->length == 0) return;
1439 rep = cord_internal::SkipCrcNode(rep);
1440
1441 if (rep->IsBtree()) {
1442 ChunkIterator it(rep), end;
1443 while (it != end) {
1444 callback(*it);
1445 ++it;
1446 }
1447 return;
1448 }
1449
1450 // This is a leaf node, so invoke our callback.
1451 absl::cord_internal::CordRep* current_node = cord_internal::SkipCrcNode(rep);
1452 absl::string_view chunk;
1453 bool success = GetFlatAux(current_node, &chunk);
1454 assert(success);
1455 if (success) {
1456 callback(chunk);
1457 }
1458 }
1459
// Writes a human-readable description of the tree rooted at `nonnull_rep` to
// `*os`, one node per line (refcount, length, optional pointer, node kind).
// Unary nodes (CRC, SUBSTRING) are followed down iteratively with increasing
// indent; a leaf (or NULL) terminates the loop.
static void DumpNode(absl::Nonnull<CordRep*> nonnull_rep, bool include_data,
                     absl::Nonnull<std::ostream*> os, int indent) {
  CordRep* rep = nonnull_rep;
  const int kIndentStep = 1;
  for (;;) {
    *os << std::setw(3) << (rep == nullptr ? 0 : rep->refcount.Get());
    *os << " " << std::setw(7) << (rep == nullptr ? 0 : rep->length);
    *os << " [";
    // `include_data` also prints the node's address for cross-referencing.
    if (include_data) *os << static_cast<void*>(rep);
    *os << "]";
    *os << " " << std::setw(indent) << "";
    bool leaf = false;
    if (rep == nullptr) {
      *os << "NULL\n";
      leaf = true;
    } else if (rep->IsCrc()) {
      *os << "CRC crc=" << rep->crc()->crc_cord_state.Checksum() << "\n";
      indent += kIndentStep;
      rep = rep->crc()->child;
    } else if (rep->IsSubstring()) {
      *os << "SUBSTRING @ " << rep->substring()->start << "\n";
      indent += kIndentStep;
      rep = rep->substring()->child;
    } else {  // Leaf or ring
      leaf = true;
      if (rep->IsExternal()) {
        *os << "EXTERNAL [";
        if (include_data)
          *os << absl::CEscape(
              absl::string_view(rep->external()->base, rep->length));
        *os << "]\n";
      } else if (rep->IsFlat()) {
        *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
        if (include_data)
          *os << absl::CEscape(
              absl::string_view(rep->flat()->Data(), rep->length));
        *os << "]\n";
      } else {
        // Btree nodes dump their own subtree.
        CordRepBtree::Dump(rep, /*label=*/"", include_data, *os);
      }
    }
    if (leaf) {
      break;
    }
  }
}
1506
// Builds a diagnostic message identifying `node` and dumping the whole tree
// rooted at `root` (with data included) for ABSL_INTERNAL_CHECK failures.
static std::string ReportError(absl::Nonnull<CordRep*> root,
                               absl::Nonnull<CordRep*> node) {
  std::ostringstream buf;
  buf << "Error at node " << node << " in:";
  DumpNode(root, true, &buf);
  return buf.str();
}
1514
// Runs structural sanity checks over the (sub)tree rooted at `start_node`,
// CHECK-failing with a rendered dump of `root` on any violation. Always
// returns true so it can be used inside assert-style expressions.
static bool VerifyNode(absl::Nonnull<CordRep*> root,
                       absl::Nonnull<CordRep*> start_node) {
  absl::InlinedVector<absl::Nonnull<CordRep*>, 2> worklist;
  worklist.push_back(start_node);
  do {
    CordRep* node = worklist.back();
    worklist.pop_back();

    ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
    if (node != root) {
      // Only the root may be empty or carry a CRC wrapper.
      ABSL_INTERNAL_CHECK(node->length != 0, ReportError(root, node));
      ABSL_INTERNAL_CHECK(!node->IsCrc(), ReportError(root, node));
    }

    if (node->IsFlat()) {
      ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
                          ReportError(root, node));
    } else if (node->IsExternal()) {
      ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
                          ReportError(root, node));
    } else if (node->IsSubstring()) {
      // A substring must lie fully within its child's bounds.
      ABSL_INTERNAL_CHECK(
          node->substring()->start < node->substring()->child->length,
          ReportError(root, node));
      ABSL_INTERNAL_CHECK(node->substring()->start + node->length <=
                              node->substring()->child->length,
                          ReportError(root, node));
    } else if (node->IsCrc()) {
      // A CRC node may be childless only when it covers zero bytes.
      ABSL_INTERNAL_CHECK(
          node->crc()->child != nullptr || node->crc()->length == 0,
          ReportError(root, node));
      if (node->crc()->child != nullptr) {
        ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length,
                            ReportError(root, node));
        worklist.push_back(node->crc()->child);
      }
    }
  } while (!worklist.empty());
  return true;
}
1555
operator <<(std::ostream & out,const Cord & cord)1556 std::ostream& operator<<(std::ostream& out, const Cord& cord) {
1557 for (absl::string_view chunk : cord.Chunks()) {
1558 out.write(chunk.data(), static_cast<std::streamsize>(chunk.size()));
1559 }
1560 return out;
1561 }
1562
namespace strings_internal {
// Test-only accessors exposing cord_internal sizing constants and helpers.
size_t CordTestAccess::FlatOverhead() { return cord_internal::kFlatOverhead; }
size_t CordTestAccess::MaxFlatLength() { return cord_internal::kMaxFlatLength; }
size_t CordTestAccess::FlatTagToLength(uint8_t tag) {
  return cord_internal::TagToLength(tag);
}
uint8_t CordTestAccess::LengthToTag(size_t s) {
  ABSL_INTERNAL_CHECK(s <= kMaxFlatLength, absl::StrCat("Invalid length ", s));
  // Tags are computed from the total allocated size, including overhead.
  return cord_internal::AllocatedSizeToTag(s + cord_internal::kFlatOverhead);
}
size_t CordTestAccess::SizeofCordRepExternal() {
  return sizeof(CordRepExternal);
}
size_t CordTestAccess::SizeofCordRepSubstring() {
  return sizeof(CordRepSubstring);
}
}  // namespace strings_internal
1580 ABSL_NAMESPACE_END
1581 } // namespace absl
1582