#include "clang/Rewrite/Core/RewriteRope.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstring>

using namespace clang;

class RopePieceBTreeNode {
protected:
  /// WidthFactor - Each level holds between WidthFactor and 2*WidthFactor
  /// elements (rope pieces or children), except the root.
  enum { WidthFactor = 8 };

  unsigned Size = 0; // Bytes covered by this node, including children.
  bool IsLeaf;       // True for RopePieceBTreeLeaf, false for interior nodes.

  RopePieceBTreeNode(bool isLeaf) : IsLeaf(isLeaf) {}
  ~RopePieceBTreeNode() = default;

public:
  bool isLeaf() const { return IsLeaf; }
  unsigned size() const { return Size; }

  void Destroy();
  RopePieceBTreeNode *split(unsigned Offset);
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
  void erase(unsigned Offset, unsigned NumBytes);
};
class RopePieceBTreeLeaf : public RopePieceBTreeNode {
  /// NumPieces - Number of rope pieces currently active in the Pieces array.
  unsigned char NumPieces = 0;

  /// Pieces - The file chunks currently held in this leaf.
  RopePiece Pieces[2 * WidthFactor];

  /// PrevLeaf/NextLeaf - An in-order linked list of leaves, allowing efficient
  /// forward iteration without walking the tree.  PrevLeaf points at the
  /// previous leaf's NextLeaf field rather than at the leaf itself.
  RopePieceBTreeLeaf **PrevLeaf = nullptr;
  RopePieceBTreeLeaf *NextLeaf = nullptr;

public:
  RopePieceBTreeLeaf() : RopePieceBTreeNode(true) {}

  ~RopePieceBTreeLeaf() {
    if (PrevLeaf || NextLeaf)
      removeFromLeafInOrder();
    clear();
  }

  bool isFull() const { return NumPieces == 2 * WidthFactor; }

  /// clear - Remove all rope pieces from this leaf, dropping their refcounts.
  void clear() {
    while (NumPieces)
      Pieces[--NumPieces] = RopePiece();
    Size = 0;
  }

  unsigned getNumPieces() const { return NumPieces; }

  const RopePiece &getPiece(unsigned i) const {
    assert(i < getNumPieces() && "Invalid piece ID");
    return Pieces[i];
  }

  const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }

  /// insertAfterLeafInOrder - Link this leaf into the leaf list right after
  /// the specified leaf.
  void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
    assert(!PrevLeaf && !NextLeaf && "Already in ordering");

    NextLeaf = Node->NextLeaf;
    if (NextLeaf)
      NextLeaf->PrevLeaf = &NextLeaf;
    PrevLeaf = &Node->NextLeaf;
    Node->NextLeaf = this;
  }

  /// removeFromLeafInOrder - Unlink this leaf from the leaf list.
  void removeFromLeafInOrder() {
    if (PrevLeaf) {
      *PrevLeaf = NextLeaf;
      if (NextLeaf)
        NextLeaf->PrevLeaf = PrevLeaf;
    } else if (NextLeaf) {
      NextLeaf->PrevLeaf = nullptr;
    }
  }

  /// FullRecomputeSizeLocally - Recompute the Size field by summing the sizes
  /// of the pieces held in this leaf.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
      Size += getPiece(i).size();
  }

  RopePieceBTreeNode *split(unsigned Offset);
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) { return N->isLeaf(); }
};
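// Side note (illustrative sketch, not part of RewriteRope.cpp): PrevLeaf above
// points at the *previous leaf's NextLeaf field*, not at the previous leaf
// itself, which is what lets insertAfterLeafInOrder/removeFromLeafInOrder
// splice in O(1) with no special case for the head of the list.  The
// hypothetical DemoLeaf below, in its own namespace, models just that idiom
// and would compile on its own with <cassert> available.
namespace leaf_chain_demo {
struct DemoLeaf {
  DemoLeaf **PrevLink = nullptr; // whichever pointer currently points at us
  DemoLeaf *Next = nullptr;

  void insertAfter(DemoLeaf *Node) {
    Next = Node->Next;
    if (Next)
      Next->PrevLink = &Next;
    PrevLink = &Node->Next;
    Node->Next = this;
  }

  void unlink() {
    if (PrevLink) {
      *PrevLink = Next;
      if (Next)
        Next->PrevLink = PrevLink;
    } else if (Next) {
      Next->PrevLink = nullptr;
    }
  }
};

inline void demo() {
  DemoLeaf A, B, C;
  B.insertAfter(&A); // A -> B
  C.insertAfter(&A); // A -> C -> B
  C.unlink();        // back to A -> B
  assert(A.Next == &B && B.PrevLink == &A.Next);
}
} // namespace leaf_chain_demo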
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
  // Find the piece containing Offset.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs + Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, we're done.
  if (PieceOffs == Offset)
    return nullptr;

  // Otherwise, split piece 'i' by shrinking it and inserting its tail as a
  // new piece.
  unsigned IntraPieceOffset = Offset - PieceOffs;
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs + IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs + IntraPieceOffset;
  Size += Pieces[i].size();

  return insert(Offset, Tail);
}
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
                                               const RopePiece &R) {
  // If this node is not full, just insert the piece in its sorted position.
  if (!isFull()) {
    // Find the insertion point; we are guaranteed that there is a split at
    // the specified offset.
    unsigned i = 0, e = getNumPieces();
    unsigned SlotOffs = 0;
    for (; Offset > SlotOffs; ++i)
      SlotOffs += getPiece(i).size();
    assert(SlotOffs == Offset && "Split didn't occur before insertion!");

    // Slide later pieces over to make room.
    for (; i != e; --e)
      Pieces[e] = Pieces[e - 1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return nullptr;
  }

  // Otherwise this leaf is full (2*WidthFactor pieces): keep the first
  // WidthFactor pieces here and move the rest into a new right-hand leaf.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();
  std::copy(&Pieces[WidthFactor], &Pieces[2 * WidthFactor],
            &NewNode->Pieces[0]);
  // Replace the moved-out pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2 * WidthFactor], RopePiece());

  // Both halves now hold WidthFactor pieces; recompute their sizes.
  NewNode->NumPieces = NumPieces = WidthFactor;
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Link the new leaf into the in-order leaf chain.
  NewNode->insertAfterLeafInOrder(this);

  // Finally, insert R into whichever half now covers Offset; these
  // insertions can't fail.  Return the new node for the parent to link in.
  if (this->size() >= Offset)
    this->insert(Offset, R);
  else
    NewNode->insert(Offset - this->size(), R);
  return NewNode;
}
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, find the piece
  // that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces are completely covered by the erased range.
  for (; Offset + NumBytes > PieceOffs + getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If the range ends exactly at the end of piece 'i', include it as well.
  if (Offset + NumBytes == PieceOffs + getPiece(i).size()) {
    PieceOffs += getPiece(i).size();
    ++i;
  }

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i - StartPiece;
    for (; i != getNumPieces(); ++i)
      Pieces[i - NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces() - NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    unsigned CoverBytes = PieceOffs - Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // Any remaining bytes erase a prefix of piece StartPiece: shrink it by
  // moving its start offset forward.
  if (NumBytes == 0) return;
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;
  Size -= NumBytes;
}
class RopePieceBTreeInterior : public RopePieceBTreeNode {
  /// NumChildren - Number of children currently active in the Children array.
  unsigned char NumChildren = 0;

  RopePieceBTreeNode *Children[2 * WidthFactor];

public:
  RopePieceBTreeInterior() : RopePieceBTreeNode(false) {}

  RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
      : RopePieceBTreeNode(false) {
    Children[0] = LHS;
    Children[1] = RHS;
    NumChildren = 2;
    Size = LHS->size() + RHS->size();
  }

  ~RopePieceBTreeInterior() {
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Children[i]->Destroy();
  }

  bool isFull() const { return NumChildren == 2 * WidthFactor; }

  unsigned getNumChildren() const { return NumChildren; }

  const RopePieceBTreeNode *getChild(unsigned i) const {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  RopePieceBTreeNode *getChild(unsigned i) {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  /// FullRecomputeSizeLocally - Recompute the Size field by summing the sizes
  /// of the children.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Size += getChild(i)->size();
  }

  RopePieceBTreeNode *split(unsigned Offset);
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// HandleChildPiece - A child propagated an insertion result up to us.
  /// Insert the new child, and/or propagate the result further up the tree.
  RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);

  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) { return !N->isLeaf(); }
};
RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
  // Figure out which child the offset lands in.
  unsigned ChildOffset = 0;
  unsigned i = 0;
  for (; Offset >= ChildOffset + getChild(i)->size(); ++i)
    ChildOffset += getChild(i)->size();

  // If there is already a split point at the child boundary, we're done.
  if (ChildOffset == Offset)
    return nullptr;

  // Otherwise, recursively split the child; if it overflows, the new node is
  // handed to HandleChildPiece to be linked in (possibly splitting us too).
  if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset - ChildOffset))
    return HandleChildPiece(i, RHS);

  return nullptr; // Done!
}
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
                                                   const RopePiece &R) {
  // Find the insertion point; we are guaranteed that there is a split at the
  // specified offset.
  unsigned i = 0, e = getNumChildren();
  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case: insert at the end of the last child.
    i = e - 1;
    ChildOffs = size() - getChild(i)->size();
  } else {
    for (; Offset > ChildOffs + getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  Size += R.size();

  // Insert into the chosen child; if it overflows, link in the new node.
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset - ChildOffs, R))
    return HandleChildPiece(i, RHS);

  return nullptr;
}
RopePieceBTreeNode *
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
  // If we still have space, just insert the new child after child 'i'.
  if (!isFull()) {
    if (i + 1 != getNumChildren())
      memmove(&Children[i + 2], &Children[i + 1],
              (getNumChildren() - i - 1) * sizeof(Children[0]));
    Children[i + 1] = RHS;
    ++NumChildren;
    return nullptr;
  }

  // Okay, this node is full: split it in half, moving the last WidthFactor
  // children into a newly allocated interior node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor * sizeof(Children[0]));
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Insert the new child into whichever half can now hold it; these
  // insertions can't fail.
  if (i < WidthFactor)
    this->HandleChildPiece(i, RHS);
  else
    NewNode->HandleChildPiece(i - WidthFactor, RHS);

  // Recompute the two nodes' sizes and return the new node to our parent.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();
  return NewNode;
}
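// How an insertion propagates (worked example, WidthFactor = 8): inserting
// into a leaf that already holds 16 pieces makes RopePieceBTreeLeaf::insert
// split the leaf and return the new right-hand leaf.  The parent's insert()
// receives that node and calls HandleChildPiece(); if the parent already has
// 16 children it splits the same way and returns its own new right-hand
// interior node, and so on up the tree.  Only RopePieceBTree::insert (further
// below) sees a non-null result escape the root, at which point it creates a
// new two-child RopePieceBTreeInterior root, growing the tree by one level.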
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If the erased range is entirely inside this child, pass it on.
    if (Offset + NumBytes <= CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If the range starts in the middle of the child, it must run to the
    // child's end; erase that tail and continue with the next child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size() - Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      Offset = 0;
      ++i;
      continue;
    }

    // Otherwise the range completely covers the child: destroy it and slide
    // the remaining children down.
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i + 1],
              (getNumChildren() - i) * sizeof(Children[0]));
  }
}
void RopePieceBTreeNode::Destroy() {
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    delete Leaf;
  else
    delete cast<RopePieceBTreeInterior>(this);
}

RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
  assert(Offset <= size() && "Invalid offset to split!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->split(Offset);
  return cast<RopePieceBTreeInterior>(this)->split(Offset);
}

RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
                                               const RopePiece &R) {
  assert(Offset <= size() && "Invalid offset to insert!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->insert(Offset, R);
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
}

void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
  assert(Offset + NumBytes <= size() && "Invalid offset to erase!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->erase(Offset, NumBytes);
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
}
//===----------------------------------------------------------------------===//
// RopePieceBTreeIterator Implementation
//===----------------------------------------------------------------------===//

static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf *>(P);
}

// begin iterator.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const auto *N = static_cast<const RopePieceBTreeNode *>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const auto *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Empty tree: this is an end() iterator.
    CurPiece = nullptr;
  CurChar = 0;
}
void RopePieceBTreeIterator::MoveToNextPiece() {
  if (CurPiece !=
      &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces() - 1)) {
    CurChar = 0;
    ++CurPiece;
    return;
  }

  // Find the next non-empty leaf node.
  do
    CurNode = getCN(CurNode)->getNextLeafInOrder();
  while (CurNode && getCN(CurNode)->getNumPieces() == 0);
  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Hit end().
    CurPiece = nullptr;
  CurChar = 0;
}
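// Usage sketch (an assumption based on the RopePieceBTreeIterator and
// RewriteRope declarations in clang/Rewrite/Core/RewriteRope.h, not code from
// this file): flattening a rope by walking its characters ends up calling
// MoveToNextPiece() every time CurChar runs off the end of CurPiece.
//
//   RewriteRope Rope;
//   const char Text[] = "hello world";
//   Rope.insert(0, Text, Text + sizeof(Text) - 1);
//   std::string Flat;
//   for (RewriteRope::iterator I = Rope.begin(), E = Rope.end(); I != E; ++I)
//     Flat += *I; // operator* yields the current character of CurPiece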
static RopePieceBTreeNode *getRoot(void *P) {
  return static_cast<RopePieceBTreeNode *>(P);
}
RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}

void RopePieceBTree::clear() {
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
    Leaf->clear();
  else {
    getRoot(Root)->Destroy();
    Root = new RopePieceBTreeLeaf();
  }
}

void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
  // #1. Split at Offset so the insertion lands on a piece boundary.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);

  // #2. Do the insertion; if the root overflows, grow the tree a level.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
}

void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
  // Split at Offset, then hand the erase to the root.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
  getRoot(Root)->erase(Offset, NumBytes);
}
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
  unsigned Len = End - Start;
  assert(Len && "Zero length RopePiece is invalid!");

  // If there is space for this string in the current alloc buffer, use it.
  if (AllocOffs + Len <= AllocChunkSize) {
    memcpy(AllocBuffer->Data + AllocOffs, Start, Len);
    AllocOffs += Len;
    return RopePiece(AllocBuffer, AllocOffs - Len, AllocOffs);
  }

  // If we don't have enough room because this specific allocation is huge,
  // just allocate a new rope piece for it alone.
  if (Len > AllocChunkSize) {
    unsigned Size = End - Start + sizeof(RopeRefCountString) - 1;
    auto *Res = reinterpret_cast<RopeRefCountString *>(new char[Size]);
    Res->RefCount = 0;
    memcpy(Res->Data, Start, End - Start);
    return RopePiece(Res, 0, End - Start);
  }

  // Otherwise, this was a small request that doesn't fit in the current
  // buffer: make a new chunk and share it with later allocations.
  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
  auto *Res = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
  Res->RefCount = 0;
  memcpy(Res->Data, Start, Len);
  AllocBuffer = Res;
  AllocOffs = Len;

  return RopePiece(AllocBuffer, 0, Len);
}
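// For context (an assumption drawn from RewriteRope.h, not part of this file):
// MakeRopeString is the only producer of RopePieces for a RewriteRope, and the
// public mutators are thin wrappers that pair it with the B-tree operations
// above, roughly:
//
//   void RewriteRope::insert(unsigned Offset, const char *Start,
//                            const char *End) {
//     assert(Start != End && "Can't insert empty range");
//     Chunks.insert(Offset, MakeRopeString(Start, End));
//   }
//
//   void RewriteRope::erase(unsigned Offset, unsigned NumBytes) {
//     if (NumBytes == 0) return;
//     Chunks.erase(Offset, NumBytes);
//   }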