// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// Typically, clients should not need to access RepeatedField objects directly,
// but should instead use the accessor functions generated automatically by the
// protocol compiler.

#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__

#include <utility>
#ifdef _MSC_VER
// This is required for min/max on VS2013 only.
#include <algorithm>
#endif

#include <string.h>

#include <iterator>
#include <limits>
#include <string>
#include <type_traits>

#include <google/protobuf/stubs/casts.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/port.h>

// Must be included last.
#include <google/protobuf/port_def.inc>

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;
class Reflection;

template <typename T>
struct WeakRepeatedPtrField;

namespace internal {

class MergePartialFromCodedStreamHelper;
class SwapFieldHelper;

// kRepeatedFieldLowerClampLimit is the smallest size that will be allocated
// when growing a repeated field.
constexpr int kRepeatedFieldLowerClampLimit = 4;

// kRepeatedFieldUpperClampLimit is the lowest signed integer value that
// overflows when multiplied by 2 (which is undefined behavior). Sizes above
// this will clamp to the maximum int value instead of following exponential
// growth when growing a repeated field.
constexpr int kRepeatedFieldUpperClampLimit =
    (std::numeric_limits<int>::max() / 2) + 1;
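
// Illustrative only (not part of the API): under this clamping policy, a field
// grown one element at a time sees capacities 0 -> 4 -> 8 -> 16 -> ..., since
// the first allocation is rounded up to kRepeatedFieldLowerClampLimit and each
// later growth step at least doubles (see CalculateReserveSize() below). Once
// the capacity reaches kRepeatedFieldUpperClampLimit, the next step goes
// directly to std::numeric_limits<int>::max() rather than doubling, which
// would overflow.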

// A utility function for logging that doesn't need any template types.
void LogIndexOutOfBounds(int index, int size);

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
  return static_cast<int>(std::distance(begin, end));
}

template <typename Iter>
inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
                            std::input_iterator_tag /*unused*/) {
  return -1;
}

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end) {
  typedef typename std::iterator_traits<Iter>::iterator_category Category;
  return CalculateReserve(begin, end, Category());
}

// Swaps two blocks of memory of size sizeof(T).
template <typename T>
inline void SwapBlock(char* p, char* q) {
  T tmp;
  memcpy(&tmp, p, sizeof(T));
  memcpy(p, q, sizeof(T));
  memcpy(q, &tmp, sizeof(T));
}

// Swaps two blocks of memory of size kSize:
//  template <int kSize> void memswap(char* p, char* q);
template <int kSize>
inline typename std::enable_if<(kSize == 0), void>::type memswap(char*,
                                                                 char*) {}

#define PROTO_MEMSWAP_DEF_SIZE(reg_type, max_size)                           \
  template <int kSize>                                                       \
  typename std::enable_if<(kSize >= sizeof(reg_type) && kSize < (max_size)), \
                          void>::type                                        \
  memswap(char* p, char* q) {                                                \
    SwapBlock<reg_type>(p, q);                                               \
    memswap<kSize - sizeof(reg_type)>(p + sizeof(reg_type),                  \
                                      q + sizeof(reg_type));                 \
  }

PROTO_MEMSWAP_DEF_SIZE(uint8, 2)
PROTO_MEMSWAP_DEF_SIZE(uint16, 4)
PROTO_MEMSWAP_DEF_SIZE(uint32, 8)

#ifdef __SIZEOF_INT128__
PROTO_MEMSWAP_DEF_SIZE(uint64, 16)
PROTO_MEMSWAP_DEF_SIZE(__uint128_t, (1u << 31))
#else
PROTO_MEMSWAP_DEF_SIZE(uint64, (1u << 31))
#endif

#undef PROTO_MEMSWAP_DEF_SIZE

}  // namespace internal

// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages). Most users will
// not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
template <typename Element>
class RepeatedField final {
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types that have an alignment smaller than Arena");

 public:
  constexpr RepeatedField();
  explicit RepeatedField(Arena* arena);

  RepeatedField(const RepeatedField& other);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedField(Iter begin, Iter end);

  ~RepeatedField();

  RepeatedField& operator=(const RepeatedField& other);

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  const Element& at(int index) const;
  Element& at(int index);

  void Set(int index, const Element& value);
  void Add(const Element& value);

  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  Element* Add();

  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  // Removes the last element in the array.
  void RemoveLast();

  // Extracts elements with indices in "[start .. start+num-1]".
  // Copies them into "elements[0 .. num-1]" if "elements" is not NULL.
  // Caution: the implementation also moves elements with indices
  // [start+num ..].  Calling this routine inside a loop can cause quadratic
  // behavior.
  void ExtractSubrange(int start, int num, Element* elements);

  void Clear();
  void MergeFrom(const RepeatedField& other);
  void CopyFrom(const RepeatedField& other);
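
  // A minimal usage sketch of the bulk operations above (hypothetical
  // contents, shown for intuition only):
  //   RepeatedField<int> a, b;   // fill a with {1, 2} and b with {3}
  //   b.MergeFrom(a);            // b is now {3, 1, 2}
  //   b.CopyFrom(a);             // b is now {1, 2}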

  // Replaces the contents with RepeatedField(begin, end).
  template <typename Iter>
  void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size. If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resizes the RepeatedField to a new, smaller size. This is O(1).
  void Truncate(int new_size);

  void AddAlreadyReserved(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > Size().
  Element* AddAlreadyReserved();
  Element* AddNAlreadyReserved(int elements);
  int Capacity() const;

  // Like STL resize. Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array. This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data();
  const Element* data() const;

  // Swaps the entire contents with "other". If they are on separate arenas,
  // then this copies data between them.
  void Swap(RepeatedField* other);

  // Swaps the entire contents with "other". Should be called only if the
  // caller can guarantee that both repeated fields are on the same arena or
  // are on the heap. Swapping between different arenas is disallowed and
  // caught by a GOOGLE_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef Element* iterator;
  typedef const Element* const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Returns the number of bytes used by the repeated field, excluding
  // sizeof(*this)
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetArena() const {
    return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
                              : rep()->arena;
  }
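
  // Cost sketch for Swap() (declared above; illustrative only): when both
  // fields share ownership (same arena, or both on the heap), Swap() is a
  // cheap pointer exchange; otherwise the elements are deep-copied.
  //   RepeatedField<int> heap_a, heap_b;
  //   heap_a.Swap(&heap_b);  // O(1) pointer swap: both are heap-allocated
  //   // Had the fields lived on different arenas, Swap() would copy the
  //   // contents instead, and UnsafeArenaSwap() would not be legal at all.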

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

 private:
  static constexpr int kInitialSize = 0;
  // A note on the representation here (see also comment below for
  // RepeatedPtrFieldBase's struct Rep):
  //
  // We maintain the same sizeof(RepeatedField) as before we added arena
  // support so that we do not degrade performance by bloating memory usage.
  // Directly adding an arena_ element to RepeatedField is quite costly. By
  // using indirection in this way, we keep the same size when the
  // RepeatedField is empty (common case), and add only an 8-byte header to
  // the elements array when non-empty. We make sure to place the size fields
  // directly in the RepeatedField class to avoid costly cache misses due to
  // the indirection.
  int current_size_;
  int total_size_;
  struct Rep {
    Arena* arena;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    Element elements[(std::numeric_limits<int>::max() - 2 * sizeof(Arena*)) /
                     sizeof(Element)];
  };
  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);

  // If total_size_ == 0 this points to an Arena otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
  void* arena_or_elements_;

  // Returns a pointer to the elements array.
  // pre-condition: the array must have been allocated.
  Element* elements() const {
    GOOGLE_DCHECK_GT(total_size_, 0);
    // Because of above pre-condition this cast is safe.
    return unsafe_elements();
  }

  // Returns a pointer to the elements array if it exists; otherwise either
  // null or an invalid pointer is returned. This only happens for empty
  // repeated fields, where you can't dereference this pointer anyway (it's
  // empty).
  Element* unsafe_elements() const {
    return static_cast<Element*>(arena_or_elements_);
  }

  // Returns a pointer to the Rep struct.
  // pre-condition: the Rep must have been allocated, ie elements() is safe.
  Rep* rep() const {
    char* addr = reinterpret_cast<char*>(elements()) - offsetof(Rep, elements);
    return reinterpret_cast<Rep*>(addr);
  }

  friend class Arena;
  typedef void InternalArenaConstructable_;

  // Moves the contents of |from| into |to|, possibly clobbering |from| in the
  // process. For primitive types this is just a memcpy(), but it could be
  // specialized for non-primitive types to, say, swap each element instead.
  void MoveArray(Element* to, Element* from, int size);

  // Copies the elements of |from| into |to|.
  void CopyArray(Element* to, const Element* from, int size);

  // Internal helper to delete all elements and deallocate the storage.
  void InternalDeallocate(Rep* rep, int size) {
    if (rep != NULL) {
      Element* e = &rep->elements[0];
      if (!std::is_trivial<Element>::value) {
        Element* limit = &rep->elements[size];
        for (; e < limit; e++) {
          e->~Element();
        }
      }
      if (rep->arena == NULL) {
#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
        const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
        ::operator delete(static_cast<void*>(rep), bytes);
#else
        ::operator delete(static_cast<void*>(rep));
#endif
      }
    }
  }
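
  // Layout sketch of the non-empty representation (illustrative only):
  //
  //   arena_or_elements_ ------------+
  //                                  v
  //   [ Arena* arena | e[0] e[1] e[2] ... ]   <- one Rep allocation
  //   ^-- kRepHeaderSize --^
  //
  // rep() recovers the Rep* from the elements pointer by subtracting
  // offsetof(Rep, elements), which is why the pre-condition above matters.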

  // This class is a performance wrapper around RepeatedField::Add(const T&).
  // In general, unless a RepeatedField is a local stack variable, LLVM has a
  // hard time optimizing Add. The machine code tends to be
  //
  // loop:
  //   mov %size, dword ptr [%repeated_field]       // load
  //   cmp %size, dword ptr [%repeated_field + 4]
  //   jae fallback
  //   mov %buffer, qword ptr [%repeated_field + 8]
  //   mov dword [%buffer + %size * 4], %value
  //   inc %size                                    // increment
  //   mov dword ptr [%repeated_field], %size       // store
  //   jmp loop
  //
  // This puts a load/store in each iteration of the important loop variable
  // size. It's a pretty bad compile that happens even in simple cases, but
  // largely the presence of the fallback path disturbs the compiler's
  // mem-to-reg analysis.
  //
  // This class takes ownership of a repeated field for the duration of its
  // lifetime. The repeated field should not be accessed during this time,
  // i.e. only access through this class is allowed. This class should always
  // be a function-local stack variable. Intended use:
  //
  // void AddSequence(const int* begin, const int* end, RepeatedField<int>* out)
  // {
  //   RepeatedFieldAdder<int> adder(out);  // Take ownership of out
  //   for (auto it = begin; it != end; ++it) {
  //     adder.Add(*it);
  //   }
  // }
  //
  // Typically, because adder is a local stack variable, the compiler will
  // succeed in the mem-to-reg transformation and the machine code will be
  //
  // loop:
  //   cmp %size, %capacity
  //   jae fallback
  //   mov dword ptr [%buffer + %size * 4], %val
  //   inc %size
  //   jmp loop
  //
  // The first version executes at 7 cycles per iteration while the second
  // version executes at only 1 or 2 cycles.
  template <int = 0, bool = std::is_pod<Element>::value>
  class FastAdderImpl {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {
      index_ = repeated_field_->current_size_;
      capacity_ = repeated_field_->total_size_;
      buffer_ = repeated_field_->unsafe_elements();
    }
    ~FastAdderImpl() { repeated_field_->current_size_ = index_; }

    void Add(Element val) {
      if (index_ == capacity_) {
        repeated_field_->current_size_ = index_;
        repeated_field_->Reserve(index_ + 1);
        capacity_ = repeated_field_->total_size_;
        buffer_ = repeated_field_->unsafe_elements();
      }
      buffer_[index_++] = val;
    }

   private:
    RepeatedField* repeated_field_;
    int index_;
    int capacity_;
    Element* buffer_;

    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  // FastAdder is a wrapper for adding fields. The specialization above handles
  // POD types more efficiently than RepeatedField.
  template <int I>
  class FastAdderImpl<I, false> {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {}
    void Add(const Element& val) { repeated_field_->Add(val); }

   private:
    RepeatedField* repeated_field_;
    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  using FastAdder = FastAdderImpl<>;

  friend class TestRepeatedFieldHelper;
  friend class ::google::protobuf::internal::ParseContext;
};

namespace internal {
template <typename Element>
class RepeatedPtrIterator;
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;
}  // namespace internal

namespace internal {

// This is a helper template to copy an array of elements efficiently when they
// have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// effectively.
template <typename Element,
          bool HasTrivialCopy = std::is_pod<Element>::value>
struct ElementCopier {
  void operator()(Element* to, const Element* from, int array_size);
};

}  // namespace internal

namespace internal {

// type-traits helper for RepeatedPtrFieldBase: we only want to invoke
// arena-related "copy if on different arena" behavior if the necessary
// methods exist on the contained type. In particular, we rely on MergeFrom()
// existing as a general proxy for the fact that a copy will work, and we also
// provide a specific override for std::string*.
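// For example, the probe below resolves TypeImplementsMergeBehavior<T>::type
// to std::true_type for message types (which implement MergeFrom()) and for
// std::string (via the explicit specialization further down), and to
// std::false_type for a type with no MergeFrom() member.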
template <typename T>
struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  typedef char HasMerge;
  typedef long HasNoMerge;

  // We accept either of:
  // - void MergeFrom(const T& other)
  // - bool MergeFrom(const T& other)
  //
  // We mangle these names a bit to avoid compatibility issues in 'unclean'
  // include environments that may have, e.g., "#define test ..." (yes, this
  // exists).
  template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  struct CheckType;
  template <typename U>
  static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  template <typename U>
  static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  template <typename U>
  static HasNoMerge Check(...);

  // Resolves to either std::true_type or std::false_type.
  typedef std::integral_constant<bool, (sizeof(Check<T>(0)) ==
                                        sizeof(HasMerge))>
      type;
};

template <typename T, typename = void>
struct TypeImplementsMergeBehavior
    : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};

template <>
struct TypeImplementsMergeBehavior<std::string> {
  typedef std::true_type type;
};

template <typename T>
struct IsMovable
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                       std::is_move_assignable<T>::value> {};

// This is the common base class for RepeatedPtrFields. It deals only in void*
// pointers. Users should not use this interface directly.
//
// The methods of this interface correspond to the methods of
// RepeatedPtrField, but may have a template argument called TypeHandler. Its
// signature is:
//   class TypeHandler {
//    public:
//     typedef MyType Type;
//     static Type* New();
//     static Type* NewFromPrototype(const Type* prototype,
//                                   Arena* arena);
//     static void Delete(Type*);
//     static void Clear(Type*);
//     static void Merge(const Type& from, Type* to);
//
//     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
//     static int SpaceUsedLong(const Type&);
//   };
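//
// For intuition, a bare-bones handler for a hypothetical heap-only type could
// look like the sketch below (the real handlers further down also deal with
// arenas and movability):
//   class MyTypeHandler {
//    public:
//     typedef MyType Type;
//     static Type* New() { return new Type; }
//     static Type* NewFromPrototype(const Type*, Arena*) { return New(); }
//     static void Delete(Type* t) { delete t; }
//     static void Clear(Type* t) { t->Clear(); }
//     static void Merge(const Type& from, Type* to) { to->MergeFrom(from); }
//   };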
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
 protected:
  constexpr RepeatedPtrFieldBase();
  explicit RepeatedPtrFieldBase(Arena* arena);
  ~RepeatedPtrFieldBase() {
#ifndef NDEBUG
    // Try to trigger a segfault / asan failure in non-opt builds if arena_'s
    // lifetime has ended before the destructor.
    if (arena_) (void)arena_->SpaceAllocated();
#endif
  }

  // Must be called from destructor.
  template <typename TypeHandler>
  void Destroy();

  bool empty() const;
  int size() const;

  template <typename TypeHandler>
  const typename TypeHandler::Type& at(int index) const;
  template <typename TypeHandler>
  typename TypeHandler::Type& at(int index);

  template <typename TypeHandler>
  typename TypeHandler::Type* Mutable(int index);
  template <typename TypeHandler>
  void Delete(int index);
  template <typename TypeHandler>
  typename TypeHandler::Type* Add(
      typename TypeHandler::Type* prototype = NULL);

 public:
  // The next few methods are public so that they can be called from generated
  // code when implicit weak fields are used, but they should never be called
  // by application code.

  template <typename TypeHandler>
  const typename TypeHandler::Type& Get(int index) const;

  // Creates and adds an element using the given prototype, without
  // introducing a link-time dependency on the concrete message type. This
  // method is used to implement implicit weak fields. The prototype may be
  // NULL, in which case an ImplicitWeakMessage will be used as a placeholder.
  MessageLite* AddWeak(const MessageLite* prototype);

  template <typename TypeHandler>
  void Clear();

  template <typename TypeHandler>
  void MergeFrom(const RepeatedPtrFieldBase& other);

  inline void InternalSwap(RepeatedPtrFieldBase* other);

 protected:
  template <
      typename TypeHandler,
      typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  void Add(typename TypeHandler::Type&& value);

  template <typename TypeHandler>
  void RemoveLast();

  template <typename TypeHandler>
  void CopyFrom(const RepeatedPtrFieldBase& other);

  void CloseGap(int start, int num);

  void Reserve(int new_size);

  int Capacity() const;

  template <typename TypeHandler>
  static inline typename TypeHandler::Type* copy(
      typename TypeHandler::Type* value) {
    auto* new_value = TypeHandler::NewFromPrototype(value, nullptr);
    TypeHandler::Merge(*value, new_value);
    return new_value;
  }

  // Used for constructing iterators.
  void* const* raw_data() const;
  void** raw_mutable_data() const;

  template <typename TypeHandler>
  typename TypeHandler::Type** mutable_data();
  template <typename TypeHandler>
  const typename TypeHandler::Type* const* data() const;

  template <typename TypeHandler>
  PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other);

  void SwapElements(int index1, int index2);

  template <typename TypeHandler>
  size_t SpaceUsedExcludingSelfLong() const;

  // Advanced memory management --------------------------------------

  // Like Add(), but if there are no cleared objects to use, returns NULL.
  template <typename TypeHandler>
  typename TypeHandler::Type* AddFromCleared();

  template <typename TypeHandler>
  void AddAllocated(typename TypeHandler::Type* value) {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    AddAllocatedInternal<TypeHandler>(value, t);
  }

  template <typename TypeHandler>
  void UnsafeArenaAddAllocated(typename TypeHandler::Type* value);

  template <typename TypeHandler>
  PROTOBUF_MUST_USE_RESULT typename TypeHandler::Type* ReleaseLast() {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    return ReleaseLastInternal<TypeHandler>(t);
  }

  // Releases the last element and returns it, but does not do an
  // out-of-arena copy; it just returns the raw pointer to the contained
  // element in the arena.
  template <typename TypeHandler>
  typename TypeHandler::Type* UnsafeArenaReleaseLast();

  int ClearedCount() const;
  template <typename TypeHandler>
  void AddCleared(typename TypeHandler::Type* value);
  template <typename TypeHandler>
  PROTOBUF_MUST_USE_RESULT typename TypeHandler::Type* ReleaseCleared();

  template <typename TypeHandler>
  void AddAllocatedInternal(typename TypeHandler::Type* value, std::true_type);
  template <typename TypeHandler>
  void AddAllocatedInternal(typename TypeHandler::Type* value,
                            std::false_type);

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
      typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena);
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithoutCopy(
      typename TypeHandler::Type* value);

  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::true_type);
  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::false_type);

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other);

  inline Arena* GetArena() const { return arena_; }

 private:
  static constexpr int kInitialSize = 0;
  // A few notes on internal representation:
  //
  // We use an indirected approach, with struct Rep, to keep
  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena
  // support was added, namely, 3 8-byte machine words on x86-64. An instance
  // of Rep is allocated only when the repeated field is non-empty, and it is
  // a dynamically-sized struct (the header is directly followed by
  // elements[]). We place arena_ and current_size_ directly in the object to
  // avoid cache misses due to the indirection, because these fields are
  // checked frequently. Placing all fields directly in the
  // RepeatedPtrFieldBase instance costs significant performance for
  // memory-sensitive workloads.
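  //
  // Concretely, on x86-64 the inline footprint stays at three 8-byte words:
  //   sizeof(Arena*) + sizeof(int) + sizeof(int) + sizeof(Rep*) == 24 bytes,
  // with the pointer array itself living in the single Rep allocation.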
  Arena* arena_;
  int current_size_;
  int total_size_;
  struct Rep {
    int allocated_size;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
                   sizeof(void*)];
  };
  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
  Rep* rep_;

  template <typename TypeHandler>
  static inline typename TypeHandler::Type* cast(void* element) {
    return reinterpret_cast<typename TypeHandler::Type*>(element);
  }
  template <typename TypeHandler>
  static inline const typename TypeHandler::Type* cast(const void* element) {
    return reinterpret_cast<const typename TypeHandler::Type*>(element);
  }

  // Non-templated inner function to avoid code duplication. Takes a function
  // pointer to the type-specific (templated) inner allocate/merge loop.
  void MergeFromInternal(const RepeatedPtrFieldBase& other,
                         void (RepeatedPtrFieldBase::*inner_loop)(void**,
                                                                  void**, int,
                                                                  int));

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void MergeFromInnerLoop(void** our_elems,
                                            void** other_elems, int length,
                                            int already_allocated);

  // Internal helper: extends array space if necessary to contain
  // |extend_amount| more elements, and returns a pointer to the element
  // immediately following the old list of elements. This interface factors
  // out common behavior from Reserve() and MergeFrom() to reduce code size.
  // |extend_amount| must be > 0.
  void** InternalExtend(int extend_amount);

  // Internal helper for Add: adds "obj" as the next element in the
  // array, including potentially resizing the array with Reserve if
  // needed.
  void* AddOutOfLineHelper(void* obj);

  // The reflection implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class ::PROTOBUF_NAMESPACE_ID::Reflection;
  friend class ::PROTOBUF_NAMESPACE_ID::internal::SwapFieldHelper;

  // ExtensionSet stores repeated message extensions as
  // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to
  // implement SpaceUsedLong(), and thus need to call
  // SpaceUsedExcludingSelfLong() reinterpreting MessageLite as Message.
  // ExtensionSet also needs to make use of AddFromCleared(), which is not
  // part of the public interface.
  friend class ExtensionSet;

  // The MapFieldBase implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class MapFieldBase;
  friend class MapFieldBaseStub;

  // The table-driven MergePartialFromCodedStream implementation needs to
  // operate on RepeatedPtrField<MessageLite>.
  friend class MergePartialFromCodedStreamHelper;
  friend class AccessorHelper;
  template <typename T>
  friend struct google::protobuf::WeakRepeatedPtrField;

  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase);
};

template <typename GenericType>
class GenericTypeHandler {
 public:
  typedef GenericType Type;
  using Movable = IsMovable<GenericType>;

  static inline GenericType* New(Arena* arena) {
    return Arena::CreateMaybeMessage<Type>(arena);
  }
  static inline GenericType* New(Arena* arena, GenericType&& value) {
    return Arena::Create<GenericType>(arena, std::move(value));
  }
  static inline GenericType* NewFromPrototype(const GenericType* prototype,
                                              Arena* arena = NULL);
  static inline void Delete(GenericType* value, Arena* arena) {
    if (arena == NULL) {
      delete value;
    }
  }
  static inline Arena* GetOwningArena(GenericType* value) {
    return Arena::GetOwningArena<Type>(value);
  }

  static inline void Clear(GenericType* value) { value->Clear(); }
  PROTOBUF_NOINLINE
  static void Merge(const GenericType& from, GenericType* to);
  static inline size_t SpaceUsedLong(const GenericType& value) {
    return value.SpaceUsedLong();
  }
};

template <typename GenericType>
GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
    const GenericType* /* prototype */, Arena* arena) {
  return New(arena);
}
template <typename GenericType>
void GenericTypeHandler<GenericType>::Merge(const GenericType& from,
                                            GenericType* to) {
  to->MergeFrom(from);
}

// NewFromPrototype() and Merge() are not defined inline here, as we will need
// to do a virtual function dispatch anyways to go from Message* to call
// New/Merge.
template <>
MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena);
template <>
inline Arena* GenericTypeHandler<MessageLite>::GetOwningArena(
    MessageLite* value) {
  return value->GetOwningArena();
}
template <>
void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
                                            MessageLite* to);
template <>
inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
  value->clear();
}
template <>
void GenericTypeHandler<std::string>::Merge(const std::string& from,
                                            std::string* to);

// Message specialization bodies defined in message.cc. This split is
// necessary to allow proto2-lite (which includes this header) to be
// independent of Message.
template <>
PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
    const Message* prototype, Arena* arena);
template <>
PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetOwningArena(
    Message* value);

class StringTypeHandler {
 public:
  typedef std::string Type;
  using Movable = IsMovable<Type>;

  static inline std::string* New(Arena* arena) {
    return Arena::Create<std::string>(arena);
  }
  static inline std::string* New(Arena* arena, std::string&& value) {
    return Arena::Create<std::string>(arena, std::move(value));
  }
  static inline std::string* NewFromPrototype(const std::string*,
                                              Arena* arena) {
    return New(arena);
  }
  static inline Arena* GetOwningArena(std::string*) { return nullptr; }
  static inline void Delete(std::string* value, Arena* arena) {
    if (arena == NULL) {
      delete value;
    }
  }
  static inline void Clear(std::string* value) { value->clear(); }
  static inline void Merge(const std::string& from, std::string* to) {
    *to = from;
  }
  static size_t SpaceUsedLong(const std::string& value) {
    return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
  }
};

}  // namespace internal

// RepeatedPtrField is like RepeatedField, but used for repeated strings or
// Messages.
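//
// A minimal usage sketch (for intuition; real code normally goes through the
// generated accessors):
//   RepeatedPtrField<std::string> names;
//   *names.Add() = "alice";
//   names.Add()->assign("bob");
//   for (const std::string& name : names) {
//     // ... use name ...
//   }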
template <typename Element>
class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
 public:
  constexpr RepeatedPtrField();
  explicit RepeatedPtrField(Arena* arena);

  RepeatedPtrField(const RepeatedPtrField& other);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedPtrField(Iter begin, Iter end);

  ~RepeatedPtrField();

  RepeatedPtrField& operator=(const RepeatedPtrField& other);

  RepeatedPtrField(RepeatedPtrField&& other) noexcept;
  RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);
  Element* Add();
  void Add(Element&& value);
  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  const Element& at(int index) const;
  Element& at(int index);

  // Removes the last element in the array.
  // Ownership of the element is retained by the array.
  void RemoveLast();

  // Deletes elements with indices in the range [start .. start+num-1].
  // Caution: the implementation moves all elements with indices
  // [start+num .. ].  Calling this routine inside a loop can cause quadratic
  // behavior.
  void DeleteSubrange(int start, int num);

  void Clear();
  void MergeFrom(const RepeatedPtrField& other);
  void CopyFrom(const RepeatedPtrField& other);

  // Replaces the contents with RepeatedPtrField(begin, end).
  template <typename Iter>
  void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size. This only
  // resizes the pointer array; it doesn't allocate any objects. If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  int Capacity() const;

  // Gets the underlying array. This pointer is possibly invalidated by
  // any add or remove operation.
  Element** mutable_data();
  const Element* const* data() const;

  // Swaps the entire contents with "other". If they are on separate arenas,
  // then this copies data.
  void Swap(RepeatedPtrField* other);

  // Swaps the entire contents with "other". The caller should guarantee that
  // either both fields are on the same arena or both are on the heap.
  // Swapping between different arenas with this function is disallowed and is
  // caught via GOOGLE_DCHECK.
  void UnsafeArenaSwap(RepeatedPtrField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef internal::RepeatedPtrIterator<Element> iterator;
  typedef internal::RepeatedPtrIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Custom STL-like iterator that iterates over and returns the underlying
  // pointers to Element rather than Element itself.
  typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
      pointer_iterator;
  typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
                                                const void* const>
      const_pointer_iterator;
  pointer_iterator pointer_begin();
  const_pointer_iterator pointer_begin() const;
  pointer_iterator pointer_end();
  const_pointer_iterator pointer_end() const;

  // Returns (an estimate of) the number of bytes used by the repeated field,
  // excluding sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Advanced memory management --------------------------------------
  // When hardcore memory management becomes necessary -- as it sometimes
  // does here at Google -- the following methods may be useful.

  // Adds an already-allocated object, passing ownership to the
  // RepeatedPtrField.
  //
  // Note that some special behavior occurs with respect to arenas:
  //
  //   (i) if this field holds submessages, the new submessage will be copied
  //       if the original is in an arena and this RepeatedPtrField is either
  //       in a different arena, or on the heap.
  //   (ii) if this field holds strings, the passed-in string *must* be
  //       heap-allocated, not arena-allocated. There is no way to dynamically
  //       check this at runtime, so User Beware.
  void AddAllocated(Element* value);

  // Removes the last element and returns it, passing ownership to the caller.
  // Requires:  size() > 0
  //
  // If this RepeatedPtrField is on an arena, an object copy is required to
  // pass ownership back to the user (for compatible semantics). Use
  // UnsafeArenaReleaseLast() if this behavior is undesired.
  PROTOBUF_MUST_USE_RESULT Element* ReleaseLast();

  // Adds an already-allocated object, skipping arena-ownership checks. The
  // user must guarantee that the given object is in the same arena as this
  // RepeatedPtrField.
  // It is also useful in legacy code that uses temporary ownership to avoid
  // copies. Example:
  //   RepeatedPtrField<T> temp_field;
  //   temp_field.AddAllocated(new T);
  //   ... // Do something with temp_field
  //   temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
  // If you put temp_field on the arena this fails, because the ownership
  // transfers to the arena at the "AddAllocated" call and is not released
  // anymore, causing a double delete. UnsafeArenaAddAllocated prevents this.
  void UnsafeArenaAddAllocated(Element* value);

  // Removes the last element and returns it. Works only when operating on an
  // arena. The returned pointer is to the original object in the arena, hence
  // has the arena's lifetime.
  // Requires:  current_size_ > 0
  Element* UnsafeArenaReleaseLast();
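
  // Ownership sketch for the heap case (hypothetical message type MyMsg; no
  // arenas involved):
  //   RepeatedPtrField<MyMsg> field;
  //   field.AddAllocated(new MyMsg);           // field owns the object now
  //   MyMsg* released = field.ReleaseLast();   // caller owns it again
  //   delete released;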

  // Extracts elements with indices in the range "[start .. start+num-1]".
  // The caller assumes ownership of the extracted elements and is responsible
  // for deleting them when they are no longer needed.
  // If "elements" is non-NULL, then pointers to the extracted elements
  // are stored in "elements[0 .. num-1]" for the convenience of the caller.
  // If "elements" is NULL, then the caller must use some other mechanism
  // to perform any further operations (like deletion) on these elements.
  // Caution: the implementation also moves elements with indices
  // [start+num ..].  Calling this routine inside a loop can cause quadratic
  // behavior.
  //
  // Memory copying behavior is identical to ReleaseLast(), described above:
  // if this RepeatedPtrField is on an arena, an object copy is performed for
  // each returned element, so that all returned element pointers are to
  // heap-allocated copies. If this copy is not desired, the user should call
  // UnsafeArenaExtractSubrange().
  void ExtractSubrange(int start, int num, Element** elements);

  // Identical to ExtractSubrange() described above, except that when this
  // repeated field is on an arena, no object copies are performed. Instead,
  // the raw object pointers are returned. Thus, if on an arena, the returned
  // objects must not be freed, because they will not be heap-allocated
  // objects.
  void UnsafeArenaExtractSubrange(int start, int num, Element** elements);

  // When elements are removed by calls to RemoveLast() or Clear(), they
  // are not actually freed. Instead, they are cleared and kept so that
  // they can be reused later. This can save lots of CPU time when
  // repeatedly reusing a protocol message for similar purposes.
  //
  // Hardcore programs may choose to manipulate these cleared objects
  // to better optimize memory management using the following routines.

  // Gets the number of cleared objects that are currently being kept
  // around for reuse.
  int ClearedCount() const;
  // Adds an element to the pool of cleared objects, passing ownership to
  // the RepeatedPtrField. The element must be cleared prior to calling
  // this method.
  //
  // This method cannot be called when the repeated field is on an arena or
  // when |value| is; both cases will trigger a GOOGLE_DCHECK-failure.
  void AddCleared(Element* value);
  // Removes a single element from the cleared pool and returns it, passing
  // ownership to the caller. The element is guaranteed to be cleared.
  // Requires:  ClearedCount() > 0
  //
  // This method cannot be called when the repeated field is on an arena;
  // doing so will trigger a GOOGLE_DCHECK-failure.
  PROTOBUF_MUST_USE_RESULT Element* ReleaseCleared();

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena() const;

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  void InternalSwap(RepeatedPtrField* other) {
    internal::RepeatedPtrFieldBase::InternalSwap(other);
  }

 private:
  // Note:  RepeatedPtrField SHOULD NOT be subclassed by users.
  class TypeHandler;

  // Implementations for ExtractSubrange(). The copying behavior must be
  // included only if the type supports the necessary operations (e.g.,
  // MergeFrom()), so we must resolve this at compile time. ExtractSubrange()
  // uses SFINAE to choose one of the below implementations.
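  // For example, instantiating ExtractSubrange() with a message type (which
  // has MergeFrom()) selects the std::true_type overload below, which can
  // perform the arena-safety copies described above; a type without
  // MergeFrom() selects the std::false_type overload, which cannot.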
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::true_type);
  void ExtractSubrangeInternal(int start, int num, Element** elements,
                               std::false_type);

  friend class Arena;

  template <typename T>
  friend struct WeakRepeatedPtrField;

  typedef void InternalArenaConstructable_;
};

// implementation ====================================================

template <typename Element>
constexpr RepeatedField<Element>::RepeatedField()
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  if (other.current_size_ != 0) {
    Reserve(other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(0), &other.Get(0), other.size());
  }
}

template <typename Element>
template <typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  Add(begin, end);
}

template <typename Element>
RepeatedField<Element>::~RepeatedField() {
#ifndef NDEBUG
  // Try to trigger a segfault / asan failure in non-opt builds if arena_'s
  // lifetime has ended before the destructor.
  auto arena = GetArena();
  if (arena) (void)arena->SpaceAllocated();
#endif
  if (total_size_ > 0) {
    InternalDeallocate(rep(), total_size_);
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    const RepeatedField& other) {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    : RepeatedField() {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (this->GetArena() != other.GetArena()) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}

template <typename Element>
inline bool RepeatedField<Element>::empty() const {
  return current_size_ == 0;
}

template <typename Element>
inline int RepeatedField<Element>::size() const {
  return current_size_;
}

template <typename Element>
inline int RepeatedField<Element>::Capacity() const {
  return total_size_;
}

template <typename Element>
inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  elements()[current_size_++] = value;
}

template <typename Element>
inline Element* RepeatedField<Element>::AddAlreadyReserved() {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  return &elements()[current_size_++];
}
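
// Usage sketch for the pre-reserved fast path above (hypothetical count,
// shown for intuition only):
//   RepeatedField<int> field;
//   field.Reserve(100);              // single allocation up front
//   for (int i = 0; i < 100; ++i) {
//     field.AddAlreadyReserved(i);   // no capacity growth; DCHECK-only check
//   }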

template <typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n) {
  GOOGLE_DCHECK_GE(total_size_ - current_size_, n)
      << total_size_ << ", " << current_size_;
  // Warning: sometimes people call this when n == 0 and total_size_ == 0. In
  // this case the return pointer points to a zero size array (n == 0). Hence
  // we can just use unsafe_elements(), because the user cannot dereference
  // the pointer anyway.
  Element* ret = unsafe_elements() + current_size_;
  current_size_ += n;
  return ret;
}

template <typename Element>
inline void RepeatedField<Element>::Resize(int new_size,
                                           const Element& value) {
  GOOGLE_DCHECK_GE(new_size, 0);
  if (new_size > current_size_) {
    Reserve(new_size);
    std::fill(&elements()[current_size_], &elements()[new_size], value);
  }
  current_size_ = new_size;
}

template <typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline const Element& RepeatedField<Element>::at(int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element& RepeatedField<Element>::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element* RepeatedField<Element>::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return &elements()[index];
}

template <typename Element>
inline void RepeatedField<Element>::Set(int index, const Element& value) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  elements()[index] = value;
}

template <typename Element>
inline void RepeatedField<Element>::Add(const Element& value) {
  uint32 size = current_size_;
  if (static_cast<int>(size) == total_size_) {
    // value could reference an element of the array. Reserving new space will
    // invalidate the reference. So we must make a copy first.
    auto tmp = value;
    Reserve(total_size_ + 1);
    elements()[size] = std::move(tmp);
  } else {
    elements()[size] = value;
  }
  current_size_ = size + 1;
}

template <typename Element>
inline Element* RepeatedField<Element>::Add() {
  uint32 size = current_size_;
  if (static_cast<int>(size) == total_size_) Reserve(total_size_ + 1);
  auto ptr = &elements()[size];
  current_size_ = size + 1;
  return ptr;
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    if (reserve == 0) {
      return;
    }

    Reserve(reserve + size());
    // TODO(ckennelly):  The compiler loses track of the buffer freshly
    // allocated by Reserve() by the time we call elements, so it cannot
    // guarantee that elements does not alias [begin(), end()).
    //
    // If restrict is available, annotating the pointer obtained from
    // elements() causes this to lower to memcpy instead of memmove.
    std::copy(begin, end, elements() + size());
    current_size_ = reserve + size();
  } else {
    FastAdder fast_adder(this);
    for (; begin != end; ++begin) fast_adder.Add(*begin);
  }
}

template <typename Element>
inline void RepeatedField<Element>::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  current_size_--;
}

template <typename Element>
void RepeatedField<Element>::ExtractSubrange(int start, int num,
                                             Element* elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, this->current_size_);

  // Save the values of the removed elements if requested.
  if (elements != NULL) {
    for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
  }

  // Slide remaining elements down to fill the gap.
  if (num > 0) {
    for (int i = start + num; i < this->current_size_; ++i)
      this->Set(i - num, this->Get(i));
    this->Truncate(this->current_size_ - num);
  }
}

template <typename Element>
inline void RepeatedField<Element>::Clear() {
  current_size_ = 0;
}

template <typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ != 0) {
    int existing_size = size();
    Reserve(existing_size + other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(existing_size), &other.Get(0), other.size());
  }
}

template <typename Element>
inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
  if (&other == this) return;
  Clear();
  MergeFrom(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position) {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator first, const_iterator last) {
  size_type first_offset = first - cbegin();
  if (first != last) {
    Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  }
  return begin() + first_offset;
}

template <typename Element>
inline Element* RepeatedField<Element>::mutable_data() {
  return unsafe_elements();
}

template <typename Element>
inline const Element* RepeatedField<Element>::data() const {
  return unsafe_elements();
}

template <typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  GOOGLE_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedField<Element>>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedField, arena_or_elements_) +
                    sizeof(this->arena_or_elements_) -
                    offsetof(RepeatedField, current_size_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_));
}

template <typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other) {
  if (this == other) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    RepeatedField<Element> temp(other->GetArena());
    temp.MergeFrom(*this);
    CopyFrom(*other);
    other->UnsafeArenaSwap(&temp);
  }
}

template <typename Element>
void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
  if (this == other) return;
  InternalSwap(other);
}

template <typename Element>
void RepeatedField<Element>::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(elements()[index1], elements()[index2]);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin() {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const {
  return unsafe_elements();
}
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
  return unsafe_elements() + current_size_;
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const {
  return unsafe_elements() + current_size_;
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const {
  return unsafe_elements() + current_size_;
}

template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize)
                         : 0;
}
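
// Worked example (illustrative): a RepeatedField<int32> with current_size_ 5
// and total_size_ 8 reports 8 * sizeof(int32) + kRepHeaderSize bytes above;
// space is attributed by capacity, not by the number of live elements.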

namespace internal {

// Returns the new size for a reserved field based on its 'total_size' and the
// requested 'new_size'. The result is clamped to the closed interval:
//   [internal::kRepeatedFieldLowerClampLimit,
//    std::numeric_limits<int>::max()]
// Requires:
//     new_size > total_size &&
//     (total_size == 0 ||
//      total_size >= kRepeatedFieldLowerClampLimit)
inline int CalculateReserveSize(int total_size, int new_size) {
  if (new_size < kRepeatedFieldLowerClampLimit) {
    // Clamp to smallest allowed size.
    return kRepeatedFieldLowerClampLimit;
  }
  if (total_size < kRepeatedFieldUpperClampLimit) {
    return std::max(total_size * 2, new_size);
  } else {
    // Clamp to largest allowed size.
    GOOGLE_DCHECK_GT(new_size, kRepeatedFieldUpperClampLimit);
    return std::numeric_limits<int>::max();
  }
}

}  // namespace internal

// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat.
template <typename Element>
void RepeatedField<Element>::Reserve(int new_size) {
  if (total_size_ >= new_size) return;
  Rep* old_rep = total_size_ > 0 ? rep() : NULL;
  Rep* new_rep;
  Arena* arena = GetArena();
  new_size = internal::CalculateReserveSize(total_size_, new_size);
  GOOGLE_DCHECK_LE(
      static_cast<size_t>(new_size),
      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
      << "Requested size is too large to fit into size_t.";
  size_t bytes =
      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  if (arena == NULL) {
    new_rep = static_cast<Rep*>(::operator new(bytes));
  } else {
    new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  }
  new_rep->arena = arena;
  int old_total_size = total_size_;
  // Already known: new_size >= internal::kRepeatedFieldLowerClampLimit
  // Maintain invariant:
  //     total_size_ == 0 ||
  //     total_size_ >= internal::kRepeatedFieldLowerClampLimit
  total_size_ = new_size;
  arena_or_elements_ = new_rep->elements;
  // Invoke placement-new on newly allocated elements. We shouldn't have to do
  // this, since Element is supposed to be POD, but a previous version of this
  // code allocated storage with "new Element[size]" and some code uses
  // RepeatedField with non-POD types, relying on constructor invocation. If
  // Element has a trivial constructor (e.g., int32), gcc (tested with -O2)
  // completely removes this loop because the loop body is empty, so this has
  // no effect unless its side-effects are required for correctness.
  // Note that we do this before MoveArray() below because Element's copy
  // assignment implementation will want an initialized instance first.
  Element* e = &elements()[0];
  Element* limit = e + total_size_;
  for (; e < limit; e++) {
    new (e) Element;
  }
  if (current_size_ > 0) {
    MoveArray(&elements()[0], old_rep->elements, current_size_);
  }

  // Likewise, we need to invoke destructors on the old array.
  InternalDeallocate(old_rep, old_total_size);
}

template <typename Element>
inline void RepeatedField<Element>::Truncate(int new_size) {
  GOOGLE_DCHECK_LE(new_size, current_size_);
  if (current_size_ > 0) {
    current_size_ = new_size;
  }
}

template <typename Element>
inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
                                              int array_size) {
  CopyArray(to, from, array_size);
}

template <typename Element>
inline void RepeatedField<Element>::CopyArray(Element* to,
                                              const Element* from,
                                              int array_size) {
  internal::ElementCopier<Element>()(to, from, array_size);
}

namespace internal {

template <typename Element, bool HasTrivialCopy>
void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
                                                        const Element* from,
                                                        int array_size) {
  std::copy(from, from + array_size, to);
}

template <typename Element>
struct ElementCopier<Element, true> {
  void operator()(Element* to, const Element* from, int array_size) {
    memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
  }
};

}  // namespace internal

// -------------------------------------------------------------------

namespace internal {

constexpr RepeatedPtrFieldBase::RepeatedPtrFieldBase()
    : arena_(NULL), current_size_(0), total_size_(0), rep_(NULL) {}

inline RepeatedPtrFieldBase::RepeatedPtrFieldBase(Arena* arena)
    : arena_(arena), current_size_(0), total_size_(0), rep_(NULL) {}

template <typename TypeHandler>
void RepeatedPtrFieldBase::Destroy() {
  if (rep_ != NULL && arena_ == NULL) {
    int n = rep_->allocated_size;
    void* const* elements = rep_->elements;
    for (int i = 0; i < n; i++) {
      TypeHandler::Delete(cast<TypeHandler>(elements[i]), NULL);
    }
#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
    const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
    ::operator delete(static_cast<void*>(rep_), size);
#else
    ::operator delete(static_cast<void*>(rep_));
#endif
  }
  rep_ = NULL;
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    SwapFallback<TypeHandler>(other);
  }
}

template <typename TypeHandler>
void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(GetArena() == nullptr || other->GetArena() != GetArena());
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(other->GetArena() != GetArena());
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP

  // Copy semantics in this case. We try to improve efficiency by placing the
  // temporary on |other|'s arena so that messages are copied twice rather
  // than three times.
  RepeatedPtrFieldBase temp(other->GetArena());
  temp.MergeFrom<TypeHandler>(*this);
  this->Clear<TypeHandler>();
  this->MergeFrom<TypeHandler>(*other);
  other->InternalSwap(&temp);
  temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
}

inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }

inline int RepeatedPtrFieldBase::size() const { return current_size_; }

template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::Get(
    int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
    int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Delete(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
    typename TypeHandler::Type* prototype) {
  if (rep_ != NULL && current_size_ < rep_->allocated_size) {
    return cast<TypeHandler>(rep_->elements[current_size_++]);
  }
  typename TypeHandler::Type* result =
      TypeHandler::NewFromPrototype(prototype, arena_);
  return reinterpret_cast<typename TypeHandler::Type*>(
      AddOutOfLineHelper(result));
}

template <typename TypeHandler,
          typename std::enable_if<TypeHandler::Movable::value>::type*>
inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
  if (rep_ != NULL && current_size_ < rep_->allocated_size) {
    *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
    return;
  }
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  ++rep_->allocated_size;
  typename TypeHandler::Type* result =
      TypeHandler::New(arena_, std::move(value));
  rep_->elements[current_size_++] = result;
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
}

template <typename TypeHandler>
void RepeatedPtrFieldBase::Clear() {
  const int n = current_size_;
  GOOGLE_DCHECK_GE(n, 0);
  if (n > 0) {
    void* const* elements = rep_->elements;
    int i = 0;
    do {
      TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
    } while (i < n);
    current_size_ = 0;
  }
}

// To avoid unnecessary code duplication and reduce binary size, we use a
// layered approach to implementing MergeFrom(). The toplevel method is
// templated, so we get a small thunk per concrete message type in the binary.
// This calls a shared implementation with most of the logic, passing a
// function pointer to another type-specific piece of code that calls the
// object-allocate and merge handlers.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ == 0) return;
  MergeFromInternal(other,
                    &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
}

inline void RepeatedPtrFieldBase::MergeFromInternal(
    const RepeatedPtrFieldBase& other,
    void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
  // Note: wrapper has already guaranteed that other.rep_ != NULL here.
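  // Slots in [current_size_, allocated_size) already hold cleared objects
  // that the inner loop can reuse; only the remainder of the merged range
  // needs fresh allocations (see MergeFromInnerLoop below).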
// To avoid unnecessary code duplication and reduce binary size, we use a
// layered approach to implementing MergeFrom(). The toplevel method is
// templated, so we get a small thunk per concrete message type in the
// binary. This calls a shared implementation with most of the logic, passing
// a function pointer to another type-specific piece of code that calls the
// object-allocate and merge handlers.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ == 0) return;
  MergeFromInternal(other,
                    &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
}

inline void RepeatedPtrFieldBase::MergeFromInternal(
    const RepeatedPtrFieldBase& other,
    void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
  // Note: wrapper has already guaranteed that other.rep_ != NULL here.
  int other_size = other.current_size_;
  void** other_elements = other.rep_->elements;
  void** new_elements = InternalExtend(other_size);
  int allocated_elems = rep_->allocated_size - current_size_;
  (this->*inner_loop)(new_elements, other_elements, other_size,
                      allocated_elems);
  current_size_ += other_size;
  if (rep_->allocated_size < current_size_) {
    rep_->allocated_size = current_size_;
  }
}

// Merges other_elems to our_elems.
template <typename TypeHandler>
void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
                                              void** other_elems, int length,
                                              int already_allocated) {
  if (already_allocated < length) {
    Arena* arena = GetArena();
    typename TypeHandler::Type* elem_prototype =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[0]);
    for (int i = already_allocated; i < length; i++) {
      // Allocate a new empty element that we'll merge into below.
      typename TypeHandler::Type* new_elem =
          TypeHandler::NewFromPrototype(elem_prototype, arena);
      our_elems[i] = new_elem;
    }
  }
  // Main loop that does the actual merging.
  for (int i = 0; i < length; i++) {
    // Already allocated: use existing element.
    typename TypeHandler::Type* other_elem =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
    typename TypeHandler::Type* new_elem =
        reinterpret_cast<typename TypeHandler::Type*>(our_elems[i]);
    TypeHandler::Merge(*other_elem, new_elem);
  }
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
  if (&other == this) return;
  RepeatedPtrFieldBase::Clear<TypeHandler>();
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}
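// Illustrative sketch (not part of this header; `MyMessage` and its set_id()
// accessor are hypothetical): MergeFrom() appends merged copies of the other
// field's elements, while CopyFrom() is equivalent to Clear() + MergeFrom():
//
//   RepeatedPtrField<MyMessage> src, dst;
//   src.Add()->set_id(1);
//   dst.MergeFrom(src);  // dst.size() grows by src.size().
//   dst.CopyFrom(src);   // dst becomes an element-wise copy of src.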
inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }

inline void* const* RepeatedPtrFieldBase::raw_data() const {
  return rep_ ? rep_->elements : NULL;
}

inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
  return rep_ ? const_cast<void**>(rep_->elements) : NULL;
}

template <typename TypeHandler>
inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
  // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
  // method entirely.
  return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
}

template <typename TypeHandler>
inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
    const {
  // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
  // method entirely.
  return reinterpret_cast<const typename TypeHandler::Type* const*>(
      raw_data());
}

inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(rep_->elements[index1], rep_->elements[index2]);
}

template <typename TypeHandler>
inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
  size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
  if (rep_ != NULL) {
    for (int i = 0; i < rep_->allocated_size; ++i) {
      allocated_bytes +=
          TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
    }
    allocated_bytes += kRepHeaderSize;
  }
  return allocated_bytes;
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
  if (rep_ != NULL && current_size_ < rep_->allocated_size) {
    return cast<TypeHandler>(rep_->elements[current_size_++]);
  } else {
    return NULL;
  }
}

// AddAllocated version that implements arena-safe copying behavior.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::true_type) {
  Arena* element_arena =
      reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
  Arena* arena = GetArena();
  if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
    // Fast path: underlying arena representation (tagged pointer) is equal
    // to our arena pointer, and we can add to the array without resizing it
    // (there is at least one slot that is not allocated).
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving the first allocated element to the
      // end of the allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    rep_->allocated_size = rep_->allocated_size + 1;
  } else {
    AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
  }
}

// Slowpath handles all cases, copying if necessary.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
    // Pass value_arena and my_arena to avoid duplicate virtual call (value)
    // or load (mine).
    typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
#ifdef PROTOBUF_INTERNAL_USE_MUST_USE_RESULT
  GOOGLE_DCHECK(value_arena == nullptr || value_arena == my_arena);
#endif  // PROTOBUF_INTERNAL_USE_MUST_USE_RESULT
  // Ensure that either the value is in the same arena, or if not, we do the
  // appropriate thing: Own() it (if it's on the heap and we're in an arena)
  // or copy it to our arena/heap (otherwise).
  if (my_arena != NULL && value_arena == NULL) {
    my_arena->Own(value);
  } else if (my_arena != value_arena) {
    typename TypeHandler::Type* new_value =
        TypeHandler::NewFromPrototype(value, my_arena);
    TypeHandler::Merge(*value, new_value);
    TypeHandler::Delete(value, value_arena);
    value = new_value;
  }

  UnsafeArenaAddAllocated<TypeHandler>(value);
}

// AddAllocated version that does not implement arena-safe copying behavior.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::false_type) {
  if (rep_ && rep_->allocated_size < total_size_) {
    // Fast path: we can add to the array without resizing it (there is at
    // least one slot that is not allocated).
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving the first allocated element to the
      // end of the allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    ++rep_->allocated_size;
  } else {
    UnsafeArenaAddAllocated<TypeHandler>(value);
  }
}

template <typename TypeHandler>
void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
    typename TypeHandler::Type* value) {
  // Make room for the new pointer.
  if (!rep_ || current_size_ == total_size_) {
    // The array is completely full with no cleared objects, so grow it.
    Reserve(total_size_ + 1);
    ++rep_->allocated_size;
  } else if (rep_->allocated_size == total_size_) {
    // There is no more space in the pointer array because it contains some
    // cleared objects awaiting reuse. We don't want to grow the array in
    // this case because otherwise a loop calling AddAllocated() followed by
    // Clear() would leak memory.
    TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
                        arena_);
  } else if (current_size_ < rep_->allocated_size) {
    // We have some cleared objects. We don't care about their order, so we
    // can just move the first one to the end to make space.
    rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
    ++rep_->allocated_size;
  } else {
    // There are no cleared objects.
    ++rep_->allocated_size;
  }

  rep_->elements[current_size_++] = value;
}
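// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): AddAllocated() transfers ownership of a heap pointer to the
// field. With the arena-safe path above, a heap value added to an
// arena-backed field is handed to the arena via Own(), and a value owned by
// a *different* arena is copied:
//
//   Arena arena;
//   auto* f = Arena::CreateMessage<RepeatedPtrField<MyMessage>>(&arena);
//   f->AddAllocated(new MyMessage);  // Heap value: the arena now owns it.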
// ReleaseLast() for types that implement merge/copy behavior.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::true_type) {
  // First, release an element.
  typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
  // Now perform a copy if we're on an arena.
  Arena* arena = GetArena();

  typename TypeHandler::Type* new_result;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = copy<TypeHandler>(result);
  if (arena == nullptr) delete result;
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = (arena == nullptr) ? result : copy<TypeHandler>(result);
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return new_result;
}

// ReleaseLast() for types that *do not* implement merge/copy behavior --
// this is the same as UnsafeArenaReleaseLast(). Note that we
// GOOGLE_DCHECK-fail if we're on an arena, since the user really should
// implement the copy operation in this case.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::false_type) {
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
      << "with a type that does not implement MergeFrom. This is unsafe; "
      << "please implement MergeFrom for your type.";
  return UnsafeArenaReleaseLast<TypeHandler>();
}

template <typename TypeHandler>
inline typename TypeHandler::Type*
RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  typename TypeHandler::Type* result =
      cast<TypeHandler>(rep_->elements[--current_size_]);
  --rep_->allocated_size;
  if (current_size_ < rep_->allocated_size) {
    // There are cleared elements on the end; replace the removed element
    // with the last allocated element.
    rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
  }
  return result;
}
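// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): ReleaseLast() always hands back a heap-allocated object,
// copying off the arena when necessary, whereas UnsafeArenaReleaseLast()
// returns the stored pointer as-is (which may still be arena-owned):
//
//   RepeatedPtrField<MyMessage> field;  // Heap-backed field.
//   field.Add();
//   std::unique_ptr<MyMessage> owned(field.ReleaseLast());  // Caller owns.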
inline int RepeatedPtrFieldBase::ClearedCount() const {
  return rep_ ? (rep_->allocated_size - current_size_) : 0;
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::AddCleared(
    typename TypeHandler::Type* value) {
  GOOGLE_DCHECK(GetArena() == NULL)
      << "AddCleared() can only be used on a RepeatedPtrField not on an "
      << "arena.";
  GOOGLE_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
      << "AddCleared() can only accept values not on an arena.";
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  rep_->elements[rep_->allocated_size++] = value;
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
  GOOGLE_DCHECK(GetArena() == NULL)
      << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
      << "an arena.";
  GOOGLE_DCHECK(rep_ != NULL);
  GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
  return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
}

}  // namespace internal

// -------------------------------------------------------------------

template <typename Element>
class RepeatedPtrField<Element>::TypeHandler
    : public internal::GenericTypeHandler<Element> {};

template <>
class RepeatedPtrField<std::string>::TypeHandler
    : public internal::StringTypeHandler {};

template <typename Element>
constexpr RepeatedPtrField<Element>::RepeatedPtrField()
    : RepeatedPtrFieldBase() {}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
    : RepeatedPtrFieldBase(arena) {}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    const RepeatedPtrField& other)
    : RepeatedPtrFieldBase() {
  MergeFrom(other);
}

template <typename Element>
template <typename Iter, typename>
inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin, Iter end) {
  Add(begin, end);
}

template <typename Element>
RepeatedPtrField<Element>::~RepeatedPtrField() {
  Destroy<TypeHandler>();
}

template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    const RepeatedPtrField& other) {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(
    RepeatedPtrField&& other) noexcept
    : RepeatedPtrField() {
  // We don't just call Swap(&other) here because it would perform 3 copies
  // if other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena*-accepting constructor.
  if (other.GetArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
}

template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    RepeatedPtrField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies
  // if the two fields are on different arenas.
  if (this != &other) {
    if (this->GetArena() != other.GetArena()) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}
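// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): per the comments above, a move is only a pointer swap when
// both sides are heap-backed (or share an arena); across arenas it degrades
// to a copy:
//
//   RepeatedPtrField<MyMessage> a;
//   a.Add();
//   RepeatedPtrField<MyMessage> b = std::move(a);  // Heap-to-heap: swaps.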
template <typename Element>
inline bool RepeatedPtrField<Element>::empty() const {
  return RepeatedPtrFieldBase::empty();
}

template <typename Element>
inline int RepeatedPtrField<Element>::size() const {
  return RepeatedPtrFieldBase::size();
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::Get(int index) const {
  return RepeatedPtrFieldBase::Get<TypeHandler>(index);
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::at(int index) const {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element& RepeatedPtrField<Element>::at(int index) {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Mutable(int index) {
  return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Add() {
  return RepeatedPtrFieldBase::Add<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Add(Element&& value) {
  RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Add(Iter begin, Iter end) {
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    Reserve(size() + reserve);
  }
  for (; begin != end; ++begin) {
    *Add() = *begin;
  }
}

template <typename Element>
inline void RepeatedPtrField<Element>::RemoveLast() {
  RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());
  for (int i = 0; i < num; ++i) {
    RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
  }
  UnsafeArenaExtractSubrange(start, num, nullptr);
}

template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
                                                       Element** elements) {
  typename internal::TypeImplementsMergeBehavior<
      typename TypeHandler::Type>::type t;
  ExtractSubrangeInternal(start, num, elements, t);
}

// ExtractSubrange() implementation for types that implement merge/copy
// behavior.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::true_type) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num == 0) return;

#ifdef PROTOBUF_MUST_USE_EXTRACT_RESULT
  GOOGLE_DCHECK_NE(elements, nullptr)
      << "Releasing elements without transferring ownership is an unsafe "
         "operation.  Use UnsafeArenaExtractSubrange.";
#endif
  if (elements == nullptr) {
    CloseGap(start, num);
    return;
  }

  Arena* arena = GetArena();
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  // Always copy.
  for (int i = 0; i < num; ++i) {
    elements[i] = copy<TypeHandler>(
        RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
  }
  if (arena == nullptr) {
    for (int i = 0; i < num; ++i) {
      delete RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  // If we're on an arena, we perform a copy for each element so that the
  // returned elements are heap-allocated. Otherwise, just forward them.
  if (arena != nullptr) {
    for (int i = 0; i < num; ++i) {
      elements[i] = copy<TypeHandler>(
          RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start));
    }
  } else {
    for (int i = 0; i < num; ++i) {
      elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
    }
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  CloseGap(start, num);
}

// ExtractSubrange() implementation for types that do not implement merge/copy
// behavior.
template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
    int start, int num, Element** elements, std::false_type) {
  // This case is identical to UnsafeArenaExtractSubrange(). However, since
  // ExtractSubrange() must return heap-allocated objects by contract, and we
  // cannot fulfill this contract if we are on an arena, we must
  // GOOGLE_DCHECK() that we are not on an arena.
  GOOGLE_DCHECK(GetArena() == NULL)
      << "ExtractSubrange() when arena is non-NULL is only supported when "
      << "the Element type supplies a MergeFrom() operation to make copies.";
  UnsafeArenaExtractSubrange(start, num, elements);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
    int start, int num, Element** elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, size());

  if (num > 0) {
    // Save the values of the removed elements if requested.
    if (elements != NULL) {
      for (int i = 0; i < num; ++i) {
        elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
      }
    }
    CloseGap(start, num);
  }
}
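// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): ExtractSubrange() removes [start, start + num) and, per the
// contract above, returns heap-allocated objects (copying if the field is on
// an arena):
//
//   RepeatedPtrField<MyMessage> field;
//   for (int i = 0; i < 4; ++i) field.Add();
//   MyMessage* extracted[2];
//   field.ExtractSubrange(1, 2, extracted);  // Caller owns extracted[0..1].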
template <typename Element>
inline void RepeatedPtrField<Element>::Clear() {
  RepeatedPtrFieldBase::Clear<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::MergeFrom(
    const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator position) {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
  size_type pos_offset = std::distance(cbegin(), first);
  size_type last_offset = std::distance(cbegin(), last);
  DeleteSubrange(pos_offset, last_offset - pos_offset);
  return begin() + pos_offset;
}

template <typename Element>
inline Element** RepeatedPtrField<Element>::mutable_data() {
  return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
}

template <typename Element>
inline const Element* const* RepeatedPtrField<Element>::data() const {
  return RepeatedPtrFieldBase::data<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::Swap<TypeHandler>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
    RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::InternalSwap(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
  RepeatedPtrFieldBase::SwapElements(index1, index2);
}

template <typename Element>
inline Arena* RepeatedPtrField<Element>::GetArena() const {
  return RepeatedPtrFieldBase::GetArena();
}

template <typename Element>
inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
  return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
  RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(
    Element* value) {
  RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseLast() {
  return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
  return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
}

template <typename Element>
inline int RepeatedPtrField<Element>::ClearedCount() const {
  return RepeatedPtrFieldBase::ClearedCount();
}

template <typename Element>
inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
  return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
  return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Reserve(int new_size) {
  return RepeatedPtrFieldBase::Reserve(new_size);
}

template <typename Element>
inline int RepeatedPtrField<Element>::Capacity() const {
  return RepeatedPtrFieldBase::Capacity();
}
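// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): erase() is layered on DeleteSubrange(), so erasing through
// the STL-style API destroys the pointed-to elements as well:
//
//   RepeatedPtrField<MyMessage> field;
//   for (int i = 0; i < 3; ++i) field.Add();
//   field.erase(field.begin(), field.begin() + 2);  // field.size() == 1.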
// -------------------------------------------------------------------

namespace internal {

// STL-like iterator implementation for RepeatedPtrField. You should not
// refer to this class directly; use RepeatedPtrField<T>::iterator instead.
//
// The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
// very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
// but adds random-access operators and is modified to wrap a void** base
// iterator (since RepeatedPtrField stores its array as a void* array and
// casting void** to T** would violate C++ aliasing rules).
//
// This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
// (jyasskin@google.com).
template <typename Element>
class RepeatedPtrIterator {
 public:
  using iterator = RepeatedPtrIterator<Element>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrIterator() : it_(NULL) {}
  explicit RepeatedPtrIterator(void* const* it) : it_(it) {}

  // Allows "upcasting" from RepeatedPtrIterator<T**> to
  // RepeatedPtrIterator<const T* const*>.
  template <typename OtherElement>
  RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
      : it_(other.it_) {
    // Force a compiler error if the other type is not convertible to ours.
    if (false) {
      implicit_cast<Element*>(static_cast<OtherElement*>(nullptr));
    }
  }

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  bool operator==(const iterator& x) const { return it_ == x.it_; }
  bool operator!=(const iterator& x) const { return it_ != x.it_; }

  // less_than_comparable
  bool operator<(const iterator& x) const { return it_ < x.it_; }
  bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  bool operator>(const iterator& x) const { return it_ > x.it_; }
  bool operator>=(const iterator& x) const { return it_ >= x.it_; }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, const difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(const difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  difference_type operator-(const iterator& x) const { return it_ - x.it_; }

 private:
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  void* const* it_;
};
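// Illustrative sketch (not part of this header; `MyMessage` is
// hypothetical): RepeatedPtrIterator is what RepeatedPtrField<T>::iterator
// resolves to, so range-for and standard algorithms work directly on the
// field:
//
//   RepeatedPtrField<MyMessage> field;
//   for (const MyMessage& m : field) { /* ... */ }
//   auto it = std::find_if(field.begin(), field.end(),
//                          [](const MyMessage& m) { return true; });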
// Provides an iterator that operates on pointers to the underlying objects
// rather than the objects themselves, as RepeatedPtrIterator does.
// Consider using this when working with STL algorithms that change
// the array.
// The VoidPtr template parameter holds the type-agnostic pointer value
// referenced by the iterator. It should either be "void*" for a mutable
// iterator, or "const void* const" for a constant iterator.
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator {
 public:
  using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrOverPtrsIterator() : it_(NULL) {}
  explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  bool operator==(const iterator& x) const { return it_ == x.it_; }
  bool operator!=(const iterator& x) const { return it_ != x.it_; }

  // less_than_comparable
  bool operator<(const iterator& x) const { return it_ < x.it_; }
  bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  bool operator>(const iterator& x) const { return it_ > x.it_; }
  bool operator>=(const iterator& x) const { return it_ >= x.it_; }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  difference_type operator-(const iterator& x) const { return it_ - x.it_; }

 private:
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  VoidPtr* it_;
};

inline void RepeatedPtrFieldBase::InternalSwap(RepeatedPtrFieldBase* other) {
  GOOGLE_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedPtrFieldBase>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedPtrFieldBase, rep_) + sizeof(this->rep_) -
                    offsetof(RepeatedPtrFieldBase, arena_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedPtrFieldBase, arena_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedPtrFieldBase, arena_));
}

}  // namespace internal

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::begin() {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::begin() const {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cbegin() const {
  return begin();
}
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::end() {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::end() const {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cend() const {
  return end();
}

template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_begin() {
  return pointer_iterator(raw_mutable_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_begin() const {
  return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
}
template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_end() {
  return pointer_iterator(raw_mutable_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_end() const {
  return const_pointer_iterator(
      const_cast<const void* const*>(raw_data() + size()));
}
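// Illustrative sketch (not part of this header; `MyMessage` and the
// comparator are hypothetical): pointer_begin()/pointer_end() expose the
// underlying pointer array, which lets mutating algorithms such as
// std::sort reorder elements by swapping pointers instead of whole objects:
//
//   RepeatedPtrField<MyMessage> field;
//   std::sort(field.pointer_begin(), field.pointer_end(),
//             [](const MyMessage* a, const MyMessage* b) {
//               return a->id() < b->id();  // hypothetical id() accessor
//             });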
// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField. Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// A back inserter for RepeatedField objects.
template <typename T>
class RepeatedFieldBackInsertIterator
    : public std::iterator<std::output_iterator_tag, T> {
 public:
  explicit RepeatedFieldBackInsertIterator(
      RepeatedField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
    field_->Add(value);
    return *this;
  }
  RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedField<T>* field_;
};

// A back inserter for RepeatedPtrField objects.
template <typename T>
class RepeatedPtrFieldBackInsertIterator
    : public std::iterator<std::output_iterator_tag, T> {
 public:
  RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
    *field_->Add() = value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(
      const T* const ptr_to_value) {
    *field_->Add() = *ptr_to_value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
    *field_->Add() = std::move(value);
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// A back inserter for RepeatedPtrFields that inserts by transferring
// ownership of a pointer.
template <typename T>
class AllocatedRepeatedPtrFieldBackInsertIterator
    : public std::iterator<std::output_iterator_tag, T> {
 public:
  explicit AllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T* const ptr_to_value) {
    field_->AddAllocated(ptr_to_value);
    return *this;
  }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
    return *this;
  }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
      int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
// uses UnsafeArenaAddAllocated instead.
template <typename T>
class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
    : public std::iterator<std::output_iterator_tag, T> {
 public:
  explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T const* const ptr_to_value) {
    field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
      int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

}  // namespace internal

// Provides a back insert iterator for RepeatedField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field) {
  return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}

// Provides a back insert iterator for RepeatedPtrField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Special back insert iterator for RepeatedPtrField instances, just in
// case someone wants to write generic template code that can access both
// RepeatedFields and RepeatedPtrFields using a common name.
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}
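// Illustrative sketch (not part of this header; `append_all` is
// hypothetical): because the overload above accepts RepeatedPtrField under
// the common RepeatedFieldBackInserter name, generic code can target both
// field kinds with one implementation:
//
//   template <typename Container, typename Field>
//   void append_all(const Container& c, Field* field) {
//     std::copy(c.begin(), c.end(), RepeatedFieldBackInserter(field));
//   }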
// Provides a back insert iterator for RepeatedPtrField instances
// similar to std::back_inserter(), which transfers ownership while
// copying elements.
template <typename T>
internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
AllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

// Similar to AllocatedRepeatedPtrFieldBackInserter, but uses
// UnsafeArenaAddAllocated instead of AddAllocated.
// This is slightly faster if that matters. It is also useful in legacy code
// that uses temporary ownership to avoid copies. Example:
//   RepeatedPtrField<T> temp_field;
//   temp_field.AddAllocated(new T);
//   ... // Do something with temp_field
//   temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
// If you put temp_field on the arena this fails, because ownership
// transfers to the arena at the "AddAllocated" call and is never released,
// causing a double delete. Using UnsafeArenaAddAllocated prevents this.
template <typename T>
internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

// Extern declarations of common instantiations to reduce library bloat.
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedPtrField<std::string>;

}  // namespace protobuf
}  // namespace google

#include <google/protobuf/port_undef.inc>

#endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__