Sierra Toolkit  Version of the Day
red_black_tree_eastl.h
1 /*
2 Copyright (C) 2005,2009-2010 Electronic Arts, Inc. All rights reserved.
3 
4 Redistribution and use in source and binary forms, with or without
5 modification, are permitted provided that the following conditions
6 are met:
7 
8 1. Redistributions of source code must retain the above copyright
9  notice, this list of conditions and the following disclaimer.
10 2. Redistributions in binary form must reproduce the above copyright
11  notice, this list of conditions and the following disclaimer in the
12  documentation and/or other materials provided with the distribution.
13 3. Neither the name of Electronic Arts, Inc. ("EA") nor the names of
14  its contributors may be used to endorse or promote products derived
15  from this software without specific prior written permission.
16 
17 THIS SOFTWARE IS PROVIDED BY ELECTRONIC ARTS AND ITS CONTRIBUTORS "AS IS" AND ANY
18 EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28 
30 // EASTL/red_black_tree.h
31 // Written by Paul Pedriana 2005.
33 
34 
35 
36 #ifndef EASTL_RED_BLACK_TREE_H
37 #define EASTL_RED_BLACK_TREE_H
38 
39 
40 
41 #include <stk_util/util/config_eastl.h>
42 #include <stk_util/util/type_traits_eastl.h>
43 #include <stk_util/util/allocator_eastl.h>
44 #include <stk_util/util/iterator_eastl.h>
45 #include <stk_util/util/utility_eastl.h>
46 #include <stk_util/util/algorithm_eastl.h>
47 
48 #ifdef _MSC_VER
49  #pragma warning(push, 0)
50  #include <new>
51  #include <stddef.h>
52  #pragma warning(pop)
53 #else
54  #include <new>
55  #include <stddef.h>
56 #endif
57 
58 
59 #ifdef _MSC_VER
60  #pragma warning(push)
61  #pragma warning(disable: 4512) // 'class' : assignment operator could not be generated
62  #pragma warning(disable: 4530) // C++ exception handler used, but unwind semantics are not enabled. Specify /EHsc
63 #endif
64 
65 
66 namespace eastl
67 {
68 
73  #ifndef EASTL_RBTREE_DEFAULT_NAME
74  #define EASTL_RBTREE_DEFAULT_NAME EASTL_DEFAULT_NAME_PREFIX " rbtree" // Unless the user overrides something, this is "EASTL rbtree".
75  #endif
76 
77 
80  #ifndef EASTL_RBTREE_DEFAULT_ALLOCATOR
81  #define EASTL_RBTREE_DEFAULT_ALLOCATOR allocator_type(EASTL_RBTREE_DEFAULT_NAME)
82  #endif
83 
84 
85 
89  {
90  kRBTreeColorRed,
91  kRBTreeColorBlack
92  };
93 
94 
95 
99  {
100  kRBTreeSideLeft,
101  kRBTreeSideRight
102  };
103 
104 
105 
117  {
118  typedef rbtree_node_base this_type;
119 
120  public:
121  this_type* mpNodeRight; // Declared first because it is used most often.
122  this_type* mpNodeLeft;
123  this_type* mpNodeParent;
124  char mColor; // We only need one bit here, would be nice if we could stuff that bit somewhere else.
125  };
126 
127 
130  template <typename Value>
132  {
133  Value mValue; // For set and multiset, this is the user's value, for map and multimap, this is a pair of key/value.
134  };
135 
136 
137 
138 
139  // rbtree_node_base functions
140  //
141  // These are the fundamental functions that we use to maintain the
142  // tree. The bulk of the work of the tree maintenance is done in
143  // these functions.
144  //
145  EASTL_API rbtree_node_base* RBTreeIncrement (const rbtree_node_base* pNode);
146  EASTL_API rbtree_node_base* RBTreeDecrement (const rbtree_node_base* pNode);
147  EASTL_API rbtree_node_base* RBTreeGetMinChild (const rbtree_node_base* pNode);
148  EASTL_API rbtree_node_base* RBTreeGetMaxChild (const rbtree_node_base* pNode);
149  EASTL_API size_t RBTreeGetBlackCount(const rbtree_node_base* pNodeTop,
150  const rbtree_node_base* pNodeBottom);
151  EASTL_API void RBTreeInsert ( rbtree_node_base* pNode,
152  rbtree_node_base* pNodeParent,
153  rbtree_node_base* pNodeAnchor,
154  RBTreeSide insertionSide);
155  EASTL_API void RBTreeErase ( rbtree_node_base* pNode,
156  rbtree_node_base* pNodeAnchor);
157 
158 
159 
160 
161 
162 
163 
166  template <typename T, typename Pointer, typename Reference>
168  {
172  typedef eastl_size_t size_type; // See config.h for the definition of eastl_size_t, which defaults to uint32_t.
173  typedef ptrdiff_t difference_type;
174  typedef T value_type;
176  typedef rbtree_node<T> node_type;
177  typedef Pointer pointer;
178  typedef Reference reference;
179  typedef EASTL_ITC_NS::bidirectional_iterator_tag iterator_category;
180 
181  public:
182  node_type* mpNode;
183 
184  public:
185  rbtree_iterator();
186  explicit rbtree_iterator(const node_type* pNode);
187  rbtree_iterator(const iterator& x);
188 
189  reference operator*() const;
190  pointer operator->() const;
191 
192  rbtree_iterator& operator++();
193  rbtree_iterator operator++(int);
194 
195  rbtree_iterator& operator--();
196  rbtree_iterator operator--(int);
197 
198  }; // rbtree_iterator
199 
200 
201 
202 
203 
205  // rb_base
206  //
207  // This class allows us to use a generic rbtree as the basis of map, multimap,
208  // set, and multiset transparently. The vital template parameters for this are
209  // the ExtractKey and the bUniqueKeys parameters.
210  //
211  // If the rbtree has a value type of the form pair<T1, T2> (i.e. it is a map or
212  // multimap and not a set or multiset) and a key extraction policy that returns
213  // the first part of the pair, the rbtree gets a mapped_type typedef.
214  // If it satisfies those criteria and also has unique keys, then it also gets an
215  // operator[] (which only map and set have and multimap and multiset don't have).
216  //
218 
219 
220 
    // rb_base
    //
    // Primary template: holds the Compare functor and exposes the ExtractKey
    // policy to rbtree. The partial specializations below cover the
    // unique/non-unique and pair/use_first (map-like) combinations.
    template <typename Key, typename Value, typename Compare, typename ExtractKey, bool bUniqueKeys, typename RBTree>
    struct rb_base
    {
        typedef ExtractKey extract_key; // Functor type that extracts the key from a stored value.

    public:
        Compare mCompare; // To do: Make sure that empty Compare classes go away via empty base optimizations.

    public:
        rb_base() : mCompare() {}
        rb_base(const Compare& compare) : mCompare(compare) {}
    };
237 
238 
    // rb_base partial specialization for non-unique keys (multiset-like usage:
    // bUniqueKeys == false, general ExtractKey).
    template <typename Key, typename Value, typename Compare, typename ExtractKey, typename RBTree>
    struct rb_base<Key, Value, Compare, ExtractKey, false, RBTree>
    {
        typedef ExtractKey extract_key; // Functor type that extracts the key from a stored value.

    public:
        Compare mCompare; // To do: Make sure that empty Compare classes go away via empty base optimizations.

    public:
        rb_base() : mCompare() {}
        rb_base(const Compare& compare) : mCompare(compare) {}
    };
256 
257 
261  template <typename Key, typename Pair, typename Compare, typename RBTree>
262  struct rb_base<Key, Pair, Compare, eastl::use_first<Pair>, true, RBTree>
263  {
265 
266  public:
267  Compare mCompare; // To do: Make sure that empty Compare classes go away via empty base optimizations.
268 
269  public:
270  rb_base() : mCompare() {}
271  rb_base(const Compare& compare) : mCompare(compare) {}
272  };
273 
274 
278  template <typename Key, typename Pair, typename Compare, typename RBTree>
279  struct rb_base<Key, Pair, Compare, eastl::use_first<Pair>, false, RBTree>
280  {
282 
283  public:
284  Compare mCompare; // To do: Make sure that empty Compare classes go away via empty base optimizations.
285 
286  public:
287  rb_base() : mCompare() {}
288  rb_base(const Compare& compare) : mCompare(compare) {}
289  };
290 
291 
292 
293 
294 
343  template <typename Key, typename Value, typename Compare, typename Allocator,
344  typename ExtractKey, bool bMutableIterators, bool bUniqueKeys>
345  class rbtree
346  : public rb_base<Key, Value, Compare, ExtractKey, bUniqueKeys,
347  rbtree<Key, Value, Compare, Allocator, ExtractKey, bMutableIterators, bUniqueKeys> >
348  {
349  public:
350  typedef ptrdiff_t difference_type;
351  typedef eastl_size_t size_type; // See config.h for the definition of eastl_size_t, which defaults to uint32_t.
352  typedef Key key_type;
353  typedef Value value_type;
355  typedef value_type& reference;
356  typedef const value_type& const_reference;
357  typedef typename type_select<bMutableIterators,
363 
364  typedef Allocator allocator_type;
365  typedef Compare key_compare;
366  typedef typename type_select<bUniqueKeys, eastl::pair<iterator, bool>, iterator>::type insert_return_type; // map/set::insert return a pair, multimap/multiset::iterator return an iterator.
367  typedef rbtree<Key, Value, Compare, Allocator,
368  ExtractKey, bMutableIterators, bUniqueKeys> this_type;
370  typedef integral_constant<bool, bUniqueKeys> has_unique_keys_type;
371  typedef typename base_type::extract_key extract_key;
372 
373  using base_type::mCompare;
374 
375  enum
376  {
377  kKeyAlignment = EASTL_ALIGN_OF(key_type),
378  kKeyAlignmentOffset = 0, // To do: Make sure this really is zero for all uses of this template.
379  kValueAlignment = EASTL_ALIGN_OF(value_type),
380  kValueAlignmentOffset = 0 // To fix: This offset is zero for sets and >0 for maps. Need to fix this.
381  };
382 
383  public:
384  rbtree_node_base mAnchor;
385  size_type mnSize;
386  allocator_type mAllocator; // To do: Use base class optimization to make this go away.
387 
388  public:
389  // ctor/dtor
390  rbtree();
391  rbtree(const allocator_type& allocator);
392  rbtree(const Compare& compare, const allocator_type& allocator = EASTL_RBTREE_DEFAULT_ALLOCATOR);
393  rbtree(const this_type& x);
394 
395  template <typename InputIterator>
396  rbtree(InputIterator first, InputIterator last, const Compare& compare, const allocator_type& allocator = EASTL_RBTREE_DEFAULT_ALLOCATOR);
397 
398  ~rbtree();
399 
400  public:
401  // properties
402  allocator_type& get_allocator();
403  void set_allocator(const allocator_type& allocator);
404 
405  const key_compare& key_comp() const { return mCompare; }
406  key_compare& key_comp() { return mCompare; }
407 
408  this_type& operator=(const this_type& x);
409 
410  void swap(this_type& x);
411 
412  public:
413  // iterators
414  iterator begin();
415  const_iterator begin() const;
416  iterator end();
417  const_iterator end() const;
418 
419  reverse_iterator rbegin();
420  const_reverse_iterator rbegin() const;
421  reverse_iterator rend();
422  const_reverse_iterator rend() const;
423 
424  public:
425  bool empty() const;
426  size_type size() const;
427 
430  insert_return_type insert(const value_type& value);
431 
432  // C++ standard: inserts value if and only if there is no element with
433  // key equivalent to the key of t in containers with unique keys; always
434  // inserts value in containers with equivalent keys. Always returns the
435  // iterator pointing to the element with key equivalent to the key of value.
436  // iterator position is a hint pointing to where the insert should start
437  // to search. However, there is a potential defect/improvement report on this behaviour:
438  // LWG issue #233 (http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1780.html)
439  // We follow the same approach as SGI STL/STLPort and use the position as
440  // a forced insertion position for the value when possible.
441  iterator insert(iterator position, const value_type& value);
442 
443  template <typename InputIterator>
444  void insert(InputIterator first, InputIterator last);
445 
446  iterator erase(iterator position);
447  iterator erase(iterator first, iterator last);
448 
449  reverse_iterator erase(reverse_iterator position);
450  reverse_iterator erase(reverse_iterator first, reverse_iterator last);
451 
452  // For some reason, multiple STL versions make a specialization
453  // for erasing an array of key_types. I'm pretty sure we don't
454  // need this, but just to be safe we will follow suit.
455  // The implementation is trivial. Returns void because the values
456  // could well be randomly distributed throughout the tree and thus
457  // a return value would be nearly meaningless.
458  void erase(const key_type* first, const key_type* last);
459 
460  void clear();
461  void reset();
462 
463  iterator find(const key_type& key);
464  const_iterator find(const key_type& key) const;
465 
477  template <typename U, typename Compare2>
478  iterator find_as(const U& u, Compare2 compare2);
479 
480  template <typename U, typename Compare2>
481  const_iterator find_as(const U& u, Compare2 compare2) const;
482 
483  iterator lower_bound(const key_type& key);
484  const_iterator lower_bound(const key_type& key) const;
485 
486  iterator upper_bound(const key_type& key);
487  const_iterator upper_bound(const key_type& key) const;
488 
489  bool validate() const;
490  int validate_iterator(const_iterator i) const;
491 
492  protected:
493  node_type* DoAllocateNode();
494  void DoFreeNode(node_type* pNode);
495 
496  node_type* DoCreateNodeFromKey(const key_type& key);
497  node_type* DoCreateNode(const value_type& value);
498  node_type* DoCreateNode(const node_type* pNodeSource, node_type* pNodeParent);
499 
500  node_type* DoCopySubtree(const node_type* pNodeSource, node_type* pNodeDest);
501  void DoNukeSubtree(node_type* pNode);
502 
503  // Intentionally return a pair and not an iterator for DoInsertValue(..., true_type)
504  // This is because the C++ standard for map and set is to return a pair and not just an iterator.
505  eastl::pair<iterator, bool> DoInsertValue(const value_type& value, true_type); // true_type means keys are unique.
506  iterator DoInsertValue(const value_type& value, false_type); // false_type means keys are not unique.
507 
508  eastl::pair<iterator, bool> DoInsertKey(const key_type& key, true_type);
509  iterator DoInsertKey(const key_type& key, false_type);
510 
511  iterator DoInsertValue(iterator position, const value_type& value, true_type);
512  iterator DoInsertValue(iterator position, const value_type& value, false_type);
513 
514  iterator DoInsertKey(iterator position, const key_type& key, true_type);
515  iterator DoInsertKey(iterator position, const key_type& key, false_type);
516 
517  iterator DoInsertValueImpl(node_type* pNodeParent, const value_type& value, bool bForceToLeft);
518  iterator DoInsertKeyImpl(node_type* pNodeParent, const key_type& key, bool bForceToLeft);
519 
520  }; // rbtree
521 
522 
523 
524 
525 
527  // rbtree_node_base functions
529 
530  EASTL_API inline rbtree_node_base* RBTreeGetMinChild(const rbtree_node_base* pNodeBase)
531  {
532  while(pNodeBase->mpNodeLeft)
533  pNodeBase = pNodeBase->mpNodeLeft;
534  return const_cast<rbtree_node_base*>(pNodeBase);
535  }
536 
537  EASTL_API inline rbtree_node_base* RBTreeGetMaxChild(const rbtree_node_base* pNodeBase)
538  {
539  while(pNodeBase->mpNodeRight)
540  pNodeBase = pNodeBase->mpNodeRight;
541  return const_cast<rbtree_node_base*>(pNodeBase);
542  }
543 
544  // The rest of the functions are non-trivial and are found in
545  // the corresponding .cpp file to this file.
546 
547 
548 
550  // rbtree_iterator functions
552 
    // Default constructor: produces a singular iterator (mpNode == NULL).
    template <typename T, typename Pointer, typename Reference>
    rbtree_iterator<T, Pointer, Reference>::rbtree_iterator()
        : mpNode(NULL) { }
556 
557 
558  template <typename T, typename Pointer, typename Reference>
559  rbtree_iterator<T, Pointer, Reference>::rbtree_iterator(const node_type* pNode)
560  : mpNode(static_cast<node_type*>(const_cast<node_type*>(pNode))) { }
561 
562 
    // Copy-constructs from an 'iterator' (the tree's mutable iterator typedef,
    // not visible in this listing), sharing its node pointer. This is what
    // allows a const_iterator to be built from a non-const iterator.
    template <typename T, typename Pointer, typename Reference>
    rbtree_iterator<T, Pointer, Reference>::rbtree_iterator(const iterator& x)
        : mpNode(x.mpNode) { }
566 
567 
    // Dereference: reference to the value stored in the current node.
    template <typename T, typename Pointer, typename Reference>
    typename rbtree_iterator<T, Pointer, Reference>::reference
    rbtree_iterator<T, Pointer, Reference>::operator*() const
        { return mpNode->mValue; }
572 
573 
    // Member access: pointer to the value stored in the current node.
    template <typename T, typename Pointer, typename Reference>
    typename rbtree_iterator<T, Pointer, Reference>::pointer
    rbtree_iterator<T, Pointer, Reference>::operator->() const
        { return &mpNode->mValue; }
578 
579 
    // Pre-increment: advances to the in-order successor via RBTreeIncrement.
    template <typename T, typename Pointer, typename Reference>
    typename rbtree_iterator<T, Pointer, Reference>::this_type&
    rbtree_iterator<T, Pointer, Reference>::operator++()
    {
        mpNode = static_cast<node_type*>(RBTreeIncrement(mpNode));
        return *this;
    }
587 
588 
589  template <typename T, typename Pointer, typename Reference>
590  typename rbtree_iterator<T, Pointer, Reference>::this_type
591  rbtree_iterator<T, Pointer, Reference>::operator++(int)
592  {
593  this_type temp(*this);
594  mpNode = static_cast<node_type*>(RBTreeIncrement(mpNode));
595  return temp;
596  }
597 
598 
    // Pre-decrement: steps to the in-order predecessor via RBTreeDecrement.
    template <typename T, typename Pointer, typename Reference>
    typename rbtree_iterator<T, Pointer, Reference>::this_type&
    rbtree_iterator<T, Pointer, Reference>::operator--()
    {
        mpNode = static_cast<node_type*>(RBTreeDecrement(mpNode));
        return *this;
    }
606 
607 
608  template <typename T, typename Pointer, typename Reference>
609  typename rbtree_iterator<T, Pointer, Reference>::this_type
610  rbtree_iterator<T, Pointer, Reference>::operator--(int)
611  {
612  this_type temp(*this);
613  mpNode = static_cast<node_type*>(RBTreeDecrement(mpNode));
614  return temp;
615  }
616 
617 
618  // The C++ defect report #179 requires that we support comparisons between const and non-const iterators.
619  // Thus we provide additional template paremeters here to support this. The defect report does not
620  // require us to support comparisons between reverse_iterators and const_reverse_iterators.
    template <typename T, typename PointerA, typename ReferenceA, typename PointerB, typename ReferenceB>
    inline bool operator==(const rbtree_iterator<T, PointerA, ReferenceA>& a,
                           const rbtree_iterator<T, PointerB, ReferenceB>& b)
    {
        // Two iterators are equal iff they refer to the same tree node.
        return a.mpNode == b.mpNode;
    }
627 
628 
629  template <typename T, typename PointerA, typename ReferenceA, typename PointerB, typename ReferenceB>
630  inline bool operator!=(const rbtree_iterator<T, PointerA, ReferenceA>& a,
631  const rbtree_iterator<T, PointerB, ReferenceB>& b)
632  {
633  return a.mpNode != b.mpNode;
634  }
635 
636 
637  // We provide a version of operator!= for the case where the iterators are of the
638  // same type. This helps prevent ambiguity errors in the presence of rel_ops.
639  template <typename T, typename Pointer, typename Reference>
640  inline bool operator!=(const rbtree_iterator<T, Pointer, Reference>& a,
641  const rbtree_iterator<T, Pointer, Reference>& b)
642  {
643  return a.mpNode != b.mpNode;
644  }
645 
646 
647 
648 
650  // rbtree functions
652 
    // Default constructor: empty tree with the default-named allocator.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline rbtree<K, V, C, A, E, bM, bU>::rbtree()
        : mAnchor(),
          mnSize(0),
          mAllocator(EASTL_RBTREE_DEFAULT_NAME)
    {
        reset(); // Presumably puts mAnchor into the empty-tree state (left/right -> self, parent NULL; cf. the fixups in swap()).
    }
661 
662 
    // Constructs an empty tree with a user-supplied allocator.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline rbtree<K, V, C, A, E, bM, bU>::rbtree(const allocator_type& allocator)
        : mAnchor(),
          mnSize(0),
          mAllocator(allocator)
    {
        reset(); // Establish the empty-tree anchor state.
    }
671 
672 
    // Constructs an empty tree with a user-supplied compare functor and allocator.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline rbtree<K, V, C, A, E, bM, bU>::rbtree(const C& compare, const allocator_type& allocator)
        : base_type(compare),
          mAnchor(),
          mnSize(0),
          mAllocator(allocator)
    {
        reset(); // Establish the empty-tree anchor state.
    }
682 
683 
    // Copy constructor: deep-copies x's entire subtree, then re-caches the
    // min/max anchor links and the element count.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline rbtree<K, V, C, A, E, bM, bU>::rbtree(const this_type& x)
        : base_type(x.mCompare),
          mAnchor(),
          mnSize(0),
          mAllocator(x.mAllocator)
    {
        reset(); // Establish the empty-tree anchor state before copying.

        if(x.mAnchor.mpNodeParent) // mAnchor.mpNodeParent is the rb_tree root node.
        {
            mAnchor.mpNodeParent = DoCopySubtree((const node_type*)x.mAnchor.mpNodeParent, (node_type*)&mAnchor);
            mAnchor.mpNodeRight = RBTreeGetMaxChild(mAnchor.mpNodeParent);
            mAnchor.mpNodeLeft = RBTreeGetMinChild(mAnchor.mpNodeParent);
            mnSize = x.mnSize;
        }
    }
701 
702 
    // Range constructor: inserts [first, last) one element at a time. When
    // exceptions are enabled, a throwing insert clears everything inserted so
    // far before rethrowing, so no nodes are leaked.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    template <typename InputIterator>
    inline rbtree<K, V, C, A, E, bM, bU>::rbtree(InputIterator first, InputIterator last, const C& compare, const allocator_type& allocator)
        : base_type(compare),
          mAnchor(),
          mnSize(0),
          mAllocator(allocator)
    {
        reset(); // Establish the empty-tree anchor state before inserting.

        #if EASTL_EXCEPTIONS_ENABLED
            try
            {
        #endif
                for(; first != last; ++first)
                    insert(*first);
        #if EASTL_EXCEPTIONS_ENABLED
            }
            catch(...)
            {
                clear(); // Roll back: free everything inserted so far.
                throw;
            }
        #endif
    }
728 
729 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline rbtree<K, V, C, A, E, bM, bU>::~rbtree()
    {
        // Erase the entire tree. DoNukeSubtree is not a
        // conventional erase function, as it does no rebalancing.
        DoNukeSubtree((node_type*)mAnchor.mpNodeParent);
    }
737 
738 
    // Returns a mutable reference to the tree's allocator.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::allocator_type&
    rbtree<K, V, C, A, E, bM, bU>::get_allocator()
    {
        return mAllocator;
    }
745 
746 
    // Replaces the tree's allocator. Note: existing nodes were allocated with
    // the previous allocator; callers are responsible for calling this only
    // when that is safe (e.g. on an empty tree).
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline void rbtree<K, V, C, A, E, bM, bU>::set_allocator(const allocator_type& allocator)
    {
        mAllocator = allocator;
    }
752 
753 
    // Number of elements; O(1) since the count is cached in mnSize.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::size_type
    rbtree<K, V, C, A, E, bM, bU>::size() const
        { return mnSize; }
758 
759 
    // True iff the tree holds no elements.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline bool rbtree<K, V, C, A, E, bM, bU>::empty() const
        { return (mnSize == 0); }
763 
764 
    // begin() is the minimum node, which the anchor caches in mpNodeLeft.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::begin()
        { return iterator(static_cast<node_type*>(mAnchor.mpNodeLeft)); }
769 
770 
    // const begin(): the const_cast only strips the constness that 'this'
    // being const imposes on the anchor's pointer; the node is not modified.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::const_iterator
    rbtree<K, V, C, A, E, bM, bU>::begin() const
        { return const_iterator(static_cast<node_type*>(const_cast<rbtree_node_base*>(mAnchor.mpNodeLeft))); }
775 
776 
    // end() refers to the anchor sentinel itself (one past the maximum node).
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::end()
        { return iterator(static_cast<node_type*>(&mAnchor)); }
781 
782 
    // const end(): const_cast strips only the constness imposed by 'this'.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::const_iterator
    rbtree<K, V, C, A, E, bM, bU>::end() const
        { return const_iterator(static_cast<node_type*>(const_cast<rbtree_node_base*>(&mAnchor))); }
787 
788 
    // Reverse traversal starts at end(), per the reverse_iterator convention.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::reverse_iterator
    rbtree<K, V, C, A, E, bM, bU>::rbegin()
        { return reverse_iterator(end()); }
793 
794 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::const_reverse_iterator
    rbtree<K, V, C, A, E, bM, bU>::rbegin() const
        { return const_reverse_iterator(end()); }
799 
800 
    // Reverse traversal ends at begin(), per the reverse_iterator convention.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::reverse_iterator
    rbtree<K, V, C, A, E, bM, bU>::rend()
        { return reverse_iterator(begin()); }
805 
806 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::const_reverse_iterator
    rbtree<K, V, C, A, E, bM, bU>::rend() const
        { return const_reverse_iterator(begin()); }
811 
812 
    // Copy assignment: clears this tree, copies the compare functor (and, when
    // EASTL_ALLOCATOR_COPY_ENABLED, the allocator), then deep-copies x's
    // subtree and re-caches min/max/size. Self-assignment is a no-op.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::this_type&
    rbtree<K, V, C, A, E, bM, bU>::operator=(const this_type& x)
    {
        if(this != &x)
        {
            clear(); // Release all current nodes before copying.

            #if EASTL_ALLOCATOR_COPY_ENABLED
                mAllocator = x.mAllocator;
            #endif

            base_type::mCompare = x.mCompare;

            if(x.mAnchor.mpNodeParent) // mAnchor.mpNodeParent is the rb_tree root node.
            {
                mAnchor.mpNodeParent = DoCopySubtree((const node_type*)x.mAnchor.mpNodeParent, (node_type*)&mAnchor);
                mAnchor.mpNodeRight = RBTreeGetMaxChild(mAnchor.mpNodeParent);
                mAnchor.mpNodeLeft = RBTreeGetMinChild(mAnchor.mpNodeParent);
                mnSize = x.mnSize;
            }
        }
        return *this;
    }
837 
838 
    // Swaps the contents of two trees. With equal allocators this is O(1)
    // pointer surgery; the embedded anchor nodes cannot simply be swapped, so
    // the child/parent links that point back at each anchor are patched up
    // case-by-case below. With unequal allocators it degrades to a
    // copy-based exchange (O(n), may allocate).
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    void rbtree<K, V, C, A, E, bM, bU>::swap(this_type& x)
    {
        if(mAllocator == x.mAllocator) // If allocators are equivalent...
        {
            // Most of our members can be exchaged by a basic swap:
            // We leave mAllocator as-is.
            eastl::swap(mnSize, x.mnSize);
            eastl::swap(base_type::mCompare, x.mCompare);

            // However, because our anchor node is a part of our class instance and not
            // dynamically allocated, we can't do a swap of it but must do a more elaborate
            // procedure. This is the downside to having the mAnchor be like this, but
            // otherwise we consider it a good idea to avoid allocating memory for a
            // nominal container instance.

            // We optimize for the expected most common case: both pointers being non-null.
            if(mAnchor.mpNodeParent && x.mAnchor.mpNodeParent) // If both pointers are non-null...
            {
                eastl::swap(mAnchor.mpNodeRight, x.mAnchor.mpNodeRight);
                eastl::swap(mAnchor.mpNodeLeft, x.mAnchor.mpNodeLeft);
                eastl::swap(mAnchor.mpNodeParent, x.mAnchor.mpNodeParent);

                // We need to fix up the anchors to point to themselves (we can't just swap them).
                mAnchor.mpNodeParent->mpNodeParent = &mAnchor;
                x.mAnchor.mpNodeParent->mpNodeParent = &x.mAnchor;
            }
            else if(mAnchor.mpNodeParent) // Only this tree has elements; hand them to x.
            {
                x.mAnchor.mpNodeRight = mAnchor.mpNodeRight;
                x.mAnchor.mpNodeLeft = mAnchor.mpNodeLeft;
                x.mAnchor.mpNodeParent = mAnchor.mpNodeParent;
                x.mAnchor.mpNodeParent->mpNodeParent = &x.mAnchor;

                // We need to fix up our anchor to point it itself (we can't have it swap with x).
                mAnchor.mpNodeRight = &mAnchor;
                mAnchor.mpNodeLeft = &mAnchor;
                mAnchor.mpNodeParent = NULL;
            }
            else if(x.mAnchor.mpNodeParent) // Only x has elements; take them.
            {
                mAnchor.mpNodeRight = x.mAnchor.mpNodeRight;
                mAnchor.mpNodeLeft = x.mAnchor.mpNodeLeft;
                mAnchor.mpNodeParent = x.mAnchor.mpNodeParent;
                mAnchor.mpNodeParent->mpNodeParent = &mAnchor;

                // We need to fix up x's anchor to point it itself (we can't have it swap with us).
                x.mAnchor.mpNodeRight = &x.mAnchor;
                x.mAnchor.mpNodeLeft = &x.mAnchor;
                x.mAnchor.mpNodeParent = NULL;
            } // Else both are NULL and there is nothing to do.
        }
        else
        {
            const this_type temp(*this); // Can't call eastl::swap because that would
            *this = x;                   // itself call this member swap function.
            x = temp;
        }
    }
898 
899 
    // Inserts value, dispatching on has_unique_keys_type to the unique-key
    // (pair-returning) or equivalent-key (iterator-returning) implementation.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::insert_return_type // map/set::insert return a pair, multimap/multiset::iterator return an iterator.
    rbtree<K, V, C, A, E, bM, bU>::insert(const value_type& value)
        { return DoInsertValue(value, has_unique_keys_type()); }
904 
905 
906  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
908  rbtree<K, V, C, A, E, bM, bU>::insert(iterator position, const value_type& value)
909  { return DoInsertValue(position, value, has_unique_keys_type()); }
910 
911 
912  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
914  rbtree<K, V, C, A, E, bM, bU>::DoInsertValue(const value_type& value, true_type) // true_type means keys are unique.
915  {
916  // This is the pathway for insertion of unique keys (map and set, but not multimap and multiset).
917  // Note that we return a pair and not an iterator. This is because the C++ standard for map
918  // and set is to return a pair and not just an iterator.
919  extract_key extractKey;
920 
921  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
922  node_type* pLowerBound = (node_type*)&mAnchor; // Set it to the container end for now.
923  node_type* pParent; // This will be where we insert the new node.
924 
925  bool bValueLessThanNode = true; // If the tree is empty, this will result in an insertion at the front.
926 
927  // Find insertion position of the value. This will either be a position which
928  // already contains the value, a position which is greater than the value or
929  // end(), which we treat like a position which is greater than the value.
930  while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
931  {
932  bValueLessThanNode = mCompare(extractKey(value), extractKey(pCurrent->mValue));
933  pLowerBound = pCurrent;
934 
935  if(bValueLessThanNode)
936  {
937  EASTL_VALIDATE_COMPARE(!mCompare(extractKey(pCurrent->mValue), extractKey(value))); // Validate that the compare function is sane.
938  pCurrent = (node_type*)pCurrent->mpNodeLeft;
939  }
940  else
941  pCurrent = (node_type*)pCurrent->mpNodeRight;
942  }
943 
944  pParent = pLowerBound; // pLowerBound is actually upper bound right now (i.e. it is > value instead of <=), but we will make it the lower bound below.
945 
946  if(bValueLessThanNode) // If we ended up on the left side of the last parent node...
947  {
948  if(EASTL_LIKELY(pLowerBound != (node_type*)mAnchor.mpNodeLeft)) // If the tree was empty or if we otherwise need to insert at the very front of the tree...
949  {
950  // At this point, pLowerBound points to a node which is > than value.
951  // Move it back by one, so that it points to a node which is <= value.
952  pLowerBound = (node_type*)RBTreeDecrement(pLowerBound);
953  }
954  else
955  {
956  const iterator itResult(DoInsertValueImpl(pLowerBound, value, false));
957  return pair<iterator, bool>(itResult, true);
958  }
959  }
960 
961  // Since here we require values to be unique, we will do nothing if the value already exists.
962  if(mCompare(extractKey(pLowerBound->mValue), extractKey(value))) // If the node is < the value (i.e. if value is >= the node)...
963  {
964  EASTL_VALIDATE_COMPARE(!mCompare(extractKey(value), extractKey(pLowerBound->mValue))); // Validate that the compare function is sane.
965  const iterator itResult(DoInsertValueImpl(pParent, value, false));
966  return pair<iterator, bool>(itResult, true);
967  }
968 
969  // The item already exists (as found by the compare directly above), so return false.
970  return pair<iterator, bool>(iterator(pLowerBound), false);
971  }
972 
973 
    // Equivalent-key insertion (multimap/multiset): walks down the tree to a
    // leaf position and always inserts; duplicates land on the right-hand
    // side of existing equivalent keys (the else branch below).
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::DoInsertValue(const value_type& value, false_type) // false_type means keys are not unique.
    {
        // This is the pathway for insertion of non-unique keys (multimap and multiset, but not map and set).
        node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
        node_type* pRangeEnd = (node_type*)&mAnchor; // Set it to the container end for now.
        extract_key extractKey;

        while(pCurrent)
        {
            pRangeEnd = pCurrent;

            if(mCompare(extractKey(value), extractKey(pCurrent->mValue)))
            {
                EASTL_VALIDATE_COMPARE(!mCompare(extractKey(pCurrent->mValue), extractKey(value))); // Validate that the compare function is sane.
                pCurrent = (node_type*)pCurrent->mpNodeLeft;
            }
            else
                pCurrent = (node_type*)pCurrent->mpNodeRight;
        }

        return DoInsertValueImpl(pRangeEnd, value, false);
    }
998 
999 
1000  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1002  rbtree<K, V, C, A, E, bM, bU>::DoInsertKey(const key_type& key, true_type) // true_type means keys are unique.
1003  {
1004  // This code is essentially a slightly modified copy of the the rbtree::insert
1005  // function whereby this version takes a key and not a full value_type.
1006  extract_key extractKey;
1007 
1008  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
1009  node_type* pLowerBound = (node_type*)&mAnchor; // Set it to the container end for now.
1010  node_type* pParent; // This will be where we insert the new node.
1011 
1012  bool bValueLessThanNode = true; // If the tree is empty, this will result in an insertion at the front.
1013 
1014  // Find insertion position of the value. This will either be a position which
1015  // already contains the value, a position which is greater than the value or
1016  // end(), which we treat like a position which is greater than the value.
1017  while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
1018  {
1019  bValueLessThanNode = mCompare(key, extractKey(pCurrent->mValue));
1020  pLowerBound = pCurrent;
1021 
1022  if(bValueLessThanNode)
1023  {
1024  EASTL_VALIDATE_COMPARE(!mCompare(extractKey(pCurrent->mValue), key)); // Validate that the compare function is sane.
1025  pCurrent = (node_type*)pCurrent->mpNodeLeft;
1026  }
1027  else
1028  pCurrent = (node_type*)pCurrent->mpNodeRight;
1029  }
1030 
1031  pParent = pLowerBound; // pLowerBound is actually upper bound right now (i.e. it is > value instead of <=), but we will make it the lower bound below.
1032 
1033  if(bValueLessThanNode) // If we ended up on the left side of the last parent node...
1034  {
1035  if(EASTL_LIKELY(pLowerBound != (node_type*)mAnchor.mpNodeLeft)) // If the tree was empty or if we otherwise need to insert at the very front of the tree...
1036  {
1037  // At this point, pLowerBound points to a node which is > than value.
1038  // Move it back by one, so that it points to a node which is <= value.
1039  pLowerBound = (node_type*)RBTreeDecrement(pLowerBound);
1040  }
1041  else
1042  {
1043  const iterator itResult(DoInsertKeyImpl(pLowerBound, key, false));
1044  return pair<iterator, bool>(itResult, true);
1045  }
1046  }
1047 
1048  // Since here we require values to be unique, we will do nothing if the value already exists.
1049  if(mCompare(extractKey(pLowerBound->mValue), key)) // If the node is < the value (i.e. if value is >= the node)...
1050  {
1051  EASTL_VALIDATE_COMPARE(!mCompare(key, extractKey(pLowerBound->mValue))); // Validate that the compare function is sane.
1052  const iterator itResult(DoInsertKeyImpl(pParent, key, false));
1053  return pair<iterator, bool>(itResult, true);
1054  }
1055 
1056  // The item already exists (as found by the compare directly above), so return false.
1057  return pair<iterator, bool>(iterator(pLowerBound), false);
1058  }
1059 
1060 
1061  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1062  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1063  rbtree<K, V, C, A, E, bM, bU>::DoInsertKey(const key_type& key, false_type) // false_type means keys are not unique.
1064  {
1065  // This is the pathway for insertion of non-unique keys (multimap and multiset, but not map and set).
1066  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
1067  node_type* pRangeEnd = (node_type*)&mAnchor; // Set it to the container end for now.
1068  extract_key extractKey;
1069 
1070  while(pCurrent)
1071  {
1072  pRangeEnd = pCurrent;
1073 
1074  if(mCompare(key, extractKey(pCurrent->mValue)))
1075  {
1076  EASTL_VALIDATE_COMPARE(!mCompare(extractKey(pCurrent->mValue), key)); // Validate that the compare function is sane.
1077  pCurrent = (node_type*)pCurrent->mpNodeLeft;
1078  }
1079  else
1080  pCurrent = (node_type*)pCurrent->mpNodeRight;
1081  }
1082 
1083  return DoInsertKeyImpl(pRangeEnd, key, false);
1084  }
1085 
1086 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::DoInsertValue(iterator position, const value_type& value, true_type) // true_type means keys are unique.
    {
        // This is the pathway for insertion of unique keys (map and set, but not multimap and multiset).
        //
        // We follow the same approach as SGI STL/STLPort and use the position as
        // a forced insertion position for the value when possible.
        extract_key extractKey;

        if((position.mpNode != mAnchor.mpNodeRight) && (position.mpNode != &mAnchor)) // If the user specified a specific insertion position...
        {
            iterator itNext(position);
            ++itNext;

            // To consider: Change this so that 'position' specifies the position after
            // where the insertion goes and not the position before where the insertion goes.
            // Doing so would make this more in line with user expectations and with LWG #233.
            const bool bPositionLessThanValue = mCompare(extractKey(position.mpNode->mValue), extractKey(value));

            if(bPositionLessThanValue) // If (value > *position)...
            {
                EASTL_VALIDATE_COMPARE(!mCompare(extractKey(value), extractKey(position.mpNode->mValue))); // Validate that the compare function is sane.

                const bool bValueLessThanNext = mCompare(extractKey(value), extractKey(itNext.mpNode->mValue));

                if(bValueLessThanNext) // if (value < *itNext)... the hint brackets the value, so it can be used directly.
                {
                    EASTL_VALIDATE_COMPARE(!mCompare(extractKey(itNext.mpNode->mValue), extractKey(value))); // Validate that the compare function is sane.

                    if(position.mpNode->mpNodeRight) // Insert to the left of itNext, i.e. immediately after 'position'.
                        return DoInsertValueImpl(itNext.mpNode, value, true);
                    return DoInsertValueImpl(position.mpNode, value, false);
                }
            }

            // The hint was not usable; fall back to a normal uniqueness-checked insertion.
            return DoInsertValue(value, has_unique_keys_type()).first;
        }

        // Hint is end()/rightmost: if value is greater than the current maximum node,
        // it can be appended directly after the rightmost node.
        if(mnSize && mCompare(extractKey(((node_type*)mAnchor.mpNodeRight)->mValue), extractKey(value)))
        {
            EASTL_VALIDATE_COMPARE(!mCompare(extractKey(value), extractKey(((node_type*)mAnchor.mpNodeRight)->mValue))); // Validate that the compare function is sane.
            return DoInsertValueImpl((node_type*)mAnchor.mpNodeRight, value, false);
        }

        return DoInsertValue(value, has_unique_keys_type()).first; // Empty container or unusable hint.
    }
1134 
1135 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::DoInsertValue(iterator position, const value_type& value, false_type) // false_type means keys are not unique.
    {
        // This is the pathway for insertion of non-unique keys (multimap and multiset, but not map and set).
        //
        // We follow the same approach as SGI STL/STLPort and use the position as
        // a forced insertion position for the value when possible.
        extract_key extractKey;

        if((position.mpNode != mAnchor.mpNodeRight) && (position.mpNode != &mAnchor)) // If the user specified a specific insertion position...
        {
            iterator itNext(position);
            ++itNext;

            // To consider: Change this so that 'position' specifies the position after
            // where the insertion goes and not the position before where the insertion goes.
            // Doing so would make this more in line with user expectations and with LWG #233.

            // The hint is usable when the value sorts between *position and *itNext.
            if(!mCompare(extractKey(value), extractKey(position.mpNode->mValue)) && // If value >= *position &&
               !mCompare(extractKey(itNext.mpNode->mValue), extractKey(value)))     // if value <= *itNext...
            {
                if(position.mpNode->mpNodeRight) // If there are any nodes to the right... [this expression will always be true as long as we aren't at the end()]
                    return DoInsertValueImpl(itNext.mpNode, value, true); // Specifically insert in front of (to the left of) itNext (and thus after 'position').
                return DoInsertValueImpl(position.mpNode, value, false);
            }

            return DoInsertValue(value, has_unique_keys_type()); // If the above specified hint was not useful, then we do a regular insertion.
        }

        // This pathway shouldn't be commonly executed, as the user shouldn't be calling
        // this hinted version of insert if the user isn't providing a useful hint.

        if(mnSize && !mCompare(extractKey(value), extractKey(((node_type*)mAnchor.mpNodeRight)->mValue))) // If we are non-empty and the value is >= the last node...
            return DoInsertValueImpl((node_type*)mAnchor.mpNodeRight, value, false); // Insert after the last node (doesn't matter if we force left or not).

        return DoInsertValue(value, has_unique_keys_type()); // We are empty or we are inserting at the end.
    }
1174 
1175 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::DoInsertKey(iterator position, const key_type& key, true_type) // true_type means keys are unique.
    {
        // This is the pathway for insertion of unique keys (map and set, but not multimap and multiset).
        // Key-only variant of the hinted DoInsertValue above; the mapped portion (if any)
        // is default-constructed later by DoInsertKeyImpl/DoCreateNodeFromKey.
        //
        // We follow the same approach as SGI STL/STLPort and use the position as
        // a forced insertion position for the value when possible.
        extract_key extractKey;

        if((position.mpNode != mAnchor.mpNodeRight) && (position.mpNode != &mAnchor)) // If the user specified a specific insertion position...
        {
            iterator itNext(position);
            ++itNext;

            // To consider: Change this so that 'position' specifies the position after
            // where the insertion goes and not the position before where the insertion goes.
            // Doing so would make this more in line with user expectations and with LWG #233.
            const bool bPositionLessThanValue = mCompare(extractKey(position.mpNode->mValue), key);

            if(bPositionLessThanValue) // If (value > *position)...
            {
                EASTL_VALIDATE_COMPARE(!mCompare(key, extractKey(position.mpNode->mValue))); // Validate that the compare function is sane.

                const bool bValueLessThanNext = mCompare(key, extractKey(itNext.mpNode->mValue));

                if(bValueLessThanNext) // If value < *itNext... the hint brackets the key, so it can be used directly.
                {
                    EASTL_VALIDATE_COMPARE(!mCompare(extractKey(itNext.mpNode->mValue), key)); // Validate that the compare function is sane.

                    if(position.mpNode->mpNodeRight) // Insert to the left of itNext, i.e. immediately after 'position'.
                        return DoInsertKeyImpl(itNext.mpNode, key, true);
                    return DoInsertKeyImpl(position.mpNode, key, false);
                }
            }

            // The hint was not usable; fall back to a normal uniqueness-checked insertion.
            return DoInsertKey(key, has_unique_keys_type()).first;
        }

        // Hint is end()/rightmost: if key is greater than the current maximum node,
        // it can be appended directly after the rightmost node.
        if(mnSize && mCompare(extractKey(((node_type*)mAnchor.mpNodeRight)->mValue), key))
        {
            EASTL_VALIDATE_COMPARE(!mCompare(key, extractKey(((node_type*)mAnchor.mpNodeRight)->mValue))); // Validate that the compare function is sane.
            return DoInsertKeyImpl((node_type*)mAnchor.mpNodeRight, key, false);
        }

        return DoInsertKey(key, has_unique_keys_type()).first; // Empty container or unusable hint.
    }
1223 
1224 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::DoInsertKey(iterator position, const key_type& key, false_type) // false_type means keys are not unique.
    {
        // This is the pathway for insertion of non-unique keys (multimap and multiset, but not map and set).
        // Key-only variant of the hinted DoInsertValue above.
        //
        // We follow the same approach as SGI STL/STLPort and use the position as
        // a forced insertion position for the value when possible.
        extract_key extractKey;

        if((position.mpNode != mAnchor.mpNodeRight) && (position.mpNode != &mAnchor)) // If the user specified a specific insertion position...
        {
            iterator itNext(position);
            ++itNext;

            // To consider: Change this so that 'position' specifies the position after
            // where the insertion goes and not the position before where the insertion goes.
            // Doing so would make this more in line with user expectations and with LWG #233.

            // The hint is usable when the key sorts between *position and *itNext.
            if(!mCompare(key, extractKey(position.mpNode->mValue)) && // If value >= *position &&
               !mCompare(extractKey(itNext.mpNode->mValue), key))     // if value <= *itNext...
            {
                if(position.mpNode->mpNodeRight) // If there are any nodes to the right... [this expression will always be true as long as we aren't at the end()]
                    return DoInsertKeyImpl(itNext.mpNode, key, true); // Specifically insert in front of (to the left of) itNext (and thus after 'position').
                return DoInsertKeyImpl(position.mpNode, key, false);
            }

            return DoInsertKey(key, has_unique_keys_type()); // If the above specified hint was not useful, then we do a regular insertion.
        }

        // This pathway shouldn't be commonly executed, as the user shouldn't be calling
        // this hinted version of insert if the user isn't providing a useful hint.
        if(mnSize && !mCompare(key, extractKey(((node_type*)mAnchor.mpNodeRight)->mValue))) // If we are non-empty and the value is >= the last node...
            return DoInsertKeyImpl((node_type*)mAnchor.mpNodeRight, key, false); // Insert after the last node (doesn't matter if we force left or not).

        return DoInsertKey(key, has_unique_keys_type()); // We are empty or we are inserting at the end.
    }
1261 
1262 
1263  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1264  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1265  rbtree<K, V, C, A, E, bM, bU>::DoInsertValueImpl(node_type* pNodeParent, const value_type& value, bool bForceToLeft)
1266  {
1267  RBTreeSide side;
1268  extract_key extractKey;
1269 
1270  // The reason we may want to have bForceToLeft == true is that pNodeParent->mValue and value may be equal.
1271  // In that case it doesn't matter what side we insert on, except that the C++ LWG #233 improvement report
1272  // suggests that we should use the insert hint position to force an ordering. So that's what we do.
1273  if(bForceToLeft || (pNodeParent == &mAnchor) || mCompare(extractKey(value), extractKey(pNodeParent->mValue)))
1274  side = kRBTreeSideLeft;
1275  else
1276  side = kRBTreeSideRight;
1277 
1278  node_type* const pNodeNew = DoCreateNode(value); // Note that pNodeNew->mpLeft, mpRight, mpParent, will be uninitialized.
1279  RBTreeInsert(pNodeNew, pNodeParent, &mAnchor, side);
1280  mnSize++;
1281 
1282  return iterator(pNodeNew);
1283  }
1284 
1285 
1286  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1287  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1288  rbtree<K, V, C, A, E, bM, bU>::DoInsertKeyImpl(node_type* pNodeParent, const key_type& key, bool bForceToLeft)
1289  {
1290  RBTreeSide side;
1291  extract_key extractKey;
1292 
1293  // The reason we may want to have bForceToLeft == true is that pNodeParent->mValue and value may be equal.
1294  // In that case it doesn't matter what side we insert on, except that the C++ LWG #233 improvement report
1295  // suggests that we should use the insert hint position to force an ordering. So that's what we do.
1296  if(bForceToLeft || (pNodeParent == &mAnchor) || mCompare(key, extractKey(pNodeParent->mValue)))
1297  side = kRBTreeSideLeft;
1298  else
1299  side = kRBTreeSideRight;
1300 
1301  node_type* const pNodeNew = DoCreateNodeFromKey(key); // Note that pNodeNew->mpLeft, mpRight, mpParent, will be uninitialized.
1302  RBTreeInsert(pNodeNew, pNodeParent, &mAnchor, side);
1303  mnSize++;
1304 
1305  return iterator(pNodeNew);
1306  }
1307 
1308 
1309  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1310  template <typename InputIterator>
1311  void rbtree<K, V, C, A, E, bM, bU>::insert(InputIterator first, InputIterator last)
1312  {
1313  for( ; first != last; ++first)
1314  DoInsertValue(*first, has_unique_keys_type()); // Or maybe we should call 'insert(end(), *first)' instead. If the first-last range was sorted then this might make some sense.
1315  }
1316 
1317 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline void rbtree<K, V, C, A, E, bM, bU>::clear()
    {
        // Erase the entire tree. DoNukeSubtree is not a
        // conventional erase function, as it does no rebalancing
        // (rebalancing is pointless when every node is being destroyed).
        DoNukeSubtree((node_type*)mAnchor.mpNodeParent);
        reset(); // Restore the anchor and size members to the empty state.
    }
1326 
1327 
1328  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1329  inline void rbtree<K, V, C, A, E, bM, bU>::reset()
1330  {
1331  // The reset function is a special extension function which unilaterally
1332  // resets the container to an empty state without freeing the memory of
1333  // the contained objects. This is useful for very quickly tearing down a
1334  // container built into scratch memory.
1335  mAnchor.mpNodeRight = &mAnchor;
1336  mAnchor.mpNodeLeft = &mAnchor;
1337  mAnchor.mpNodeParent = NULL;
1338  mAnchor.mColor = kRBTreeColorRed;
1339  mnSize = 0;
1340  }
1341 
1342 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::erase(iterator position)
    {
        // Removes the element at 'position' and returns an iterator to the element
        // that followed it. 'position' must be a dereferenceable iterator (not end()).
        const iterator iErase(position);
        --mnSize; // Interleave this between the two references to itNext. We expect no exceptions to occur during the code below.
        ++position;
        RBTreeErase(iErase.mpNode, &mAnchor); // Unlink the node and rebalance the tree.
        DoFreeNode(iErase.mpNode);            // Destroy the value and release the node's memory.
        return position;
    }
1354 
1355 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::erase(iterator first, iterator last)
    {
        // Removes the elements in [first, last) and returns an iterator to the
        // element following the last erased one.
        // We expect that if the user means to clear the container, they will call clear.
        if(EASTL_LIKELY((first.mpNode != mAnchor.mpNodeLeft) || (last.mpNode != &mAnchor))) // If (first != begin or last != end) ...
        {
            // Basic implementation:
            while(first != last)
                first = erase(first);
            return first;

            // Inlined implementation:
            //size_type n = 0;
            //while(first != last)
            //{
            //    const iterator itErase(first);
            //    ++n;
            //    ++first;
            //    RBTreeErase(itErase.mpNode, &mAnchor);
            //    DoFreeNode(itErase.mpNode);
            //}
            //mnSize -= n;
            //return first;
        }

        // The range covers the entire container; clear() is faster since it skips
        // per-node rebalancing.
        clear();
        return iterator((node_type*)&mAnchor); // Same as: return end();
    }
1385 
1386 
1387  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1388  inline typename rbtree<K, V, C, A, E, bM, bU>::reverse_iterator
1389  rbtree<K, V, C, A, E, bM, bU>::erase(reverse_iterator position)
1390  {
1391  return reverse_iterator(erase((++position).base()));
1392  }
1393 
1394 
1395  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1396  typename rbtree<K, V, C, A, E, bM, bU>::reverse_iterator
1397  rbtree<K, V, C, A, E, bM, bU>::erase(reverse_iterator first, reverse_iterator last)
1398  {
1399  // Version which erases in order from first to last.
1400  // difference_type i(first.base() - last.base());
1401  // while(i--)
1402  // first = erase(first);
1403  // return first;
1404 
1405  // Version which erases in order from last to first, but is slightly more efficient:
1406  return reverse_iterator(erase((++last).base(), (++first).base()));
1407  }
1408 
1409 
1410  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1411  inline void rbtree<K, V, C, A, E, bM, bU>::erase(const key_type* first, const key_type* last)
1412  {
1413  // We have no choice but to run a loop like this, as the first/last range could
1414  // have values that are discontiguously located in the tree. And some may not
1415  // even be in the tree.
1416  while(first != last)
1417  erase(*first++);
1418  }
1419 
1420 
1421  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1422  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1423  rbtree<K, V, C, A, E, bM, bU>::find(const key_type& key)
1424  {
1425  // To consider: Implement this instead via calling lower_bound and
1426  // inspecting the result. The following is an implementation of this:
1427  // const iterator it(lower_bound(key));
1428  // return ((it.mpNode == &mAnchor) || mCompare(key, extractKey(it.mpNode->mValue))) ? iterator(&mAnchor) : it;
1429  // We don't currently implement the above because in practice people tend to call
1430  // find a lot with trees, but very uncommonly call lower_bound.
1431  extract_key extractKey;
1432 
1433  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
1434  node_type* pRangeEnd = (node_type*)&mAnchor; // Set it to the container end for now.
1435 
1436  while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
1437  {
1438  if(EASTL_LIKELY(!mCompare(extractKey(pCurrent->mValue), key))) // If pCurrent is >= key...
1439  {
1440  pRangeEnd = pCurrent;
1441  pCurrent = (node_type*)pCurrent->mpNodeLeft;
1442  }
1443  else
1444  {
1445  EASTL_VALIDATE_COMPARE(!mCompare(key, extractKey(pCurrent->mValue))); // Validate that the compare function is sane.
1446  pCurrent = (node_type*)pCurrent->mpNodeRight;
1447  }
1448  }
1449 
1450  if(EASTL_LIKELY((pRangeEnd != &mAnchor) && !mCompare(key, extractKey(pRangeEnd->mValue))))
1451  return iterator(pRangeEnd);
1452  return iterator((node_type*)&mAnchor);
1453  }
1454 
1455 
1456  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1457  inline typename rbtree<K, V, C, A, E, bM, bU>::const_iterator
1458  rbtree<K, V, C, A, E, bM, bU>::find(const key_type& key) const
1459  {
1460  typedef rbtree<K, V, C, A, E, bM, bU> rbtree_type;
1461  return const_iterator(const_cast<rbtree_type*>(this)->find(key));
1462  }
1463 
1464 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    template <typename U, typename Compare2>
    typename rbtree<K, V, C, A, E, bM, bU>::iterator
    rbtree<K, V, C, A, E, bM, bU>::find_as(const U& u, Compare2 compare2)
    {
        // Finds an element comparing equal to u under the user-supplied comparison
        // compare2, allowing lookups without constructing a full key_type object.
        // NOTE(review): presumably compare2 must induce the same ordering as
        // mCompare for the walk to be correct — confirm against the class docs.
        extract_key extractKey;

        node_type* pCurrent  = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
        node_type* pRangeEnd = (node_type*)&mAnchor;             // Set it to the container end for now.

        while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
        {
            if(EASTL_LIKELY(!compare2(extractKey(pCurrent->mValue), u))) // If pCurrent is >= u...
            {
                pRangeEnd = pCurrent; // Best candidate so far; look for a smaller one on the left.
                pCurrent  = (node_type*)pCurrent->mpNodeLeft;
            }
            else
            {
                EASTL_VALIDATE_COMPARE(!compare2(u, extractKey(pCurrent->mValue))); // Validate that the compare function is sane.
                pCurrent = (node_type*)pCurrent->mpNodeRight;
            }
        }

        // pRangeEnd is the first node >= u; it is a match only if additionally !(u < *pRangeEnd).
        if(EASTL_LIKELY((pRangeEnd != &mAnchor) && !compare2(u, extractKey(pRangeEnd->mValue))))
            return iterator(pRangeEnd);
        return iterator((node_type*)&mAnchor); // Not found; return end().
    }
1493 
1494 
1495  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1496  template <typename U, typename Compare2>
1498  rbtree<K, V, C, A, E, bM, bU>::find_as(const U& u, Compare2 compare2) const
1499  {
1500  typedef rbtree<K, V, C, A, E, bM, bU> rbtree_type;
1501  return const_iterator(const_cast<rbtree_type*>(this)->find_as(u, compare2));
1502  }
1503 
1504 
1505  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1506  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1507  rbtree<K, V, C, A, E, bM, bU>::lower_bound(const key_type& key)
1508  {
1509  extract_key extractKey;
1510 
1511  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
1512  node_type* pRangeEnd = (node_type*)&mAnchor; // Set it to the container end for now.
1513 
1514  while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
1515  {
1516  if(EASTL_LIKELY(!mCompare(extractKey(pCurrent->mValue), key))) // If pCurrent is >= key...
1517  {
1518  pRangeEnd = pCurrent;
1519  pCurrent = (node_type*)pCurrent->mpNodeLeft;
1520  }
1521  else
1522  {
1523  EASTL_VALIDATE_COMPARE(!mCompare(key, extractKey(pCurrent->mValue))); // Validate that the compare function is sane.
1524  pCurrent = (node_type*)pCurrent->mpNodeRight;
1525  }
1526  }
1527 
1528  return iterator(pRangeEnd);
1529  }
1530 
1531 
1532  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1533  inline typename rbtree<K, V, C, A, E, bM, bU>::const_iterator
1534  rbtree<K, V, C, A, E, bM, bU>::lower_bound(const key_type& key) const
1535  {
1536  typedef rbtree<K, V, C, A, E, bM, bU> rbtree_type;
1537  return const_iterator(const_cast<rbtree_type*>(this)->lower_bound(key));
1538  }
1539 
1540 
1541  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1542  typename rbtree<K, V, C, A, E, bM, bU>::iterator
1543  rbtree<K, V, C, A, E, bM, bU>::upper_bound(const key_type& key)
1544  {
1545  extract_key extractKey;
1546 
1547  node_type* pCurrent = (node_type*)mAnchor.mpNodeParent; // Start with the root node.
1548  node_type* pRangeEnd = (node_type*)&mAnchor; // Set it to the container end for now.
1549 
1550  while(EASTL_LIKELY(pCurrent)) // Do a walk down the tree.
1551  {
1552  if(EASTL_LIKELY(mCompare(key, extractKey(pCurrent->mValue)))) // If key is < pCurrent...
1553  {
1554  EASTL_VALIDATE_COMPARE(!mCompare(extractKey(pCurrent->mValue), key)); // Validate that the compare function is sane.
1555  pRangeEnd = pCurrent;
1556  pCurrent = (node_type*)pCurrent->mpNodeLeft;
1557  }
1558  else
1559  pCurrent = (node_type*)pCurrent->mpNodeRight;
1560  }
1561 
1562  return iterator(pRangeEnd);
1563  }
1564 
1565 
1566  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1567  inline typename rbtree<K, V, C, A, E, bM, bU>::const_iterator
1568  rbtree<K, V, C, A, E, bM, bU>::upper_bound(const key_type& key) const
1569  {
1570  typedef rbtree<K, V, C, A, E, bM, bU> rbtree_type;
1571  return const_iterator(const_cast<rbtree_type*>(this)->upper_bound(key));
1572  }
1573 
1574 
    // To do: Move this validate function entirely to a template-less implementation.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    bool rbtree<K, V, C, A, E, bM, bU>::validate() const
    {
        // Red-black trees have the following canonical properties which we validate here:
        //   1 Every node is either red or black.
        //   2 Every leaf (NULL) is black by definition. Any number of black nodes may appear in a sequence.
        //   3 If a node is red, then both its children are black. Thus, on any path from
        //     the root to a leaf, red nodes must not be adjacent.
        //   4 Every simple path from a node to a descendant leaf contains the same number of black nodes.
        //   5 The mnSize member of the tree must equal the number of nodes in the tree.
        //   6 The tree is sorted as per a conventional binary tree.
        //   7 The comparison function is sane; it obeys strict weak ordering. If mCompare(a,b) is true, then mCompare(b,a) must be false. Both cannot be true.

        extract_key extractKey;

        if(mnSize)
        {
            // Verify basic integrity.
            //if(!mAnchor.mpNodeParent || (mAnchor.mpNodeLeft == mAnchor.mpNodeRight))
            //    return false;             // Fix this for case of empty tree.

            // The anchor's left/right pointers cache the min/max nodes of the tree.
            if(mAnchor.mpNodeLeft != RBTreeGetMinChild(mAnchor.mpNodeParent))
                return false;

            if(mAnchor.mpNodeRight != RBTreeGetMaxChild(mAnchor.mpNodeParent))
                return false;

            // Black-node count along the path to the leftmost node; every other
            // root-to-leaf path must match it (property #4).
            const size_t nBlackCount   = RBTreeGetBlackCount(mAnchor.mpNodeParent, mAnchor.mpNodeLeft);
            size_type    nIteratedSize = 0;

            for(const_iterator it = begin(); it != end(); ++it, ++nIteratedSize)
            {
                const node_type* const pNode      = (const node_type*)it.mpNode;
                const node_type* const pNodeRight = (const node_type*)pNode->mpNodeRight;
                const node_type* const pNodeLeft  = (const node_type*)pNode->mpNodeLeft;

                // Verify #7 above.
                if(pNodeRight && mCompare(extractKey(pNodeRight->mValue), extractKey(pNode->mValue)) && mCompare(extractKey(pNode->mValue), extractKey(pNodeRight->mValue))) // Validate that the compare function is sane.
                    return false;

                // Verify #7 above.
                if(pNodeLeft && mCompare(extractKey(pNodeLeft->mValue), extractKey(pNode->mValue)) && mCompare(extractKey(pNode->mValue), extractKey(pNodeLeft->mValue))) // Validate that the compare function is sane.
                    return false;

                // Verify item #1 above.
                if((pNode->mColor != kRBTreeColorRed) && (pNode->mColor != kRBTreeColorBlack))
                    return false;

                // Verify item #3 above: a red node must not have a red child.
                if(pNode->mColor == kRBTreeColorRed)
                {
                    if((pNodeRight && (pNodeRight->mColor == kRBTreeColorRed)) ||
                       (pNodeLeft  && (pNodeLeft->mColor  == kRBTreeColorRed)))
                        return false;
                }

                // Verify item #6 above: right child not less than node, node not less than left child.
                if(pNodeRight && mCompare(extractKey(pNodeRight->mValue), extractKey(pNode->mValue)))
                    return false;

                if(pNodeLeft && mCompare(extractKey(pNode->mValue), extractKey(pNodeLeft->mValue)))
                    return false;

                if(!pNodeRight && !pNodeLeft) // If we are at a bottom node of the tree...
                {
                    // Verify item #4 above.
                    if(RBTreeGetBlackCount(mAnchor.mpNodeParent, pNode) != nBlackCount)
                        return false;
                }
            }

            // Verify item #5 above.
            if(nIteratedSize != mnSize)
                return false;

            return true;
        }
        else
        {
            // An empty tree must have the anchor's left/right pointers pointing back at itself.
            if((mAnchor.mpNodeLeft != &mAnchor) || (mAnchor.mpNodeRight != &mAnchor))
                return false;
        }

        return true;
    }
1661 
1662 
1663  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1664  inline int rbtree<K, V, C, A, E, bM, bU>::validate_iterator(const_iterator i) const
1665  {
1666  // To do: Come up with a more efficient mechanism of doing this.
1667 
1668  for(const_iterator temp = begin(), tempEnd = end(); temp != tempEnd; ++temp)
1669  {
1670  if(temp == i)
1672  }
1673 
1674  if(i == end())
1675  return (isf_valid | isf_current);
1676 
1677  return isf_none;
1678  }
1679 
1680 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline typename rbtree<K, V, C, A, E, bM, bU>::node_type*
    rbtree<K, V, C, A, E, bM, bU>::DoAllocateNode()
    {
        // Allocates raw, uninitialized storage for one node; the caller is
        // responsible for placement-constructing the contained value.
        return (node_type*)allocate_memory(mAllocator, sizeof(node_type), kValueAlignment, kValueAlignmentOffset);
    }
1687 
1688 
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    inline void rbtree<K, V, C, A, E, bM, bU>::DoFreeNode(node_type* pNode)
    {
        // Destroys the node's contents and returns its memory to the allocator.
        pNode->~node_type(); // Explicit destructor call, since construction was via placement new.
        EASTLFree(mAllocator, pNode, sizeof(node_type));
    }
1695 
1696 
    /// DoCreateNodeFromKey
    ///
    /// Allocates a node and constructs its stored value from 'key' alone.
    /// On success the node's link pointers and color are uninitialized
    /// (except in debug builds); on a throwing value constructor the node
    /// storage is released and the exception is rethrown.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::node_type*
    rbtree<K, V, C, A, E, bM, bU>::DoCreateNodeFromKey(const key_type& key)
    {
        // Note that this function intentionally leaves the node pointers uninitialized.
        // The caller would otherwise just turn right around and modify them, so there's
        // no point in us initializing them to anything (except in a debug build).
        node_type* const pNode = DoAllocateNode();

        #if EASTL_EXCEPTIONS_ENABLED
            try
            {
        #endif
                // Placement-construct the value in the raw node storage.
                ::new(&pNode->mValue) value_type(key);

        #if EASTL_EXCEPTIONS_ENABLED
            }
            catch(...)
            {
                // Value construction threw; release the node so it doesn't leak, then rethrow.
                DoFreeNode(pNode);
                throw;
            }
        #endif

        #if EASTL_DEBUG
            // Debug builds null/blacken the link fields so stale-pointer use is detectable.
            pNode->mpNodeRight = NULL;
            pNode->mpNodeLeft = NULL;
            pNode->mpNodeParent = NULL;
            pNode->mColor = kRBTreeColorBlack;
        #endif

        return pNode;
    }
1730 
1731 
1732  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1733  typename rbtree<K, V, C, A, E, bM, bU>::node_type*
1734  rbtree<K, V, C, A, E, bM, bU>::DoCreateNode(const value_type& value)
1735  {
1736  // Note that this function intentionally leaves the node pointers uninitialized.
1737  // The caller would otherwise just turn right around and modify them, so there's
1738  // no point in us initializing them to anything (except in a debug build).
1739  node_type* const pNode = DoAllocateNode();
1740 
1741  #if EASTL_EXCEPTIONS_ENABLED
1742  try
1743  {
1744  #endif
1745  ::new(&pNode->mValue) value_type(value);
1746  #if EASTL_EXCEPTIONS_ENABLED
1747  }
1748  catch(...)
1749  {
1750  DoFreeNode(pNode);
1751  throw;
1752  }
1753  #endif
1754 
1755  #if EASTL_DEBUG
1756  pNode->mpNodeRight = NULL;
1757  pNode->mpNodeLeft = NULL;
1758  pNode->mpNodeParent = NULL;
1759  pNode->mColor = kRBTreeColorBlack;
1760  #endif
1761 
1762  return pNode;
1763  }
1764 
1765 
1766  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1767  typename rbtree<K, V, C, A, E, bM, bU>::node_type*
1768  rbtree<K, V, C, A, E, bM, bU>::DoCreateNode(const node_type* pNodeSource, node_type* pNodeParent)
1769  {
1770  node_type* const pNode = DoCreateNode(pNodeSource->mValue);
1771 
1772  pNode->mpNodeRight = NULL;
1773  pNode->mpNodeLeft = NULL;
1774  pNode->mpNodeParent = pNodeParent;
1775  pNode->mColor = pNodeSource->mColor;
1776 
1777  return pNode;
1778  }
1779 
1780 
    /// DoCopySubtree
    ///
    /// Deep-copies the subtree rooted at pNodeSource, parenting the copy under
    /// pNodeDest, and returns the root of the new subtree. Right subtrees are
    /// copied recursively while the left spine is walked iteratively, so the
    /// recursion depth is bounded by the number of right edges rather than the
    /// full node count. If any node construction throws, the partially built
    /// copy is destroyed and the exception is rethrown.
    template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
    typename rbtree<K, V, C, A, E, bM, bU>::node_type*
    rbtree<K, V, C, A, E, bM, bU>::DoCopySubtree(const node_type* pNodeSource, node_type* pNodeDest)
    {
        // Clone the subtree root first; children are attached below.
        node_type* const pNewNodeRoot = DoCreateNode(pNodeSource, pNodeDest);

        #if EASTL_EXCEPTIONS_ENABLED
            try
            {
        #endif
                // Copy the right side of the tree recursively.
                if(pNodeSource->mpNodeRight)
                    pNewNodeRoot->mpNodeRight = DoCopySubtree((const node_type*)pNodeSource->mpNodeRight, pNewNodeRoot);

                node_type* pNewNodeLeft;

                // Walk the left spine iteratively: clone each left child, link it
                // under the previously cloned node, and recurse only for its right subtree.
                for(pNodeSource = (node_type*)pNodeSource->mpNodeLeft, pNodeDest = pNewNodeRoot;
                    pNodeSource;
                    pNodeSource = (node_type*)pNodeSource->mpNodeLeft, pNodeDest = pNewNodeLeft)
                {
                    pNewNodeLeft = DoCreateNode(pNodeSource, pNodeDest);

                    pNodeDest->mpNodeLeft = pNewNodeLeft;

                    // Copy the right side of the tree recursively.
                    if(pNodeSource->mpNodeRight)
                        pNewNodeLeft->mpNodeRight = DoCopySubtree((const node_type*)pNodeSource->mpNodeRight, pNewNodeLeft);
                }
        #if EASTL_EXCEPTIONS_ENABLED
            }
            catch(...)
            {
                // Tear down whatever portion of the copy was built, then rethrow.
                DoNukeSubtree(pNewNodeRoot);
                throw;
            }
        #endif

        return pNewNodeRoot;
    }
1820 
1821 
1822  template <typename K, typename V, typename C, typename A, typename E, bool bM, bool bU>
1823  void rbtree<K, V, C, A, E, bM, bU>::DoNukeSubtree(node_type* pNode)
1824  {
1825  while(pNode) // Recursively traverse the tree and destroy items as we go.
1826  {
1827  DoNukeSubtree((node_type*)pNode->mpNodeRight);
1828 
1829  node_type* const pNodeLeft = (node_type*)pNode->mpNodeLeft;
1830  DoFreeNode(pNode);
1831  pNode = pNodeLeft;
1832  }
1833  }
1834 
1835 
1836 
1838  // global operators
1840 
1841  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1842  inline bool operator==(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1843  {
1844  return (a.size() == b.size()) && eastl::equal(a.begin(), a.end(), b.begin());
1845  }
1846 
1847 
1848  // Note that in operator< we do comparisons based on the tree value_type with operator<() of the
1849  // value_type instead of the tree's Compare function. For set/multiset, the value_type is T, while
1850  // for map/multimap the value_type is a pair<Key, T>. operator< for pair can be seen by looking
1851  // utility.h, but it basically is uses the operator< for pair.first and pair.second. The C++ standard
1852  // appears to require this behaviour, whether intentionally or not. If anything, a good reason to do
1853  // this is for consistency. A map and a vector that contain the same items should compare the same.
1854  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1855  inline bool operator<(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1856  {
1857  return eastl::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end());
1858  }
1859 
1860 
1861  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1862  inline bool operator!=(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1863  {
1864  return !(a == b);
1865  }
1866 
1867 
1868  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1869  inline bool operator>(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1870  {
1871  return b < a;
1872  }
1873 
1874 
1875  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1876  inline bool operator<=(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1877  {
1878  return !(b < a);
1879  }
1880 
1881 
1882  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1883  inline bool operator>=(const rbtree<K, V, C, A, E, bM, bU>& a, const rbtree<K, V, C, A, E, bM, bU>& b)
1884  {
1885  return !(a < b);
1886  }
1887 
1888 
1889  template <typename K, typename V, typename A, typename C, typename E, bool bM, bool bU>
1890  inline void swap(rbtree<K, V, C, A, E, bM, bU>& a, rbtree<K, V, C, A, E, bM, bU>& b)
1891  {
1892  a.swap(b);
1893  }
1894 
1895 
1896 } // namespace eastl
1897 
1898 
1899 #ifdef _MSC_VER
1900  #pragma warning(pop)
1901 #endif
1902 
1903 
1904 #endif // Header include guard
/* NOTE(review): The text below is documentation-index residue appended after the
   header include guard during extraction; it is not part of the original source.
   It is fenced in this comment so the file remains well-formed. Confirm against
   the upstream EASTL red_black_tree.h and remove if appropriate.

EASTL_API void RBTreeErase(rbtree_node_base *pNode, rbtree_node_base *pNodeAnchor)
allocator_type mAllocator
Stores the count of nodes in the tree (not counting the anchor node).
EASTL_API size_t RBTreeGetBlackCount(const rbtree_node_base *pNodeTop, const rbtree_node_base *pNodeBottom)
The iterator is valid and points to the same element it did when created. For example, if an iterator points to vector::begin() but an element is inserted at the front, the iterator is valid but not current. Modification of elements in place do not make iterators non-current.
ForwardIterator lower_bound(ForwardIterator first, ForwardIterator last, const T &value)
EASTL_API rbtree_node_base * RBTreeIncrement(const rbtree_node_base *pNode)
size_type mnSize
This node acts as end() and its mpLeft points to begin(), and mpRight points to rbegin() (the last no...
insert_return_type insert(const value_type &value)
EASTL_API void RBTreeInsert(rbtree_node_base *pNode, rbtree_node_base *pNodeParent, rbtree_node_base *pNodeAnchor, RBTreeSide insertionSide)
iterator find_as(const U &u, Compare2 compare2)
void swap(T &a, T &b)
ForwardIterator upper_bound(ForwardIterator first, ForwardIterator last, const T &value)
InputIterator find(InputIterator first, InputIterator last, const T &value)
The iterator is valid, which means it is in the range of [begin, end].
bool lexicographical_compare(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2)
This is called none and not called invalid because it is not strictly the opposite of invalid...
void reset(MPI_Comm new_comm)
Function reset determines new parallel_size and parallel_rank. Flushes, closes, and reopens log files...
Definition: Env.cpp:1067
void * allocate_memory(Allocator &a, size_t n, size_t alignment, size_t alignmentOffset)
EASTL_API rbtree_node_base * RBTreeDecrement(const rbtree_node_base *pNode)
EA Standard Template Library.
bool equal(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2)
*/