3 * Copyright (c) 1996,1997
4 * Silicon Graphics Computer Systems, Inc.
7 * Moscow Center for SPARC Technology
12 * This material is provided "as is", with absolutely no warranty expressed
13 * or implied. Any use is at your own risk.
15 * Permission to use or copy this software for any purpose is hereby granted
16 * without fee, provided the above notices are retained on all copies.
17 * Permission to modify the code and to distribute modified code is granted,
18 * provided the above notices are retained, and a notice that the code was
19 * modified is included with the above copyright notice.
23 /* NOTE: This is an internal header file, included by other STL headers.
24 * You should not attempt to use it directly.
27 // rope<_CharT,_Alloc> is a sequence of _CharT.
28 // Ropes appear to be mutable, but update operations
29 // really copy enough of the data structure to leave the original
30 // valid. Thus ropes can be logically copied by just copying
33 #ifndef _STLP_INTERNAL_ROPE_H
34 #define _STLP_INTERNAL_ROPE_H
36 #ifndef _STLP_INTERNAL_ALGOBASE_H
37 # include <stl/_algobase.h>
40 #if !defined (_STLP_USE_NO_IOSTREAMS) && !defined (_STLP_INTERNAL_IOSFWD)
41 # include <stl/_iosfwd.h>
44 #ifndef _STLP_INTERNAL_ALLOC_H
45 # include <stl/_alloc.h>
48 #ifndef _STLP_INTERNAL_ITERATOR_H
49 # include <stl/_iterator.h>
52 #ifndef _STLP_INTERNAL_ALGO_H
53 # include <stl/_algo.h>
56 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
57 # include <stl/_function_base.h>
60 #ifndef _STLP_INTERNAL_NUMERIC_H
61 # include <stl/_numeric.h>
64 #ifndef _STLP_INTERNAL_HASH_FUN_H
65 # include <stl/_hash_fun.h>
68 #ifndef _STLP_CHAR_TRAITS_H
69 # include <stl/char_traits.h>
72 #ifndef _STLP_INTERNAL_THREADS_H
73 # include <stl/_threads.h>
76 #ifdef _STLP_SGI_THREADS
80 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
81 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
83 # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
88 // First a lot of forward declarations. The standard seems to require
89 // much stricter "declaration before use" than many of the implementations
91 template<class _CharT
, _STLP_DFL_TMPL_PARAM(_Alloc
, allocator
<_CharT
>) > class rope
;
92 template<class _CharT
, class _Alloc
> struct _Rope_RopeConcatenation
;
93 template<class _CharT
, class _Alloc
> struct _Rope_RopeRep
;
94 template<class _CharT
, class _Alloc
> struct _Rope_RopeLeaf
;
95 template<class _CharT
, class _Alloc
> struct _Rope_RopeFunction
;
96 template<class _CharT
, class _Alloc
> struct _Rope_RopeSubstring
;
97 template<class _CharT
, class _Alloc
> class _Rope_iterator
;
98 template<class _CharT
, class _Alloc
> class _Rope_const_iterator
;
99 template<class _CharT
, class _Alloc
> class _Rope_char_ref_proxy
;
100 template<class _CharT
, class _Alloc
> class _Rope_char_ptr_proxy
;
102 _STLP_MOVE_TO_PRIV_NAMESPACE
// Trait: is _CharT one of the basic character types?
// Primary template answers __false_type; the specializations that follow
// (for char, and wchar_t when available) answer __true_type, letting the
// rope implementation pick optimized paths for plain character types.
template <class _CharT>
struct _BasicCharType
{ typedef __false_type _Ret; };
108 struct _BasicCharType
<char> { typedef __true_type _Ret
; };
110 #ifdef _STLP_HAS_WCHAR_T
112 struct _BasicCharType
<wchar_t> { typedef __true_type _Ret
; };
115 // Some helpers, so we can use the power algorithm on ropes.
116 // See below for why this isn't local to the implementation.
118 // This uses a nonstandard refcount convention.
119 // The result has refcount 0.
120 template<class _CharT
, class _Alloc
>
121 struct _Rope_Concat_fn
122 : public binary_function
<rope
<_CharT
,_Alloc
>, rope
<_CharT
,_Alloc
>,
123 rope
<_CharT
,_Alloc
> > {
124 rope
<_CharT
,_Alloc
> operator() (const rope
<_CharT
,_Alloc
>& __x
,
125 const rope
<_CharT
,_Alloc
>& __y
) {
130 template <class _CharT
, class _Alloc
>
133 __identity_element(_Rope_Concat_fn
<_CharT
, _Alloc
>)
134 { return rope
<_CharT
,_Alloc
>(); }
136 _STLP_MOVE_TO_STD_NAMESPACE
139 template <class _CharT
>
140 inline void _S_construct_null_aux(_CharT
*__p
, const __true_type
&)
// Overload chosen when _CharT is NOT an integral type: place a
// default-constructed value at __p via the library's generic
// _STLP_STD::_Construct, since no zero "null character" value exists
// for non-integral element types.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
{ _STLP_STD::_Construct(__p); }
// Write a "null" element at __p, dispatching on whether _CharT is an
// integral type (per _IsIntegral): integral types take the __true_type
// overload declared above, everything else falls through to the generic
// __false_type overload that default-constructs in place.
// NOTE(review): the closing brace below was reconstructed — the original
// line is lost in this view of the file; confirm against the full header.
template <class _CharT>
inline void _S_construct_null(_CharT *__p) {
  typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
  _S_construct_null_aux(__p, _Char_Is_Integral());
}
153 // char_producers are logically functions that generate a section of
154 // a string. These can be converted to ropes. The resulting rope
155 // invokes the char_producer on demand. This allows, for example,
156 // files to be viewed as ropes without reading the entire file.
157 template <class _CharT
>
158 class char_producer
{
160 virtual ~char_producer() {}
161 virtual void operator()(size_t __start_pos
, size_t __len
,
162 _CharT
* __buffer
) = 0;
163 // Buffer should really be an arbitrary output iterator.
164 // That way we could flatten directly into an ostream, etc.
165 // This is thoroughly impossible, since iterator types don't
166 // have runtime descriptions.
171 // Sequence must provide an append operation that appends an
172 // array to the sequence. Sequence buffers are useful only if
173 // appending an entire array is cheaper than appending element by element.
174 // This is true for many string representations.
175 // This should perhaps inherit from ostream<sequence::value_type>
176 // and be implemented correspondingly, so that they can be used
177 // for formatted. For the sake of portability, we don't do this yet.
179 // For now, sequence buffers behave as output iterators. But they also
180 // behave a little like basic_ostringstream<sequence::value_type> and a
181 // little like containers.
183 template<class _Sequence
184 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
185 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
186 , size_t _Buf_sz
= 100
187 # if defined(__sgi) && !defined(__GNUC__)
188 # define __TYPEDEF_WORKAROUND
189 ,class _V
= typename
_Sequence::value_type
191 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
193 // The 3rd parameter works around a common compiler bug.
194 class sequence_buffer
: public iterator
<output_iterator_tag
, void, void, void, void> {
196 # ifndef __TYPEDEF_WORKAROUND
197 typedef typename
_Sequence::value_type value_type
;
198 typedef sequence_buffer
<_Sequence
199 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
200 defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
203 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
205 enum { _Buf_sz
= 100};
206 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
208 # else /* __TYPEDEF_WORKAROUND */
209 typedef _V value_type
;
210 typedef sequence_buffer
<_Sequence
, _Buf_sz
, _V
> _Self
;
211 # endif /* __TYPEDEF_WORKAROUND */
213 _Sequence
* _M_prefix
;
214 value_type _M_buffer
[_Buf_sz
];
218 _M_prefix
->append(_M_buffer
, _M_buffer
+ _M_buf_count
);
221 ~sequence_buffer() { flush(); }
222 sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
223 sequence_buffer(const _Self
& __x
) {
224 _M_prefix
= __x
._M_prefix
;
225 _M_buf_count
= __x
._M_buf_count
;
226 _STLP_STD::copy(__x
._M_buffer
, __x
._M_buffer
+ __x
._M_buf_count
, _M_buffer
);
228 sequence_buffer(_Self
& __x
) {
230 _M_prefix
= __x
._M_prefix
;
233 sequence_buffer(_Sequence
& __s
) : _M_prefix(&__s
), _M_buf_count(0) {}
234 _Self
& operator= (_Self
& __x
) {
236 _M_prefix
= __x
._M_prefix
;
240 _Self
& operator= (const _Self
& __x
) {
241 _M_prefix
= __x
._M_prefix
;
242 _M_buf_count
= __x
._M_buf_count
;
243 _STLP_STD::copy(__x
._M_buffer
, __x
._M_buffer
+ __x
._M_buf_count
, _M_buffer
);
246 void push_back(value_type __x
) {
247 if (_M_buf_count
< _Buf_sz
) {
248 _M_buffer
[_M_buf_count
] = __x
;
256 void append(const value_type
*__s
, size_t __len
) {
257 if (__len
+ _M_buf_count
<= _Buf_sz
) {
258 size_t __i
= _M_buf_count
;
260 for (; __j
< __len
; __i
++, __j
++) {
261 _M_buffer
[__i
] = __s
[__j
];
263 _M_buf_count
+= __len
;
264 } else if (0 == _M_buf_count
) {
265 _M_prefix
->append(__s
, __s
+ __len
);
271 _Self
& write(const value_type
*__s
, size_t __len
) {
275 _Self
& put(value_type __x
) {
279 _Self
& operator=(const value_type
& __rhs
) {
283 _Self
& operator*() { return *this; }
284 _Self
& operator++() { return *this; }
285 _Self
& operator++(int) { return *this; }
288 // The following should be treated as private, at least for now.
289 template<class _CharT
>
290 class _Rope_char_consumer
{
291 #if !defined (_STLP_MEMBER_TEMPLATES)
293 //Without member templates we have to use run-time parameterization.
294 // The symmetry with char_producer is accidental and temporary.
295 virtual ~_Rope_char_consumer() {}
296 virtual bool operator()(const _CharT
* __buffer
, size_t __len
) = 0;
301 // What follows should really be local to rope. Unfortunately,
302 // that doesn't work, since it makes it impossible to define generic
303 // equality on rope iterators. According to the draft standard, the
304 // template parameters for such an equality operator cannot be inferred
305 // from the occurrence of a member class as a parameter.
306 // (SGI compilers in fact allow this, but the __result wouldn't be
308 // Similarly, some of the static member functions are member functions
309 // only to avoid polluting the global namespace, and to circumvent
310 // restrictions on type inference for template functions.
314 // The internal data structure for representing a rope. This is
315 // private to the implementation. A rope is really just a pointer
318 // A few basic functions for manipulating this data structure
319 // are members of _RopeRep. Most of the more complex algorithms
320 // are implemented as rope members.
322 // Some of the static member functions of _RopeRep have identically
323 // named functions in rope that simply invoke the _RopeRep versions.
326 template<class _CharT
, class _Alloc
>
328 : public _Refcount_Base
330 typedef _Rope_RopeRep
<_CharT
, _Alloc
> _Self
;
335 // "__ROPE_DEPTH_SIZE" is set to one more then the "__ROPE_MAX_DEPTH".
336 // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
337 // but this addition causes the sunpro compiler to complain about
338 // multiple declarations during the initialization of "_S_min_len".
339 // Changed to be a fixed value and the sunpro compiler appears to
342 # define __ROPE_MAX_DEPTH 45
343 # define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
344 enum { _S_max_rope_depth
= __ROPE_MAX_DEPTH
};
345 enum _Tag
{_S_leaf
, _S_concat
, _S_substringfn
, _S_function
};
346 // Apparently needed by VC++
347 // The data fields of leaves are allocated with some
348 // extra space, to accommodate future growth and for basic
349 // character types, to hold a trailing eos character.
350 enum { _S_alloc_granularity
= 8 };
353 bool _M_is_balanced
:8;
355 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
356 typedef _Alloc allocator_type
;
358 allocator_type
get_allocator() const { return allocator_type(_M_size
); }
360 unsigned char _M_depth
;
361 _CharT
* _STLP_VOLATILE _M_c_string
;
362 _STLP_PRIV _STLP_alloc_proxy
<size_t, _CharT
, allocator_type
> _M_size
;
364 #ifdef _STLP_NO_ARROW_OPERATOR
365 _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {
366 # if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
367 _STLP_CHECK_RUNTIME_COMPATIBILITY();
372 /* Flattened version of string, if needed. */
374 /* If it's not 0, then the memory is owned */
376 /* In the case of a leaf, this may point to */
377 /* the same memory as the data field. */
378 _Rope_RopeRep(_Tag __t
, unsigned char __d
, bool __b
, size_t _p_size
,
379 allocator_type __a
) :
381 _M_tag(__t
), _M_is_balanced(__b
), _M_depth(__d
), _M_c_string(0), _M_size(__a
, _p_size
) {
382 #if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
383 _STLP_CHECK_RUNTIME_COMPATIBILITY();
387 typedef _STLP_TYPENAME _STLP_PRIV _BasicCharType
<_CharT
>::_Ret _IsBasicCharType
;
390 /* Please tell why this code is necessary if you uncomment it.
391 * Problem with it is that the rope implementation expects that _S_rounded_up_size(n)
392 * returns a size > n in order to store the terminating null character. When
393 * the instantiation type is not char or wchar_t this is not guaranteed, resulting in
396 static size_t _S_rounded_up_size_aux(size_t __n
, __true_type
const& /*_IsBasicCharType*/) {
397 // Allow slop for in-place expansion.
398 return (__n
+ _S_alloc_granularity
) & ~(_S_alloc_granularity
- 1);
401 static size_t _S_rounded_up_size_aux(size_t __n
, __false_type
const& /*_IsBasicCharType*/) {
402 // Allow slop for in-place expansion.
403 return (__n
+ _S_alloc_granularity
- 1) & ~(_S_alloc_granularity
- 1);
406 // fbp : moved from RopeLeaf
407 static size_t _S_rounded_up_size(size_t __n
)
408 //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
409 { return (__n
+ _S_alloc_granularity
) & ~(_S_alloc_granularity
- 1); }
411 static void _S_free_string( _CharT
* __s
, size_t __len
,
412 allocator_type __a
) {
413 _STLP_STD::_Destroy_Range(__s
, __s
+ __len
);
414 // This has to be a static member, so this gets a bit messy
415 # ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
416 __a
.deallocate(__s
, _S_rounded_up_size(__len
)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
418 __stl_alloc_rebind (__a
, (_CharT
*)0).deallocate(__s
, _S_rounded_up_size(__len
));
422 // Deallocate data section of a leaf.
423 // This shouldn't be a member function.
424 // But its hard to do anything else at the
425 // moment, because it's templatized w.r.t.
427 // Does nothing if __GC is defined.
428 void _M_free_c_string();
430 // Deallocate t. Assumes t is not 0.
431 void _M_unref_nonnil() {
432 if (_M_decr() == 0) _M_free_tree();
434 void _M_ref_nonnil() {
437 static void _S_unref(_Self
* __t
) {
439 __t
->_M_unref_nonnil();
442 static void _S_ref(_Self
* __t
) {
443 if (0 != __t
) __t
->_M_incr();
445 //static void _S_free_if_unref(_Self* __t) {
446 // if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
450 template<class _CharT
, class _Alloc
>
451 struct _Rope_RopeLeaf
: public _Rope_RopeRep
<_CharT
,_Alloc
> {
453 _CharT
* _M_data
; /* Not necessarily 0 terminated. */
454 /* The allocated size is */
455 /* _S_rounded_up_size(size), except */
456 /* in the GC case, in which it */
457 /* doesn't matter. */
459 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
460 typedef typename
_RopeRep::_IsBasicCharType _IsBasicCharType
;
461 void _M_init(__true_type
const& /*_IsBasicCharType*/) {
462 this->_M_c_string
= _M_data
;
464 void _M_init(__false_type
const& /*_IsBasicCharType*/) {}
467 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
468 typedef typename
_RopeRep::allocator_type allocator_type
;
470 _Rope_RopeLeaf( _CharT
* __d
, size_t _p_size
, allocator_type __a
)
471 : _Rope_RopeRep
<_CharT
,_Alloc
>(_RopeRep::_S_leaf
, 0, true, _p_size
, __a
),
473 _STLP_ASSERT(_p_size
> 0)
474 _M_init(_IsBasicCharType());
477 # ifdef _STLP_NO_ARROW_OPERATOR
479 _Rope_RopeLeaf(const _Rope_RopeLeaf
<_CharT
, _Alloc
>& ) {}
482 // The constructor assumes that d has been allocated with
483 // the proper allocator and the properly padded size.
484 // In contrast, the destructor deallocates the data:
486 if (_M_data
!= this->_M_c_string
) {
487 this->_M_free_c_string();
489 _RopeRep::_S_free_string(_M_data
, this->_M_size
._M_data
, this->get_allocator());
493 template<class _CharT
, class _Alloc
>
494 struct _Rope_RopeConcatenation
: public _Rope_RopeRep
<_CharT
, _Alloc
> {
496 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
501 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
502 typedef typename
_RopeRep::allocator_type allocator_type
;
503 _Rope_RopeConcatenation(_RopeRep
* __l
, _RopeRep
* __r
, allocator_type __a
)
504 : _Rope_RopeRep
<_CharT
,_Alloc
>(_RopeRep::_S_concat
,
505 (max
)(__l
->_M_depth
, __r
->_M_depth
) + 1, false,
506 __l
->_M_size
._M_data
+ __r
->_M_size
._M_data
, __a
), _M_left(__l
), _M_right(__r
)
508 # ifdef _STLP_NO_ARROW_OPERATOR
509 _Rope_RopeConcatenation() {}
510 _Rope_RopeConcatenation(const _Rope_RopeConcatenation
<_CharT
, _Alloc
>&) {}
513 ~_Rope_RopeConcatenation() {
514 this->_M_free_c_string();
515 _M_left
->_M_unref_nonnil();
516 _M_right
->_M_unref_nonnil();
520 template <class _CharT
, class _Alloc
>
521 struct _Rope_RopeFunction
: public _Rope_RopeRep
<_CharT
, _Alloc
> {
523 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
525 char_producer
<_CharT
>* _M_fn
;
527 * Char_producer is owned by the
528 * rope and should be explicitly
529 * deleted when the rope becomes
532 bool _M_delete_when_done
;
533 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
534 typedef typename _Rope_RopeRep
<_CharT
,_Alloc
>::allocator_type allocator_type
;
535 # ifdef _STLP_NO_ARROW_OPERATOR
536 _Rope_RopeFunction() {}
537 _Rope_RopeFunction(const _Rope_RopeFunction
<_CharT
, _Alloc
>& ) {}
540 _Rope_RopeFunction(char_producer
<_CharT
>* __f
, size_t _p_size
,
541 bool __d
, allocator_type __a
)
542 : _Rope_RopeRep
<_CharT
,_Alloc
>(_RopeRep::_S_function
, 0, true, _p_size
, __a
), _M_fn(__f
)
543 , _M_delete_when_done(__d
)
544 { _STLP_ASSERT(_p_size
> 0) }
546 ~_Rope_RopeFunction() {
547 this->_M_free_c_string();
548 if (_M_delete_when_done
) {
555 * Substring results are usually represented using just
556 * concatenation nodes. But in the case of very long flat ropes
557 * or ropes with a functional representation that isn't practical.
558 * In that case, we represent the __result as a special case of
559 * RopeFunction, whose char_producer points back to the rope itself.
560 * In all cases except repeated substring operations and
561 * deallocation, we treat the __result as a RopeFunction.
563 template<class _CharT
, class _Alloc
>
564 struct _Rope_RopeSubstring
: public char_producer
<_CharT
>, public _Rope_RopeFunction
<_CharT
,_Alloc
> {
566 // XXX this whole class should be rewritten.
567 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
568 _RopeRep
*_M_base
; // not 0
570 /* virtual */ void operator()(size_t __start_pos
, size_t __req_len
,
572 typedef _Rope_RopeFunction
<_CharT
,_Alloc
> _RopeFunction
;
573 typedef _Rope_RopeLeaf
<_CharT
,_Alloc
> _RopeLeaf
;
574 switch (_M_base
->_M_tag
) {
575 case _RopeRep::_S_function
:
576 case _RopeRep::_S_substringfn
:
578 char_producer
<_CharT
>* __fn
=
579 __STATIC_CAST(_RopeFunction
*, _M_base
)->_M_fn
;
580 _STLP_ASSERT(__start_pos
+ __req_len
<= this->_M_size
._M_data
)
581 _STLP_ASSERT(_M_start
+ this->_M_size
._M_data
<= _M_base
->_M_size
._M_data
)
582 (*__fn
)(__start_pos
+ _M_start
, __req_len
, __buffer
);
585 case _RopeRep::_S_leaf
:
588 __STATIC_CAST(_RopeLeaf
*, _M_base
)->_M_data
;
589 _STLP_PRIV
__ucopy_n(__s
+ __start_pos
+ _M_start
, __req_len
, __buffer
);
598 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
599 typedef typename
_RopeRep::allocator_type allocator_type
;
601 _Rope_RopeSubstring(_RopeRep
* __b
, size_t __s
, size_t __l
, allocator_type __a
)
602 : _Rope_RopeFunction
<_CharT
,_Alloc
>(this, __l
, false, __a
),
603 _M_base(__b
), _M_start(__s
) {
604 _STLP_ASSERT(__l
> 0)
605 _STLP_ASSERT(__s
+ __l
<= __b
->_M_size
._M_data
)
606 _M_base
->_M_ref_nonnil();
607 this->_M_tag
= _RopeRep::_S_substringfn
;
609 virtual ~_Rope_RopeSubstring()
610 { _M_base
->_M_unref_nonnil(); }
614 * Self-destructing pointers to Rope_rep.
615 * These are not conventional smart pointers. Their
616 * only purpose in life is to ensure that unref is called
617 * on the pointer either at normal exit or if an exception
618 * is raised. It is the caller's responsibility to
619 * adjust reference counts when these pointers are initialized
620 * or assigned to. (This convention significantly reduces
621 * the number of potentially expensive reference count
624 template<class _CharT
, class _Alloc
>
625 struct _Rope_self_destruct_ptr
{
626 _Rope_RopeRep
<_CharT
,_Alloc
>* _M_ptr
;
627 ~_Rope_self_destruct_ptr()
628 { _Rope_RopeRep
<_CharT
,_Alloc
>::_S_unref(_M_ptr
); }
629 # ifdef _STLP_USE_EXCEPTIONS
630 _Rope_self_destruct_ptr() : _M_ptr(0) {}
632 _Rope_self_destruct_ptr() {}
634 _Rope_self_destruct_ptr(_Rope_RopeRep
<_CharT
,_Alloc
>* __p
) : _M_ptr(__p
) {}
635 _Rope_RopeRep
<_CharT
,_Alloc
>& operator*() { return *_M_ptr
; }
636 _Rope_RopeRep
<_CharT
,_Alloc
>* operator->() { return _M_ptr
; }
637 operator _Rope_RopeRep
<_CharT
,_Alloc
>*() { return _M_ptr
; }
638 _Rope_self_destruct_ptr
<_CharT
, _Alloc
>&
639 operator= (_Rope_RopeRep
<_CharT
,_Alloc
>* __x
)
640 { _M_ptr
= __x
; return *this; }
644 * Dereferencing a nonconst iterator has to return something
645 * that behaves almost like a reference. It's not possible to
646 * return an actual reference since assignment requires extra
647 * work. And we would get into the same problems as with the
648 * CD2 version of basic_string.
650 template<class _CharT
, class _Alloc
>
651 class _Rope_char_ref_proxy
{
652 typedef _Rope_char_ref_proxy
<_CharT
, _Alloc
> _Self
;
653 friend class rope
<_CharT
,_Alloc
>;
654 friend class _Rope_iterator
<_CharT
,_Alloc
>;
655 friend class _Rope_char_ptr_proxy
<_CharT
,_Alloc
>;
656 typedef _Rope_self_destruct_ptr
<_CharT
,_Alloc
> _Self_destruct_ptr
;
657 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
658 typedef rope
<_CharT
,_Alloc
> _My_rope
;
661 bool _M_current_valid
;
662 _My_rope
* _M_root
; // The whole rope.
664 _Rope_char_ref_proxy(_My_rope
* __r
, size_t __p
) :
665 _M_pos(__p
), _M_current_valid(false), _M_root(__r
) {}
666 _Rope_char_ref_proxy(const _Self
& __x
) :
667 _M_pos(__x
._M_pos
), _M_current_valid(false), _M_root(__x
._M_root
) {}
668 // Don't preserve cache if the reference can outlive the
669 // expression. We claim that's not possible without calling
670 // a copy constructor or generating reference to a proxy
671 // reference. We declare the latter to have undefined semantics.
672 _Rope_char_ref_proxy(_My_rope
* __r
, size_t __p
, _CharT __c
)
673 : _M_pos(__p
), _M_current(__c
), _M_current_valid(true), _M_root(__r
) {}
674 inline operator _CharT () const;
675 _Self
& operator= (_CharT __c
);
676 _Rope_char_ptr_proxy
<_CharT
, _Alloc
> operator& () const;
677 _Self
& operator= (const _Self
& __c
) {
678 return operator=((_CharT
)__c
);
682 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
683 template<class _CharT
, class __Alloc
>
684 inline void swap(_Rope_char_ref_proxy
<_CharT
, __Alloc
> __a
,
685 _Rope_char_ref_proxy
<_CharT
, __Alloc
> __b
) {
691 // There is no really acceptable way to handle this. The default
692 // definition of swap doesn't work for proxy references.
693 // It can't really be made to work, even with ugly hacks, since
694 // the only unusual operation it uses is the copy constructor, which
695 // is needed for other purposes. We provide a macro for
696 // full specializations, and instantiate the most common case.
697 # define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
698 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
699 _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
700 _CharT __tmp = __a; \
705 _ROPE_SWAP_SPECIALIZATION(char, allocator
<char>)
707 # ifndef _STLP_NO_WCHAR_T
708 _ROPE_SWAP_SPECIALIZATION(wchar_t, allocator
<wchar_t>)
711 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
713 template<class _CharT
, class _Alloc
>
714 class _Rope_char_ptr_proxy
{
715 // XXX this class should be rewritten.
717 typedef _Rope_char_ptr_proxy
<_CharT
, _Alloc
> _Self
;
718 friend class _Rope_char_ref_proxy
<_CharT
,_Alloc
>;
720 rope
<_CharT
,_Alloc
>* _M_root
; // The whole rope.
722 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy
<_CharT
,_Alloc
>& __x
)
723 : _M_pos(__x
._M_pos
), _M_root(__x
._M_root
) {}
724 _Rope_char_ptr_proxy(const _Self
& __x
)
725 : _M_pos(__x
._M_pos
), _M_root(__x
._M_root
) {}
726 _Rope_char_ptr_proxy() {}
727 _Rope_char_ptr_proxy(_CharT
* __x
) : _M_pos(0), _M_root(0) {
728 _STLP_ASSERT(0 == __x
)
730 _Self
& operator= (const _Self
& __x
) {
732 _M_root
= __x
._M_root
;
736 _Rope_char_ref_proxy
<_CharT
,_Alloc
> operator*() const {
737 return _Rope_char_ref_proxy
<_CharT
,_Alloc
>(_M_root
, _M_pos
);
744 * Unlike in the C version, we cache only part of the stack
745 * for rope iterators, since they must be efficiently copyable.
746 * When we run out of cache, we have to reconstruct the iterator
748 * Pointers from iterators are not included in reference counts.
749 * Iterators are assumed to be thread private. Ropes can
752 template<class _CharT
, class _Alloc
>
753 class _Rope_iterator_base
754 /* : public random_access_iterator<_CharT, ptrdiff_t> */
756 friend class rope
<_CharT
,_Alloc
>;
757 typedef _Rope_iterator_base
<_CharT
, _Alloc
> _Self
;
758 typedef _Rope_RopeConcatenation
<_CharT
,_Alloc
> _RopeConcat
;
760 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
762 enum { _S_path_cache_len
= 4 }; // Must be <= 9 because of _M_path_direction.
763 enum { _S_iterator_buf_len
= 15 };
764 size_t _M_current_pos
;
767 // Starting position for current leaf
769 // Buffer possibly containing current char.
770 _CharT
* _M_buf_start
;
771 // Pointer to current char in buffer, != 0 ==> buffer valid.
773 // One past __last valid char in buffer.
776 // What follows is the path cache. We go out of our
777 // way to make this compact.
778 // Path_end contains the bottom section of the path from
779 // the root to the current leaf.
781 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
782 _RopeRep
const*_M_data
[4];
784 _RopeRep
const*_M_data
[_S_path_cache_len
];
787 // Last valid __pos in path_end;
788 // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
789 // point to concatenation nodes.
791 // (_M_path_directions >> __i) & 1 is 1
792 // if we got from _M_path_end[leaf_index - __i - 1]
793 // to _M_path_end[leaf_index - __i] by going to the
794 // __right. Assumes path_cache_len <= 9.
795 unsigned char _M_path_directions
;
796 // Short buffer for surrounding chars.
797 // This is useful primarily for
798 // RopeFunctions. We put the buffer
799 // here to avoid locking in the
800 // multithreaded case.
801 // The cached path is generally assumed to be valid
802 // only if the buffer is valid.
804 # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
807 _CharT _M_data
[_S_iterator_buf_len
];
811 // Set buffer contents given path cache.
812 static void _S_setbuf(_Rope_iterator_base
<_CharT
, _Alloc
>& __x
);
813 // Set buffer contents and path cache.
814 static void _S_setcache(_Rope_iterator_base
<_CharT
, _Alloc
>& __x
);
815 // As above, but assumes path cache is valid for previous posn.
816 static void _S_setcache_for_incr(_Rope_iterator_base
<_CharT
, _Alloc
>& __x
);
817 _Rope_iterator_base() {}
818 _Rope_iterator_base(_RopeRep
* __root
, size_t __pos
)
819 : _M_current_pos(__pos
),_M_root(__root
), _M_buf_ptr(0) {}
820 void _M_incr(size_t __n
);
821 void _M_decr(size_t __n
);
823 size_t index() const { return _M_current_pos
; }
825 void _M_copy_buf(const _Self
& __x
) {
826 _M_tmp_buf
= __x
._M_tmp_buf
;
827 if (__x
._M_buf_start
== __x
._M_tmp_buf
._M_data
) {
828 _M_buf_start
= _M_tmp_buf
._M_data
;
829 _M_buf_end
= _M_buf_start
+ (__x
._M_buf_end
- __x
._M_buf_start
);
830 _M_buf_ptr
= _M_buf_start
+ (__x
._M_buf_ptr
- __x
._M_buf_start
);
832 _M_buf_end
= __x
._M_buf_end
;
837 _Rope_iterator_base(const _Self
& __x
) :
838 _M_current_pos(__x
._M_current_pos
),
839 _M_root(__x
._M_root
),
840 _M_leaf_pos( __x
._M_leaf_pos
),
841 _M_buf_start(__x
._M_buf_start
),
842 _M_buf_ptr(__x
._M_buf_ptr
),
843 _M_path_end(__x
._M_path_end
),
844 _M_leaf_index(__x
._M_leaf_index
),
845 _M_path_directions(__x
._M_path_directions
)
847 if (0 != __x
._M_buf_ptr
) {
851 _Self
& operator = (const _Self
& __x
)
853 _M_current_pos
= __x
._M_current_pos
;
854 _M_root
= __x
._M_root
;
855 _M_buf_start
= __x
._M_buf_start
;
856 _M_buf_ptr
= __x
._M_buf_ptr
;
857 _M_path_end
= __x
._M_path_end
;
858 _M_leaf_index
= __x
._M_leaf_index
;
859 _M_path_directions
= __x
._M_path_directions
;
860 _M_leaf_pos
= __x
._M_leaf_pos
;
861 if (0 != __x
._M_buf_ptr
) {
868 template<class _CharT
, class _Alloc
> class _Rope_iterator
;
870 template<class _CharT
, class _Alloc
>
871 class _Rope_const_iterator
: public _Rope_iterator_base
<_CharT
,_Alloc
> {
872 friend class rope
<_CharT
,_Alloc
>;
873 typedef _Rope_const_iterator
<_CharT
, _Alloc
> _Self
;
874 typedef _Rope_iterator_base
<_CharT
,_Alloc
> _Base
;
877 # ifndef _STLP_HAS_NO_NAMESPACES
878 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
879 // The one from the base class may not be directly visible.
881 _Rope_const_iterator(const _RopeRep
* __root
, size_t __pos
):
882 _Rope_iterator_base
<_CharT
,_Alloc
>(__CONST_CAST(_RopeRep
*,__root
), __pos
)
883 // Only nonconst iterators modify root ref count
886 typedef _CharT reference
; // Really a value. Returning a reference
887 // Would be a mess, since it would have
888 // to be included in refcount.
889 typedef const _CharT
* pointer
;
890 typedef _CharT value_type
;
891 typedef ptrdiff_t difference_type
;
892 typedef random_access_iterator_tag iterator_category
;
895 _Rope_const_iterator() {}
896 _Rope_const_iterator(const _Self
& __x
) :
897 _Rope_iterator_base
<_CharT
,_Alloc
>(__x
) { }
898 _Rope_const_iterator(const _Rope_iterator
<_CharT
,_Alloc
>& __x
):
899 _Rope_iterator_base
<_CharT
,_Alloc
>(__x
) {}
900 _Rope_const_iterator(const rope
<_CharT
,_Alloc
>& __r
, size_t __pos
) :
901 _Rope_iterator_base
<_CharT
,_Alloc
>(__r
._M_tree_ptr
._M_data
, __pos
) {}
902 _Self
& operator= (const _Self
& __x
) {
903 _Base::operator=(__x
);
906 reference
operator*() {
907 if (0 == this->_M_buf_ptr
)
908 #if !defined (__DMC__)
911 { _Rope_iterator_base
<_CharT
, _Alloc
>* __x
= this; _S_setcache(*__x
); }
913 return *(this->_M_buf_ptr
);
917 if ( this->_M_buf_ptr
!= 0 ) {
918 _CharT
*__next
= this->_M_buf_ptr
+ 1;
919 if ( __next
< this->_M_buf_end
) {
920 this->_M_buf_ptr
= __next
;
921 ++this->_M_current_pos
;
928 _Self
& operator+=(ptrdiff_t __n
) {
936 _Self
& operator--() {
940 _Self
& operator-=(ptrdiff_t __n
) {
948 _Self
operator++(int) {
949 size_t __old_pos
= this->_M_current_pos
;
951 return _Rope_const_iterator
<_CharT
,_Alloc
>(this->_M_root
, __old_pos
);
952 // This makes a subsequent dereference expensive.
953 // Perhaps we should instead copy the iterator
954 // if it has a valid cache?
956 _Self
operator--(int) {
957 size_t __old_pos
= this->_M_current_pos
;
959 return _Rope_const_iterator
<_CharT
,_Alloc
>(this->_M_root
, __old_pos
);
961 inline reference
operator[](size_t __n
);
964 template<class _CharT
, class _Alloc
>
965 class _Rope_iterator
: public _Rope_iterator_base
<_CharT
,_Alloc
> {
966 friend class rope
<_CharT
,_Alloc
>;
967 typedef _Rope_iterator
<_CharT
, _Alloc
> _Self
;
968 typedef _Rope_iterator_base
<_CharT
,_Alloc
> _Base
;
969 typedef _Rope_RopeRep
<_CharT
,_Alloc
> _RopeRep
;
972 rope
<_CharT
,_Alloc
>* _M_root_rope
;
973 // root is treated as a cached version of this,
974 // and is used to detect changes to the underlying
976 // Root is included in the reference count.
977 // This is necessary so that we can detect changes reliably.
978 // Unfortunately, it requires careful bookkeeping for the
980 _Rope_iterator(rope
<_CharT
,_Alloc
>* __r
, size_t __pos
);
984 typedef _Rope_char_ref_proxy
<_CharT
,_Alloc
> reference
;
985 typedef _Rope_char_ref_proxy
<_CharT
,_Alloc
>* pointer
;
986 typedef _CharT value_type
;
987 typedef ptrdiff_t difference_type
;
988 typedef random_access_iterator_tag iterator_category
;
990 ~_Rope_iterator() { //*TY 5/6/00 - added dtor to balance reference count
991 _RopeRep::_S_unref(this->_M_root
);
994 rope
<_CharT
,_Alloc
>& container() { return *_M_root_rope
; }
996 this->_M_root
= 0; // Needed for reference counting.
998 _Rope_iterator(const _Self
& __x
) :
999 _Rope_iterator_base
<_CharT
,_Alloc
>(__x
) {
1000 _M_root_rope
= __x
._M_root_rope
;
1001 _RopeRep::_S_ref(this->_M_root
);
1003 _Rope_iterator(rope
<_CharT
,_Alloc
>& __r
, size_t __pos
);
1004 _Self
& operator= (const _Self
& __x
) {
1005 _RopeRep
* __old
= this->_M_root
;
1006 _RopeRep::_S_ref(__x
._M_root
);
1007 _Base::operator=(__x
);
1008 _M_root_rope
= __x
._M_root_rope
;
1009 _RopeRep::_S_unref(__old
);
1012 reference
operator*() {
1014 if (0 == this->_M_buf_ptr
) {
1015 return reference(_M_root_rope
, this->_M_current_pos
);
1017 return reference(_M_root_rope
, this->_M_current_pos
, *(this->_M_buf_ptr
));
1020 _Self
& operator++() {
1024 _Self
& operator+=(ptrdiff_t __n
) {
1028 this->_M_decr(-__n
);
1032 _Self
& operator--() {
1036 _Self
& operator-=(ptrdiff_t __n
) {
1040 this->_M_incr(-__n
);
1044 _Self
operator++(int) {
1045 size_t __old_pos
= this->_M_current_pos
;
1047 return _Self(_M_root_rope
, __old_pos
);
1049 _Self
operator--(int) {
1050 size_t __old_pos
= this->_M_current_pos
;
1052 return _Self(_M_root_rope
, __old_pos
);
1054 reference
operator[](ptrdiff_t __n
) {
1055 return reference(_M_root_rope
, this->_M_current_pos
+ __n
);
1059 # ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
1060 template <class _CharT
, class _Alloc
>
1061 inline random_access_iterator_tag
1062 iterator_category(const _Rope_iterator
<_CharT
,_Alloc
>&) { return random_access_iterator_tag();}
1063 template <class _CharT
, class _Alloc
>
1064 inline _CharT
* value_type(const _Rope_iterator
<_CharT
,_Alloc
>&) { return 0; }
1065 template <class _CharT
, class _Alloc
>
1066 inline ptrdiff_t* distance_type(const _Rope_iterator
<_CharT
,_Alloc
>&) { return 0; }
1067 template <class _CharT
, class _Alloc
>
1068 inline random_access_iterator_tag
1069 iterator_category(const _Rope_const_iterator
<_CharT
,_Alloc
>&) { return random_access_iterator_tag(); }
1070 template <class _CharT
, class _Alloc
>
1071 inline _CharT
* value_type(const _Rope_const_iterator
<_CharT
,_Alloc
>&) { return 0; }
1072 template <class _CharT
, class _Alloc
>
1073 inline ptrdiff_t* distance_type(const _Rope_const_iterator
<_CharT
,_Alloc
>&) { return 0; }
1074 #endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
1076 template <class _CharT
, class _Alloc
, class _CharConsumer
>
1077 bool _S_apply_to_pieces(_CharConsumer
& __c
,
1078 _Rope_RopeRep
<_CharT
, _Alloc
> *__r
,
1079 size_t __begin
, size_t __end
);
1080 // begin and end are assumed to be in range.
1082 template <class _CharT
, class _Alloc
>
1084 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
1085 : public __stlport_class
<rope
<_CharT
, _Alloc
> >
1088 typedef rope
<_CharT
,_Alloc
> _Self
;
1090 typedef _CharT value_type
;
1091 typedef ptrdiff_t difference_type
;
1092 typedef size_t size_type
;
1093 typedef _CharT const_reference
;
1094 typedef const _CharT
* const_pointer
;
1095 typedef _Rope_iterator
<_CharT
,_Alloc
> iterator
;
1096 typedef _Rope_const_iterator
<_CharT
,_Alloc
> const_iterator
;
1097 typedef _Rope_char_ref_proxy
<_CharT
,_Alloc
> reference
;
1098 typedef _Rope_char_ptr_proxy
<_CharT
,_Alloc
> pointer
;
1100 friend class _Rope_iterator
<_CharT
,_Alloc
>;
1101 friend class _Rope_const_iterator
<_CharT
,_Alloc
>;
1102 friend struct _Rope_RopeRep
<_CharT
,_Alloc
>;
1103 friend class _Rope_iterator_base
<_CharT
,_Alloc
>;
1104 friend class _Rope_char_ptr_proxy
<_CharT
,_Alloc
>;
1105 friend class _Rope_char_ref_proxy
<_CharT
,_Alloc
>;
1106 friend struct _Rope_RopeSubstring
<_CharT
,_Alloc
>;
1108 _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS
;
1111 typedef _CharT
* _Cstrptr
;
1113 static _CharT _S_empty_c_str
[1];
1115 enum { _S_copy_max
= 23 };
1116 // For strings shorter than _S_copy_max, we copy to
1119 typedef _Rope_RopeRep
<_CharT
, _Alloc
> _RopeRep
;
1120 typedef typename
_RopeRep::_IsBasicCharType _IsBasicCharType
;
1123 _STLP_FORCE_ALLOCATORS(_CharT
, _Alloc
)
1124 typedef _Alloc allocator_type
;
1127 // The only data member of a rope:
1128 _STLP_PRIV _STLP_alloc_proxy
<_RopeRep
*, _CharT
, allocator_type
> _M_tree_ptr
;
1131 allocator_type
get_allocator() const { return allocator_type(_M_tree_ptr
); }
1134 typedef _Rope_RopeConcatenation
<_CharT
,_Alloc
> _RopeConcatenation
;
1135 typedef _Rope_RopeLeaf
<_CharT
,_Alloc
> _RopeLeaf
;
1136 typedef _Rope_RopeFunction
<_CharT
,_Alloc
> _RopeFunction
;
1137 typedef _Rope_RopeSubstring
<_CharT
,_Alloc
> _RopeSubstring
;
1139 // Retrieve a character at the indicated position.
1140 static _CharT
_S_fetch(_RopeRep
* __r
, size_type __pos
);
1142 // Obtain a pointer to the character at the indicated position.
1143 // The pointer can be used to change the character.
1144 // If such a pointer cannot be produced, as is frequently the
1145 // case, 0 is returned instead.
1146 // (Returns nonzero only if all nodes in the path have a refcount
1148 static _CharT
* _S_fetch_ptr(_RopeRep
* __r
, size_type __pos
);
1150 static void _S_unref(_RopeRep
* __t
) {
1151 _RopeRep::_S_unref(__t
);
1153 static void _S_ref(_RopeRep
* __t
) {
1154 _RopeRep::_S_ref(__t
);
1157 typedef _Rope_self_destruct_ptr
<_CharT
,_Alloc
> _Self_destruct_ptr
;
1159 // _Result is counted in refcount.
1160 static _RopeRep
* _S_substring(_RopeRep
* __base
,
1161 size_t __start
, size_t __endp1
);
1163 static _RopeRep
* _S_concat_char_iter(_RopeRep
* __r
,
1164 const _CharT
* __iter
, size_t __slen
);
1165 // Concatenate rope and char ptr, copying __s.
1166 // Should really take an arbitrary iterator.
1167 // Result is counted in refcount.
1168 static _RopeRep
* _S_destr_concat_char_iter(_RopeRep
* __r
,
1169 const _CharT
* __iter
, size_t __slen
);
1170 // As above, but one reference to __r is about to be
1171 // destroyed. Thus the pieces may be recycled if all
1172 // relevant reference counts are 1.
1174 // General concatenation on _RopeRep. _Result
1175 // has refcount of 1. Adjusts argument refcounts.
1176 static _RopeRep
* _S_concat_rep(_RopeRep
* __left
, _RopeRep
* __right
);
1179 #if defined (_STLP_MEMBER_TEMPLATES)
1180 template <class _CharConsumer
>
1182 typedef _Rope_char_consumer
<_CharT
> _CharConsumer
;
1184 void apply_to_pieces(size_t __begin
, size_t __end
,
1185 _CharConsumer
& __c
) const
1186 { _S_apply_to_pieces(__c
, _M_tree_ptr
._M_data
, __begin
, __end
); }
1190 static size_t _S_rounded_up_size(size_t __n
)
1191 { return _RopeRep::_S_rounded_up_size(__n
); }
1193 // Allocate and construct a RopeLeaf using the supplied allocator
1194 // Takes ownership of s instead of copying.
1195 static _RopeLeaf
* _S_new_RopeLeaf(_CharT
*__s
,
1196 size_t _p_size
, allocator_type __a
) {
1197 _RopeLeaf
* __space
= _STLP_CREATE_ALLOCATOR(allocator_type
, __a
,
1198 _RopeLeaf
).allocate(1);
1200 new(__space
) _RopeLeaf(__s
, _p_size
, __a
);
1202 _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type
,__a
,
1203 _RopeLeaf
).deallocate(__space
, 1))
1207 static _RopeConcatenation
* _S_new_RopeConcatenation(_RopeRep
* __left
, _RopeRep
* __right
,
1208 allocator_type __a
) {
1209 _RopeConcatenation
* __space
= _STLP_CREATE_ALLOCATOR(allocator_type
, __a
,
1210 _RopeConcatenation
).allocate(1);
1211 return new(__space
) _RopeConcatenation(__left
, __right
, __a
);
1214 static _RopeFunction
* _S_new_RopeFunction(char_producer
<_CharT
>* __f
,
1215 size_t _p_size
, bool __d
, allocator_type __a
) {
1216 _RopeFunction
* __space
= _STLP_CREATE_ALLOCATOR(allocator_type
, __a
,
1217 _RopeFunction
).allocate(1);
1218 return new(__space
) _RopeFunction(__f
, _p_size
, __d
, __a
);
1221 static _RopeSubstring
* _S_new_RopeSubstring(_Rope_RopeRep
<_CharT
,_Alloc
>* __b
, size_t __s
,
1222 size_t __l
, allocator_type __a
) {
1223 _RopeSubstring
* __space
= _STLP_CREATE_ALLOCATOR(allocator_type
, __a
,
1224 _RopeSubstring
).allocate(1);
1225 return new(__space
) _RopeSubstring(__b
, __s
, __l
, __a
);
1229 _RopeLeaf
* _S_RopeLeaf_from_unowned_char_ptr(const _CharT
*__s
,
1230 size_t _p_size
, allocator_type __a
) {
1231 if (0 == _p_size
) return 0;
1233 _CharT
* __buf
= _STLP_CREATE_ALLOCATOR(allocator_type
,__a
, _CharT
).allocate(_S_rounded_up_size(_p_size
));
1235 _STLP_PRIV
__ucopy_n(__s
, _p_size
, __buf
);
1236 _S_construct_null(__buf
+ _p_size
);
1239 return _S_new_RopeLeaf(__buf
, _p_size
, __a
);
1241 _STLP_UNWIND(_RopeRep::_S_free_string(__buf
, _p_size
, __a
))
1242 _STLP_RET_AFTER_THROW(0)
1246 // Concatenation of nonempty strings.
1247 // Always builds a concatenation node.
1248 // Rebalances if the result is too deep.
1249 // Result has refcount 1.
1250 // Does not increment left and right ref counts even though
1251 // they are referenced.
1253 _S_tree_concat(_RopeRep
* __left
, _RopeRep
* __right
);
1255 // Concatenation helper functions
1257 _S_leaf_concat_char_iter(_RopeLeaf
* __r
,
1258 const _CharT
* __iter
, size_t __slen
);
1259 // Concatenate by copying leaf.
1260 // should take an arbitrary iterator
1261 // result has refcount 1.
1262 static _RopeLeaf
* _S_destr_leaf_concat_char_iter
1263 (_RopeLeaf
* __r
, const _CharT
* __iter
, size_t __slen
);
1264 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1267 // A helper function for exponentiating strings.
1268 // This uses a nonstandard refcount convention.
1269 // The result has refcount 0.
1270 typedef _STLP_PRIV _Rope_Concat_fn
<_CharT
,_Alloc
> _Concat_fn
;
1271 #if !defined (__GNUC__) || (__GNUC__ < 3)
1274 friend struct _STLP_PRIV _Rope_Concat_fn
<_CharT
,_Alloc
>;
1278 static size_t _S_char_ptr_len(const _CharT
* __s
) {
1279 return char_traits
<_CharT
>::length(__s
);
1282 public: /* for operators */
1283 rope(_RopeRep
* __t
, const allocator_type
& __a
= allocator_type())
1284 : _M_tree_ptr(__a
, __t
) { }
1286 // Copy __r to the _CharT buffer.
1287 // Returns __buffer + __r->_M_size._M_data.
1288 // Assumes that buffer is uninitialized.
1289 static _CharT
* _S_flatten(_RopeRep
* __r
, _CharT
* __buffer
);
1291 // Again, with explicit starting position and length.
1292 // Assumes that buffer is uninitialized.
1293 static _CharT
* _S_flatten(_RopeRep
* __r
,
1294 size_t __start
, size_t __len
,
1297 // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
1299 static const unsigned long _S_min_len
[__ROPE_DEPTH_SIZE
];
1301 static bool _S_is_balanced(_RopeRep
* __r
)
1302 { return (__r
->_M_size
._M_data
>= _S_min_len
[__r
->_M_depth
]); }
1304 static bool _S_is_almost_balanced(_RopeRep
* __r
) {
1305 return (__r
->_M_depth
== 0 ||
1306 __r
->_M_size
._M_data
>= _S_min_len
[__r
->_M_depth
- 1]);
1309 static bool _S_is_roughly_balanced(_RopeRep
* __r
) {
1310 return (__r
->_M_depth
<= 1 ||
1311 __r
->_M_size
._M_data
>= _S_min_len
[__r
->_M_depth
- 2]);
1314 // Assumes the result is not empty.
1315 static _RopeRep
* _S_concat_and_set_balanced(_RopeRep
* __left
,
1316 _RopeRep
* __right
) {
1317 _RopeRep
* __result
= _S_concat_rep(__left
, __right
);
1318 if (_S_is_balanced(__result
)) __result
->_M_is_balanced
= true;
1322 // The basic rebalancing operation. Logically copies the
1323 // rope. The result has refcount of 1. The client will
1324 // usually decrement the reference count of __r.
1325 // The result is within height 2 of balanced by the above
1327 static _RopeRep
* _S_balance(_RopeRep
* __r
);
1329 // Add all unbalanced subtrees to the forest of balanced trees.
1330 // Used only by balance.
1331 static void _S_add_to_forest(_RopeRep
*__r
, _RopeRep
** __forest
);
1333 // Add __r to forest, assuming __r is already balanced.
1334 static void _S_add_leaf_to_forest(_RopeRep
* __r
, _RopeRep
** __forest
);
1337 // Print to stdout, exposing structure
1338 static void _S_dump(_RopeRep
* __r
, int __indent
= 0);
1341 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1342 static int _S_compare(const _RopeRep
* __x
, const _RopeRep
* __y
);
1344 void _STLP_FUNCTION_THROWS
_M_throw_out_of_range() const;
1346 void _M_reset(_RopeRep
* __r
) {
1347 //if (__r != _M_tree_ptr._M_data) {
1348 _S_unref(_M_tree_ptr
._M_data
);
1349 _M_tree_ptr
._M_data
= __r
;
1354 bool empty() const { return 0 == _M_tree_ptr
._M_data
; }
1356 // Comparison member function. This is public only for those
1357 // clients that need a ternary comparison. Others
1358 // should use the comparison operators below.
1359 int compare(const _Self
& __y
) const {
1360 return _S_compare(_M_tree_ptr
._M_data
, __y
._M_tree_ptr
._M_data
);
1363 rope(const _CharT
* __s
, const allocator_type
& __a
= allocator_type())
1364 : _M_tree_ptr(__a
, _S_RopeLeaf_from_unowned_char_ptr(__s
, _S_char_ptr_len(__s
),__a
))
1367 rope(const _CharT
* __s
, size_t __len
,
1368 const allocator_type
& __a
= allocator_type())
1369 : _M_tree_ptr(__a
, (_S_RopeLeaf_from_unowned_char_ptr(__s
, __len
, __a
)))
1372 // Should perhaps be templatized with respect to the iterator type
1373 // and use Sequence_buffer. (It should perhaps use sequence_buffer
1375 rope(const _CharT
*__s
, const _CharT
*__e
,
1376 const allocator_type
& __a
= allocator_type())
1377 : _M_tree_ptr(__a
, _S_RopeLeaf_from_unowned_char_ptr(__s
, __e
- __s
, __a
))
1380 rope(const const_iterator
& __s
, const const_iterator
& __e
,
1381 const allocator_type
& __a
= allocator_type())
1382 : _M_tree_ptr(__a
, _S_substring(__s
._M_root
, __s
._M_current_pos
,
1383 __e
._M_current_pos
))
1386 rope(const iterator
& __s
, const iterator
& __e
,
1387 const allocator_type
& __a
= allocator_type())
1388 : _M_tree_ptr(__a
, _S_substring(__s
._M_root
, __s
._M_current_pos
,
1389 __e
._M_current_pos
))
1392 rope(_CharT __c
, const allocator_type
& __a
= allocator_type())
1393 : _M_tree_ptr(__a
, (_RopeRep
*)0) {
1394 _CharT
* __buf
= _M_tree_ptr
.allocate(_S_rounded_up_size(1));
1396 _Copy_Construct(__buf
, __c
);
1397 _S_construct_null(__buf
+ 1);
1400 _M_tree_ptr
._M_data
= _S_new_RopeLeaf(__buf
, 1, __a
);
1402 _STLP_UNWIND(_RopeRep::_S_free_string(__buf
, 1, __a
))
1405 rope(size_t __n
, _CharT __c
,
1406 const allocator_type
& __a
= allocator_type()):
1407 _M_tree_ptr(__a
, (_RopeRep
*)0) {
1411 rope
<_CharT
,_Alloc
> __result
;
1412 # define __exponentiate_threshold size_t(32)
1413 _RopeRep
* __remainder
;
1414 rope
<_CharT
,_Alloc
> __remainder_rope
;
1417 typedef _STLP_PRIV _Rope_Concat_fn
<_CharT
,_Alloc
> _Concat_fn
;
1419 size_t __exponent
= __n
/ __exponentiate_threshold
;
1420 size_t __rest
= __n
% __exponentiate_threshold
;
1424 _CharT
* __rest_buffer
= _M_tree_ptr
.allocate(_S_rounded_up_size(__rest
));
1425 uninitialized_fill_n(__rest_buffer
, __rest
, __c
);
1426 _S_construct_null(__rest_buffer
+ __rest
);
1428 __remainder
= _S_new_RopeLeaf(__rest_buffer
, __rest
, __a
);
1430 _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer
, __rest
, __a
))
1432 __remainder_rope
._M_tree_ptr
._M_data
= __remainder
;
1433 if (__exponent
!= 0) {
1434 _CharT
* __base_buffer
= _M_tree_ptr
.allocate(_S_rounded_up_size(__exponentiate_threshold
));
1435 _RopeLeaf
* __base_leaf
;
1436 rope
<_CharT
,_Alloc
> __base_rope
;
1437 uninitialized_fill_n(__base_buffer
, __exponentiate_threshold
, __c
);
1438 _S_construct_null(__base_buffer
+ __exponentiate_threshold
);
1440 __base_leaf
= _S_new_RopeLeaf(__base_buffer
,
1441 __exponentiate_threshold
, __a
);
1443 _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer
,
1444 __exponentiate_threshold
, __a
))
1445 __base_rope
._M_tree_ptr
._M_data
= __base_leaf
;
1446 if (1 == __exponent
) {
1447 __result
= __base_rope
;
1448 // One each for base_rope and __result
1449 //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
1451 __result
= _STLP_PRIV
__power(__base_rope
, __exponent
, _Concat_fn());
1453 if (0 != __remainder
) {
1454 __result
+= __remainder_rope
;
1457 __result
= __remainder_rope
;
1459 _M_tree_ptr
._M_data
= __result
._M_tree_ptr
._M_data
;
1460 _M_tree_ptr
._M_data
->_M_ref_nonnil();
1461 # undef __exponentiate_threshold
1464 rope(const allocator_type
& __a
= allocator_type())
1465 : _M_tree_ptr(__a
, (_RopeRep
*)0) {}
1467 // Construct a rope from a function that can compute its members
1468 rope(char_producer
<_CharT
> *__fn
, size_t __len
, bool __delete_fn
,
1469 const allocator_type
& __a
= allocator_type())
1470 : _M_tree_ptr(__a
, (_RopeRep
*)0) {
1471 _M_tree_ptr
._M_data
= (0 == __len
) ?
1472 0 : _S_new_RopeFunction(__fn
, __len
, __delete_fn
, __a
);
1475 rope(const _Self
& __x
)
1476 : _M_tree_ptr(__x
._M_tree_ptr
, __x
._M_tree_ptr
._M_data
) {
1477 _S_ref(_M_tree_ptr
._M_data
);
1480 #if !defined (_STLP_NO_MOVE_SEMANTIC)
1481 rope(__move_source
<_Self
> __src
)
1482 : _M_tree_ptr(__src
.get()._M_tree_ptr
, __src
.get()._M_tree_ptr
._M_data
) {
1483 __src
.get()._M_tree_ptr
._M_data
= 0;
1488 _S_unref(_M_tree_ptr
._M_data
);
1491 _Self
& operator=(const _Self
& __x
) {
1492 _STLP_ASSERT(get_allocator() == __x
.get_allocator())
1493 _S_ref(__x
._M_tree_ptr
._M_data
);
1494 _M_reset(__x
._M_tree_ptr
._M_data
);
1499 _S_unref(_M_tree_ptr
._M_data
);
1500 _M_tree_ptr
._M_data
= 0;
1502 void push_back(_CharT __x
) {
1503 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr
._M_data
, &__x
, 1));
1507 _RopeRep
* __old
= _M_tree_ptr
._M_data
;
1508 _M_tree_ptr
._M_data
=
1509 _S_substring(_M_tree_ptr
._M_data
, 0, _M_tree_ptr
._M_data
->_M_size
._M_data
- 1);
1513 _CharT
back() const {
1514 return _S_fetch(_M_tree_ptr
._M_data
, _M_tree_ptr
._M_data
->_M_size
._M_data
- 1);
1517 void push_front(_CharT __x
) {
1518 _RopeRep
* __old
= _M_tree_ptr
._M_data
;
1520 _S_RopeLeaf_from_unowned_char_ptr(&__x
, 1, _M_tree_ptr
);
1522 _M_tree_ptr
._M_data
= _S_concat_rep(__left
, _M_tree_ptr
._M_data
);
1526 _STLP_UNWIND(_S_unref(__left
))
1530 _RopeRep
* __old
= _M_tree_ptr
._M_data
;
1531 _M_tree_ptr
._M_data
= _S_substring(_M_tree_ptr
._M_data
, 1, _M_tree_ptr
._M_data
->_M_size
._M_data
);
1535 _CharT
front() const {
1536 return _S_fetch(_M_tree_ptr
._M_data
, 0);
1540 _RopeRep
* __old
= _M_tree_ptr
._M_data
;
1541 _M_tree_ptr
._M_data
= _S_balance(_M_tree_ptr
._M_data
);
1545 void copy(_CharT
* __buffer
) const {
1546 _STLP_STD::_Destroy_Range(__buffer
, __buffer
+ size());
1547 _S_flatten(_M_tree_ptr
._M_data
, __buffer
);
1551 * This is the copy function from the standard, but
1552 * with the arguments reordered to make it consistent with the
1553 * rest of the interface.
1554 * Note that this guaranteed not to compile if the draft standard
1557 size_type
copy(size_type __pos
, size_type __n
, _CharT
* __buffer
) const {
1558 size_t _p_size
= size();
1559 size_t __len
= (__pos
+ __n
> _p_size
? _p_size
- __pos
: __n
);
1561 _STLP_STD::_Destroy_Range(__buffer
, __buffer
+ __len
);
1562 _S_flatten(_M_tree_ptr
._M_data
, __pos
, __len
, __buffer
);
1567 // Print to stdout, exposing structure. May be useful for
1568 // performance debugging.
1570 _S_dump(_M_tree_ptr
._M_data
);
1574 // Convert to 0 terminated string in new allocated memory.
1575 // Embedded 0s in the input do not terminate the copy.
1576 const _CharT
* c_str() const;
1578 // As above, but also use the flattened representation as the
1579 // the new rope representation.
1580 const _CharT
* replace_with_c_str();
1582 // Reclaim memory for the c_str generated flattened string.
1583 // Intentionally undocumented, since it's hard to say when this
1584 // is safe for multiple threads.
1585 void delete_c_str () {
1586 if (0 == _M_tree_ptr
._M_data
) return;
1587 if (_RopeRep::_S_leaf
== _M_tree_ptr
._M_data
->_M_tag
&&
1588 ((_RopeLeaf
*)_M_tree_ptr
._M_data
)->_M_data
==
1589 _M_tree_ptr
._M_data
->_M_c_string
) {
1590 // Representation shared
1593 _M_tree_ptr
._M_data
->_M_free_c_string();
1594 _M_tree_ptr
._M_data
->_M_c_string
= 0;
1597 _CharT
operator[] (size_type __pos
) const {
1598 return _S_fetch(_M_tree_ptr
._M_data
, __pos
);
1601 _CharT
at(size_type __pos
) const {
1602 if (__pos
>= size()) _M_throw_out_of_range();
1603 return (*this)[__pos
];
1606 const_iterator
begin() const {
1607 return(const_iterator(_M_tree_ptr
._M_data
, 0));
1610 // An easy way to get a const iterator from a non-const container.
1611 const_iterator
const_begin() const {
1612 return(const_iterator(_M_tree_ptr
._M_data
, 0));
1615 const_iterator
end() const {
1616 return(const_iterator(_M_tree_ptr
._M_data
, size()));
1619 const_iterator
const_end() const {
1620 return(const_iterator(_M_tree_ptr
._M_data
, size()));
1623 size_type
size() const {
1624 return(0 == _M_tree_ptr
._M_data
? 0 : _M_tree_ptr
._M_data
->_M_size
._M_data
);
1627 size_type
length() const {
1631 size_type
max_size() const {
1632 return _S_min_len
[__ROPE_MAX_DEPTH
-1] - 1;
1633 // Guarantees that the result can be sufficiently
1634 // balanced. Longer ropes will probably still work,
1635 // but it's harder to make guarantees.
1638 const_reverse_iterator
rbegin() const {
1639 return const_reverse_iterator(end());
1642 const_reverse_iterator
const_rbegin() const {
1643 return const_reverse_iterator(end());
1646 const_reverse_iterator
rend() const {
1647 return const_reverse_iterator(begin());
1650 const_reverse_iterator
const_rend() const {
1651 return const_reverse_iterator(begin());
1653 // The symmetric cases are intentionally omitted, since they're presumed
1654 // to be less common, and we don't handle them as well.
1656 // The following should really be templatized.
1657 // The first argument should be an input iterator or
1658 // forward iterator with value_type _CharT.
1659 _Self
& append(const _CharT
* __iter
, size_t __n
) {
1660 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr
._M_data
, __iter
, __n
));
1664 _Self
& append(const _CharT
* __c_string
) {
1665 size_t __len
= _S_char_ptr_len(__c_string
);
1666 append(__c_string
, __len
);
1670 _Self
& append(const _CharT
* __s
, const _CharT
* __e
) {
1671 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr
._M_data
, __s
, __e
- __s
));
1675 _Self
& append(const_iterator __s
, const_iterator __e
) {
1676 _STLP_ASSERT(__s
._M_root
== __e
._M_root
)
1677 _STLP_ASSERT(get_allocator() == __s
._M_root
->get_allocator())
1678 _Self_destruct_ptr
__appendee(_S_substring(__s
._M_root
, __s
._M_current_pos
, __e
._M_current_pos
));
1679 _M_reset(_S_concat_rep(_M_tree_ptr
._M_data
, (_RopeRep
*)__appendee
));
1683 _Self
& append(_CharT __c
) {
1684 _M_reset(_S_destr_concat_char_iter(_M_tree_ptr
._M_data
, &__c
, 1));
1688 _Self
& append() { return append(_CharT()); } // XXX why?
1690 _Self
& append(const _Self
& __y
) {
1691 _STLP_ASSERT(__y
.get_allocator() == get_allocator())
1692 _M_reset(_S_concat_rep(_M_tree_ptr
._M_data
, __y
._M_tree_ptr
._M_data
));
1696 _Self
& append(size_t __n
, _CharT __c
) {
1697 rope
<_CharT
,_Alloc
> __last(__n
, __c
);
1698 return append(__last
);
1701 void swap(_Self
& __b
) {
1702 _M_tree_ptr
.swap(__b
._M_tree_ptr
);
1704 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND) && !defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
1705 void _M_swap_workaround(_Self
& __x
) { swap(__x
); }
1709 // Result is included in refcount.
1710 static _RopeRep
* replace(_RopeRep
* __old
, size_t __pos1
,
1711 size_t __pos2
, _RopeRep
* __r
) {
1712 if (0 == __old
) { _S_ref(__r
); return __r
; }
1713 _Self_destruct_ptr
__left(_S_substring(__old
, 0, __pos1
));
1714 _Self_destruct_ptr
__right(_S_substring(__old
, __pos2
, __old
->_M_size
._M_data
));
1715 _STLP_MPWFIX_TRY
//*TY 06/01/2000 -
1719 __result
= _S_concat_rep(__left
, __right
);
1721 _STLP_ASSERT(__old
->get_allocator() == __r
->get_allocator())
1722 _Self_destruct_ptr
__left_result(_S_concat_rep(__left
, __r
));
1723 __result
= _S_concat_rep(__left_result
, __right
);
1726 _STLP_MPWFIX_CATCH
//*TY 06/01/2000 -
1730 void insert(size_t __p
, const _Self
& __r
) {
1731 if (__p
> size()) _M_throw_out_of_range();
1732 _STLP_ASSERT(get_allocator() == __r
.get_allocator())
1733 _M_reset(replace(_M_tree_ptr
._M_data
, __p
, __p
, __r
._M_tree_ptr
._M_data
));
1736 void insert(size_t __p
, size_t __n
, _CharT __c
) {
1737 rope
<_CharT
,_Alloc
> __r(__n
,__c
);
1741 void insert(size_t __p
, const _CharT
* __i
, size_t __n
) {
1742 if (__p
> size()) _M_throw_out_of_range();
1743 _Self_destruct_ptr
__left(_S_substring(_M_tree_ptr
._M_data
, 0, __p
));
1744 _Self_destruct_ptr
__right(_S_substring(_M_tree_ptr
._M_data
, __p
, size()));
1745 _Self_destruct_ptr
__left_result(
1746 _S_concat_char_iter(__left
, __i
, __n
));
1747 // _S_ destr_concat_char_iter should be safe here.
1748 // But as it stands it's probably not a win, since __left
1749 // is likely to have additional references.
1750 _M_reset(_S_concat_rep(__left_result
, __right
));
1753 void insert(size_t __p
, const _CharT
* __c_string
) {
1754 insert(__p
, __c_string
, _S_char_ptr_len(__c_string
));
1757 void insert(size_t __p
, _CharT __c
) {
1758 insert(__p
, &__c
, 1);
1761 void insert(size_t __p
) {
1762 _CharT __c
= _CharT();
1763 insert(__p
, &__c
, 1);
1766 void insert(size_t __p
, const _CharT
* __i
, const _CharT
* __j
) {
1767 _Self
__r(__i
, __j
);
1771 void insert(size_t __p
, const const_iterator
& __i
,
1772 const const_iterator
& __j
) {
1773 _Self
__r(__i
, __j
);
1777 void insert(size_t __p
, const iterator
& __i
,
1778 const iterator
& __j
) {
1779 _Self
__r(__i
, __j
);
1783 // (position, length) versions of replace operations:
1784 void replace(size_t __p
, size_t __n
, const _Self
& __r
) {
1785 if (__p
> size()) _M_throw_out_of_range();
1786 _M_reset(replace(_M_tree_ptr
._M_data
, __p
, __p
+ __n
, __r
._M_tree_ptr
._M_data
));
1789 void replace(size_t __p
, size_t __n
,
1790 const _CharT
* __i
, size_t __i_len
) {
1791 _Self
__r(__i
, __i_len
);
1792 replace(__p
, __n
, __r
);
1795 void replace(size_t __p
, size_t __n
, _CharT __c
) {
1797 replace(__p
, __n
, __r
);
1800 void replace(size_t __p
, size_t __n
, const _CharT
* __c_string
) {
1801 _Self
__r(__c_string
);
1802 replace(__p
, __n
, __r
);
1805 void replace(size_t __p
, size_t __n
,
1806 const _CharT
* __i
, const _CharT
* __j
) {
1807 _Self
__r(__i
, __j
);
1808 replace(__p
, __n
, __r
);
1811 void replace(size_t __p
, size_t __n
,
1812 const const_iterator
& __i
, const const_iterator
& __j
) {
1813 _Self
__r(__i
, __j
);
1814 replace(__p
, __n
, __r
);
1817 void replace(size_t __p
, size_t __n
,
1818 const iterator
& __i
, const iterator
& __j
) {
1819 _Self
__r(__i
, __j
);
1820 replace(__p
, __n
, __r
);
1823 // Single character variants:
1824 void replace(size_t __p
, _CharT __c
) {
1825 if (__p
> size()) _M_throw_out_of_range();
1826 iterator
__i(this, __p
);
1830 void replace(size_t __p
, const _Self
& __r
) {
1831 replace(__p
, 1, __r
);
1834 void replace(size_t __p
, const _CharT
* __i
, size_t __i_len
) {
1835 replace(__p
, 1, __i
, __i_len
);
1838 void replace(size_t __p
, const _CharT
* __c_string
) {
1839 replace(__p
, 1, __c_string
);
1842 void replace(size_t __p
, const _CharT
* __i
, const _CharT
* __j
) {
1843 replace(__p
, 1, __i
, __j
);
1846 void replace(size_t __p
, const const_iterator
& __i
,
1847 const const_iterator
& __j
) {
1848 replace(__p
, 1, __i
, __j
);
1851 void replace(size_t __p
, const iterator
& __i
,
1852 const iterator
& __j
) {
1853 replace(__p
, 1, __i
, __j
);
1856 // Erase, (position, size) variant.
1857 void erase(size_t __p
, size_t __n
) {
1858 if (__p
> size()) _M_throw_out_of_range();
1859 _M_reset(replace(_M_tree_ptr
._M_data
, __p
, __p
+ __n
, 0));
1862 // Erase, single character
1863 void erase(size_t __p
) {
1864 erase(__p
, __p
+ 1);
1867 // Insert, iterator variants.
1868 iterator
insert(const iterator
& __p
, const _Self
& __r
)
1869 { insert(__p
.index(), __r
); return __p
; }
1870 iterator
insert(const iterator
& __p
, size_t __n
, _CharT __c
)
1871 { insert(__p
.index(), __n
, __c
); return __p
; }
1872 iterator
insert(const iterator
& __p
, _CharT __c
)
1873 { insert(__p
.index(), __c
); return __p
; }
1874 iterator
insert(const iterator
& __p
)
1875 { insert(__p
.index()); return __p
; }
1876 iterator
insert(const iterator
& __p
, const _CharT
* c_string
)
1877 { insert(__p
.index(), c_string
); return __p
; }
1878 iterator
insert(const iterator
& __p
, const _CharT
* __i
, size_t __n
)
1879 { insert(__p
.index(), __i
, __n
); return __p
; }
1880 iterator
insert(const iterator
& __p
, const _CharT
* __i
,
1882 { insert(__p
.index(), __i
, __j
); return __p
; }
1883 iterator
insert(const iterator
& __p
,
1884 const const_iterator
& __i
, const const_iterator
& __j
)
1885 { insert(__p
.index(), __i
, __j
); return __p
; }
1886 iterator
insert(const iterator
& __p
,
1887 const iterator
& __i
, const iterator
& __j
)
1888 { insert(__p
.index(), __i
, __j
); return __p
; }
1890 // Replace, range variants.
1891 void replace(const iterator
& __p
, const iterator
& __q
,
1893 { replace(__p
.index(), __q
.index() - __p
.index(), __r
); }
1894 void replace(const iterator
& __p
, const iterator
& __q
, _CharT __c
)
1895 { replace(__p
.index(), __q
.index() - __p
.index(), __c
); }
1896 void replace(const iterator
& __p
, const iterator
& __q
,
1897 const _CharT
* __c_string
)
1898 { replace(__p
.index(), __q
.index() - __p
.index(), __c_string
); }
1899 void replace(const iterator
& __p
, const iterator
& __q
,
1900 const _CharT
* __i
, size_t __n
)
1901 { replace(__p
.index(), __q
.index() - __p
.index(), __i
, __n
); }
1902 void replace(const iterator
& __p
, const iterator
& __q
,
1903 const _CharT
* __i
, const _CharT
* __j
)
1904 { replace(__p
.index(), __q
.index() - __p
.index(), __i
, __j
); }
1905 void replace(const iterator
& __p
, const iterator
& __q
,
1906 const const_iterator
& __i
, const const_iterator
& __j
)
1907 { replace(__p
.index(), __q
.index() - __p
.index(), __i
, __j
); }
1908 void replace(const iterator
& __p
, const iterator
& __q
,
1909 const iterator
& __i
, const iterator
& __j
)
1910 { replace(__p
.index(), __q
.index() - __p
.index(), __i
, __j
); }
1912 // Replace, iterator variants.
1913 void replace(const iterator
& __p
, const _Self
& __r
)
1914 { replace(__p
.index(), __r
); }
1915 void replace(const iterator
& __p
, _CharT __c
)
1916 { replace(__p
.index(), __c
); }
1917 void replace(const iterator
& __p
, const _CharT
* __c_string
)
1918 { replace(__p
.index(), __c_string
); }
1919 void replace(const iterator
& __p
, const _CharT
* __i
, size_t __n
)
1920 { replace(__p
.index(), __i
, __n
); }
1921 void replace(const iterator
& __p
, const _CharT
* __i
, const _CharT
* __j
)
1922 { replace(__p
.index(), __i
, __j
); }
1923 void replace(const iterator
& __p
, const_iterator __i
,
1925 { replace(__p
.index(), __i
, __j
); }
1926 void replace(const iterator
& __p
, iterator __i
, iterator __j
)
1927 { replace(__p
.index(), __i
, __j
); }
1929 // Iterator and range variants of erase
1930 iterator
erase(const iterator
& __p
, const iterator
& __q
) {
1931 size_t __p_index
= __p
.index();
1932 erase(__p_index
, __q
.index() - __p_index
);
1933 return iterator(this, __p_index
);
1935 iterator
erase(const iterator
& __p
) {
1936 size_t __p_index
= __p
.index();
1937 erase(__p_index
, 1);
1938 return iterator(this, __p_index
);
1941 _Self
substr(size_t __start
, size_t __len
= 1) const {
1942 if (__start
> size()) _M_throw_out_of_range();
1943 return rope
<_CharT
,_Alloc
>(_S_substring(_M_tree_ptr
._M_data
, __start
, __start
+ __len
));
1946 _Self
substr(iterator __start
, iterator __end
) const {
1947 return rope
<_CharT
,_Alloc
>(_S_substring(_M_tree_ptr
._M_data
, __start
.index(), __end
.index()));
1950 _Self
substr(iterator __start
) const {
1951 size_t __pos
= __start
.index();
1952 return rope
<_CharT
,_Alloc
>(_S_substring(_M_tree_ptr
._M_data
, __pos
, __pos
+ 1));
1955 _Self
substr(const_iterator __start
, const_iterator __end
) const {
1956 // This might eventually take advantage of the cache in the
1958 return rope
<_CharT
,_Alloc
>(_S_substring(_M_tree_ptr
._M_data
, __start
.index(), __end
.index()));
1961 rope
<_CharT
,_Alloc
> substr(const_iterator __start
) {
1962 size_t __pos
= __start
.index();
1963 return rope
<_CharT
,_Alloc
>(_S_substring(_M_tree_ptr
._M_data
, __pos
, __pos
+ 1));
1966 #include <stl/_string_npos.h>
1968 size_type
find(const _Self
& __s
, size_type __pos
= 0) const {
1969 if (__pos
>= size())
1970 # ifndef _STLP_OLD_ROPE_SEMANTICS
1976 size_type __result_pos
;
1977 const_iterator __result
= _STLP_STD::search(const_begin() + (ptrdiff_t)__pos
, const_end(), __s
.begin(), __s
.end() );
1978 __result_pos
= __result
.index();
1979 # ifndef _STLP_OLD_ROPE_SEMANTICS
1980 if (__result_pos
== size()) __result_pos
= npos
;
1982 return __result_pos
;
1984 size_type
find(_CharT __c
, size_type __pos
= 0) const;
1985 size_type
find(const _CharT
* __s
, size_type __pos
= 0) const {
1986 size_type __result_pos
;
1987 const_iterator __result
= _STLP_STD::search(const_begin() + (ptrdiff_t)__pos
, const_end(),
1988 __s
, __s
+ _S_char_ptr_len(__s
));
1989 __result_pos
= __result
.index();
1990 # ifndef _STLP_OLD_ROPE_SEMANTICS
1991 if (__result_pos
== size()) __result_pos
= npos
;
1993 return __result_pos
;
1996 iterator
mutable_begin() {
1997 return(iterator(this, 0));
2000 iterator
mutable_end() {
2001 return(iterator(this, size()));
2004 reverse_iterator
mutable_rbegin() {
2005 return reverse_iterator(mutable_end());
2008 reverse_iterator
mutable_rend() {
2009 return reverse_iterator(mutable_begin());
2012 reference
mutable_reference_at(size_type __pos
) {
2013 return reference(this, __pos
);
2017 reference
operator[] (size_type __pos
) {
2018 return reference(this, __pos
);
2021 reference
at(size_type __pos
) {
2022 if (__pos
>= size()) _M_throw_out_of_range();
2023 return (*this)[__pos
];
2026 void resize(size_type
, _CharT
) {}
2027 void resize(size_type
) {}
2028 void reserve(size_type
= 0) {}
2029 size_type
capacity() const {
2033 // Stuff below this line is dangerous because it's error prone.
2034 // I would really like to get rid of it.
2035 // copy function with funny arg ordering.
2036 size_type
copy(_CharT
* __buffer
, size_type __n
,
2037 size_type __pos
= 0) const {
2038 return copy(__pos
, __n
, __buffer
);
// NOTE(review): the two groups below are alternative definitions of
// begin()/end()/rbegin()/rend().  In upstream SGI/STLport rope the
// mutable group is selected by a preprocessor guard and the const
// group sits in the matching #else branch; the guard lines appear to
// have been lost in this copy -- restore them, otherwise these are
// conflicting overloads differing only in return type.  Code kept
// byte-identical pending that confirmation.
2041 iterator
end() { return mutable_end(); }
2043 iterator
begin() { return mutable_begin(); }
2045 reverse_iterator
rend() { return mutable_rend(); }
2047 reverse_iterator
rbegin() { return mutable_rbegin(); }
2051 const_iterator
end() { return const_end(); }
2053 const_iterator
begin() { return const_begin(); }
2055 const_reverse_iterator
rend() { return const_rend(); }
2057 const_reverse_iterator
rbegin() { return const_rbegin(); }
// gcc 2.96 requires an out-of-class definition of the static npos
// member.  NOTE(review): closing #endif restored -- it was lost in
// this copy.
#if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#endif
2067 template <class _CharT
, class _Alloc
>
2069 _Rope_const_iterator
< _CharT
, _Alloc
>::operator[](size_t __n
)
2070 { return rope
<_CharT
,_Alloc
>::_S_fetch(this->_M_root
, this->_M_current_pos
+ __n
); }
2072 template <class _CharT
, class _Alloc
>
2073 inline bool operator== (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2074 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
) {
2075 return (__x
._M_current_pos
== __y
._M_current_pos
&&
2076 __x
._M_root
== __y
._M_root
);
2079 template <class _CharT
, class _Alloc
>
2080 inline bool operator< (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2081 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2082 { return (__x
._M_current_pos
< __y
._M_current_pos
); }
2084 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
2086 template <class _CharT
, class _Alloc
>
2087 inline bool operator!= (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2088 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2089 { return !(__x
== __y
); }
2091 template <class _CharT
, class _Alloc
>
2092 inline bool operator> (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2093 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2094 { return __y
< __x
; }
2096 template <class _CharT
, class _Alloc
>
2097 inline bool operator<= (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2098 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2099 { return !(__y
< __x
); }
2101 template <class _CharT
, class _Alloc
>
2102 inline bool operator>= (const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2103 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2104 { return !(__x
< __y
); }
2106 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2108 template <class _CharT
, class _Alloc
>
2109 inline ptrdiff_t operator-(const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
,
2110 const _Rope_const_iterator
<_CharT
,_Alloc
>& __y
)
2111 { return (ptrdiff_t)__x
._M_current_pos
- (ptrdiff_t)__y
._M_current_pos
; }
2113 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
2114 template <class _CharT
, class _Alloc
>
2115 inline _Rope_const_iterator
<_CharT
,_Alloc
>
2116 operator-(const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
, ptrdiff_t __n
)
2117 { return _Rope_const_iterator
<_CharT
,_Alloc
>(__x
._M_root
, __x
._M_current_pos
- __n
); }
2120 template <class _CharT
, class _Alloc
>
2121 inline _Rope_const_iterator
<_CharT
,_Alloc
>
2122 operator+(const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
, ptrdiff_t __n
)
2123 { return _Rope_const_iterator
<_CharT
,_Alloc
>(__x
._M_root
, __x
._M_current_pos
+ __n
); }
2125 template <class _CharT
, class _Alloc
>
2126 inline _Rope_const_iterator
<_CharT
,_Alloc
>
2127 operator+(ptrdiff_t __n
, const _Rope_const_iterator
<_CharT
,_Alloc
>& __x
)
2128 { return _Rope_const_iterator
<_CharT
,_Alloc
>(__x
._M_root
, __x
._M_current_pos
+ __n
); }
2130 template <class _CharT
, class _Alloc
>
2131 inline bool operator== (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2132 const _Rope_iterator
<_CharT
,_Alloc
>& __y
) {
2133 return (__x
._M_current_pos
== __y
._M_current_pos
&&
2134 __x
._M_root_rope
== __y
._M_root_rope
);
2137 template <class _CharT
, class _Alloc
>
2138 inline bool operator< (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2139 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2140 { return (__x
._M_current_pos
< __y
._M_current_pos
); }
2142 #if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
2143 template <class _CharT
, class _Alloc
>
2144 inline bool operator!= (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2145 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2146 { return !(__x
== __y
); }
2148 template <class _CharT
, class _Alloc
>
2149 inline bool operator> (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2150 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2151 { return __y
< __x
; }
2153 template <class _CharT
, class _Alloc
>
2154 inline bool operator<= (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2155 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2156 { return !(__y
< __x
); }
2158 template <class _CharT
, class _Alloc
>
2159 inline bool operator>= (const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2160 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2161 { return !(__x
< __y
); }
2162 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2164 template <class _CharT
, class _Alloc
>
2165 inline ptrdiff_t operator-(const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2166 const _Rope_iterator
<_CharT
,_Alloc
>& __y
)
2167 { return (ptrdiff_t)__x
._M_current_pos
- (ptrdiff_t)__y
._M_current_pos
; }
2169 #if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
2170 template <class _CharT
, class _Alloc
>
2171 inline _Rope_iterator
<_CharT
,_Alloc
>
2172 operator-(const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2174 return _Rope_iterator
<_CharT
,_Alloc
>(__x
._M_root_rope
, __x
._M_current_pos
- __n
);
2178 template <class _CharT
, class _Alloc
>
2179 inline _Rope_iterator
<_CharT
,_Alloc
>
2180 operator+(const _Rope_iterator
<_CharT
,_Alloc
>& __x
,
2182 return _Rope_iterator
<_CharT
,_Alloc
>(__x
._M_root_rope
, __x
._M_current_pos
+ __n
);
2185 template <class _CharT
, class _Alloc
>
2186 inline _Rope_iterator
<_CharT
,_Alloc
>
2187 operator+(ptrdiff_t __n
, const _Rope_iterator
<_CharT
,_Alloc
>& __x
) {
2188 return _Rope_iterator
<_CharT
,_Alloc
>(__x
._M_root_rope
, __x
._M_current_pos
+ __n
);
2191 template <class _CharT
, class _Alloc
>
2192 inline rope
<_CharT
,_Alloc
>
2193 operator+ (const rope
<_CharT
,_Alloc
>& __left
,
2194 const rope
<_CharT
,_Alloc
>& __right
) {
2195 _STLP_ASSERT(__left
.get_allocator() == __right
.get_allocator())
2196 return rope
<_CharT
,_Alloc
>(rope
<_CharT
,_Alloc
>::_S_concat_rep(__left
._M_tree_ptr
._M_data
, __right
._M_tree_ptr
._M_data
));
2197 // Inlining this should make it possible to keep __left and __right in registers.
2200 template <class _CharT
, class _Alloc
>
2201 inline rope
<_CharT
,_Alloc
>&
2202 operator+= (rope
<_CharT
,_Alloc
>& __left
,
2203 const rope
<_CharT
,_Alloc
>& __right
) {
2204 __left
.append(__right
);
2208 template <class _CharT
, class _Alloc
>
2209 inline rope
<_CharT
,_Alloc
>
2210 operator+ (const rope
<_CharT
,_Alloc
>& __left
,
2211 const _CharT
* __right
) {
2212 size_t __rlen
= rope
<_CharT
,_Alloc
>::_S_char_ptr_len(__right
);
2213 return rope
<_CharT
,_Alloc
>(rope
<_CharT
,_Alloc
>::_S_concat_char_iter(__left
._M_tree_ptr
._M_data
, __right
, __rlen
));
2216 template <class _CharT
, class _Alloc
>
2217 inline rope
<_CharT
,_Alloc
>&
2218 operator+= (rope
<_CharT
,_Alloc
>& __left
,
2219 const _CharT
* __right
) {
2220 __left
.append(__right
);
2224 template <class _CharT
, class _Alloc
>
2225 inline rope
<_CharT
,_Alloc
>
2226 operator+ (const rope
<_CharT
,_Alloc
>& __left
, _CharT __right
) {
2227 return rope
<_CharT
,_Alloc
>(rope
<_CharT
,_Alloc
>::_S_concat_char_iter(__left
._M_tree_ptr
._M_data
, &__right
, 1));
2230 template <class _CharT
, class _Alloc
>
2231 inline rope
<_CharT
,_Alloc
>&
2232 operator+= (rope
<_CharT
,_Alloc
>& __left
, _CharT __right
) {
2233 __left
.append(__right
);
2237 template <class _CharT
, class _Alloc
>
2239 operator< (const rope
<_CharT
,_Alloc
>& __left
,
2240 const rope
<_CharT
,_Alloc
>& __right
) {
2241 return __left
.compare(__right
) < 0;
2244 template <class _CharT
, class _Alloc
>
2246 operator== (const rope
<_CharT
,_Alloc
>& __left
,
2247 const rope
<_CharT
,_Alloc
>& __right
) {
2248 return __left
.compare(__right
) == 0;
2251 #ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
2253 template <class _CharT
, class _Alloc
>
2255 operator!= (const rope
<_CharT
,_Alloc
>& __x
, const rope
<_CharT
,_Alloc
>& __y
) {
2256 return !(__x
== __y
);
2259 template <class _CharT
, class _Alloc
>
2261 operator> (const rope
<_CharT
,_Alloc
>& __x
, const rope
<_CharT
,_Alloc
>& __y
) {
2265 template <class _CharT
, class _Alloc
>
2267 operator<= (const rope
<_CharT
,_Alloc
>& __x
, const rope
<_CharT
,_Alloc
>& __y
) {
2268 return !(__y
< __x
);
2271 template <class _CharT
, class _Alloc
>
2273 operator>= (const rope
<_CharT
,_Alloc
>& __x
, const rope
<_CharT
,_Alloc
>& __y
) {
2274 return !(__x
< __y
);
2277 template <class _CharT
, class _Alloc
>
2278 inline bool operator!= (const _Rope_char_ptr_proxy
<_CharT
,_Alloc
>& __x
,
2279 const _Rope_char_ptr_proxy
<_CharT
,_Alloc
>& __y
) {
2280 return !(__x
== __y
);
2283 #endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
2285 template <class _CharT
, class _Alloc
>
2286 inline bool operator== (const _Rope_char_ptr_proxy
<_CharT
,_Alloc
>& __x
,
2287 const _Rope_char_ptr_proxy
<_CharT
,_Alloc
>& __y
) {
2288 return (__x
._M_pos
== __y
._M_pos
&& __x
._M_root
== __y
._M_root
);
2291 #if !defined (_STLP_USE_NO_IOSTREAMS)
2292 template<class _CharT
, class _Traits
, class _Alloc
>
2293 basic_ostream
<_CharT
, _Traits
>& operator<< (basic_ostream
<_CharT
, _Traits
>& __o
,
2294 const rope
<_CharT
, _Alloc
>& __r
);
2297 typedef rope
<char, allocator
<char> > crope
;
2298 #if defined (_STLP_HAS_WCHAR_T)
2299 typedef rope
<wchar_t, allocator
<wchar_t> > wrope
;
2302 inline crope::reference
__mutable_reference_at(crope
& __c
, size_t __i
)
2303 { return __c
.mutable_reference_at(__i
); }
2305 #if defined (_STLP_HAS_WCHAR_T)
2306 inline wrope::reference
__mutable_reference_at(wrope
& __c
, size_t __i
)
2307 { return __c
.mutable_reference_at(__i
); }
2310 #if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
2311 template <class _CharT
, class _Alloc
>
2312 inline void swap(rope
<_CharT
,_Alloc
>& __x
, rope
<_CharT
,_Alloc
>& __y
)
2316 inline void swap(crope
& __x
, crope
& __y
) { __x
.swap(__y
); }
2317 # ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
2318 inline void swap(wrope
& __x
, wrope
& __y
) { __x
.swap(__y
); }
2321 #endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
2324 // Hash functions should probably be revisited later:
2325 _STLP_TEMPLATE_NULL
struct hash
<crope
> {
2326 size_t operator()(const crope
& __str
) const {
2327 size_t _p_size
= __str
.size();
2329 if (0 == _p_size
) return 0;
2330 return 13*__str
[0] + 5*__str
[_p_size
- 1] + _p_size
;
#if defined (_STLP_HAS_WCHAR_T) // dwa 8/21/97
// Wide-character counterpart of hash<crope>.
_STLP_TEMPLATE_NULL struct hash<wrope> {
  size_t operator()(const wrope& __str) const {
    size_t __len = __str.size();
    if (0 == __len) return 0;
    return 13 * __str[0] + 5 * __str[__len - 1] + __len;
  }
};
#endif
2345 #if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
2346 // I couldn't get this to work with VC++
2347 template<class _CharT
,class _Alloc
>
2348 # if defined (__DMC__)
2351 void _Rope_rotate(_Rope_iterator
<_CharT
, _Alloc
> __first
,
2352 _Rope_iterator
<_CharT
, _Alloc
> __middle
,
2353 _Rope_iterator
<_CharT
, _Alloc
> __last
);
2355 inline void rotate(_Rope_iterator
<char, allocator
<char> > __first
,
2356 _Rope_iterator
<char, allocator
<char> > __middle
,
2357 _Rope_iterator
<char, allocator
<char> > __last
)
2358 { _Rope_rotate(__first
, __middle
, __last
); }
2361 template <class _CharT
, class _Alloc
>
2362 inline _Rope_char_ref_proxy
<_CharT
, _Alloc
>::operator _CharT () const {
2363 if (_M_current_valid
) {
2366 return _My_rope::_S_fetch(_M_root
->_M_tree_ptr
._M_data
, _M_pos
);
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION) && !defined (_STLP_NO_MOVE_SEMANTIC)
// Ropes support STLport's move-traits protocol.
// NOTE(review): the closing `};` and #endif were lost in this copy;
// restored -- verify.
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __true_type implemented;
  //Completness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif
2381 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
2382 # include <stl/_rope.c>
2385 #endif /* _STLP_INTERNAL_ROPE_H */