]> git.ipfire.org Git - thirdparty/gcc.git/blame - libstdc++-v3/include/ext/rope
re PR libstdc++/25191 (exception_defines.h #defines try/catch)
[thirdparty/gcc.git] / libstdc++-v3 / include / ext / rope
CommitLineData
42526146
PE
1// SGI's rope class -*- C++ -*-
2
bc2631e0 3// Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
b82f782b 4// Free Software Foundation, Inc.
42526146
PE
5//
6// This file is part of the GNU ISO C++ Library. This library is free
7// software; you can redistribute it and/or modify it under the
8// terms of the GNU General Public License as published by the
9// Free Software Foundation; either version 2, or (at your option)
10// any later version.
11
12// This library is distributed in the hope that it will be useful,
13// but WITHOUT ANY WARRANTY; without even the implied warranty of
14// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15// GNU General Public License for more details.
16
17// You should have received a copy of the GNU General Public License along
18// with this library; see the file COPYING. If not, write to the Free
83f51799 19// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
42526146
PE
20// USA.
21
22// As a special exception, you may use this file as part of a free software
23// library without restriction. Specifically, if other files instantiate
24// templates or use macros or inline functions from this file, or you compile
25// this file and link it with other files to produce an executable, this
26// file does not by itself cause the resulting executable to be covered by
27// the GNU General Public License. This exception does not however
28// invalidate any other reasons why the executable file might be covered by
29// the GNU General Public License.
30
725dc051
BK
31/*
32 * Copyright (c) 1997
33 * Silicon Graphics Computer Systems, Inc.
34 *
35 * Permission to use, copy, modify, distribute and sell this software
36 * and its documentation for any purpose is hereby granted without fee,
37 * provided that the above copyright notice appear in all copies and
38 * that both that copyright notice and this permission notice appear
39 * in supporting documentation. Silicon Graphics makes no
40 * representations about the suitability of this software for any
41 * purpose. It is provided "as is" without express or implied warranty.
42 */
43
ffe94f83
PE
44/** @file ext/rope
45 * This file is a GNU extension to the Standard C++ Library (possibly
0aa06b18 46 * containing extensions from the HP/SGI STL subset).
1b4a6975
PE
47 */
48
b2acb86f
BK
49#ifndef _ROPE
50#define _ROPE 1
725dc051 51
bd1a56a0
BK
52#include <algorithm>
53#include <iosfwd>
9dd90ac6
PC
54#include <bits/stl_construct.h>
55#include <bits/stl_uninitialized.h>
725dc051
BK
56#include <bits/stl_function.h>
57#include <bits/stl_numeric.h>
1ff9402d 58#include <bits/allocator.h>
bd91a8c4 59#include <bits/gthr.h>
0c3de900 60#include <tr1/functional>
725dc051 61
b2acb86f
BK
62# ifdef __GC
63# define __GC_CONST const
64# else
b2acb86f
BK
65# define __GC_CONST // constant except for deallocation
66# endif
67
68#include <ext/memory> // For uninitialized_copy_n
69
3cbc7af0
BK
70_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
71
46db4159
BK
  namespace __detail
  {
    // Hard bound on rope tree depth; used to size fixed-depth
    // bookkeeping (e.g. _Rope_RopeRep::_M_depth fits this range).
    enum { _S_max_rope_depth = 45 };
    // Discriminates the concrete _Rope_RopeRep node types:
    // flat leaf, concatenation node, substring-function node,
    // and general char_producer-backed function node.
    enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  } // namespace __detail
77
a6048f29
PC
78 using std::size_t;
79 using std::ptrdiff_t;
80 using std::allocator;
a6048f29
PC
81 using std::_Destroy;
82
91efdb82
PC
  // See libstdc++/36832.
  // Destroy the elements in [__first, __last) one at a time through
  // the allocator's destroy() member, honoring any user-provided
  // destroy semantics.
  template<typename _ForwardIterator, typename _Allocator>
    void
    _Destroy_const(_ForwardIterator __first,
                   _ForwardIterator __last, _Allocator __alloc)
    {
      for (; __first != __last; ++__first)
        __alloc.destroy(&*__first);
    }

  // The default allocator has no special destroy() behavior to
  // honor, so dispatch to the ordinary _Destroy.
  template<typename _ForwardIterator, typename _Tp>
    inline void
    _Destroy_const(_ForwardIterator __first,
                   _ForwardIterator __last, allocator<_Tp>)
    { _Destroy(__first, __last); }
98
a6048f29
PC
99 // The _S_eos function is used for those functions that
100 // convert to/from C-like strings to detect the end of the string.
101
102 // The end-of-C-string character.
103 // This is what the draft standard says it should be.
104 template <class _CharT>
105 inline _CharT
106 _S_eos(_CharT*)
107 { return _CharT(); }
108
109 // Test for basic character types.
110 // For basic character types leaves having a trailing eos.
111 template <class _CharT>
112 inline bool
113 _S_is_basic_char_type(_CharT*)
114 { return false; }
115
116 template <class _CharT>
117 inline bool
118 _S_is_one_byte_char_type(_CharT*)
119 { return false; }
120
121 inline bool
122 _S_is_basic_char_type(char*)
123 { return true; }
124
125 inline bool
126 _S_is_one_byte_char_type(char*)
127 { return true; }
128
129 inline bool
130 _S_is_basic_char_type(wchar_t*)
131 { return true; }
132
133 // Store an eos iff _CharT is a basic character type.
134 // Do not reference _S_eos if it isn't.
135 template <class _CharT>
136 inline void
137 _S_cond_store_eos(_CharT&) { }
138
139 inline void
140 _S_cond_store_eos(char& __c)
141 { __c = 0; }
142
143 inline void
144 _S_cond_store_eos(wchar_t& __c)
145 { __c = 0; }
146
147 // char_producers are logically functions that generate a section of
28dac70a 148 // a string. These can be converted to ropes. The resulting rope
a6048f29
PC
149 // invokes the char_producer on demand. This allows, for example,
150 // files to be viewed as ropes without reading the entire file.
151 template <class _CharT>
152 class char_producer
153 {
b2acb86f 154 public:
46db4159 155 virtual ~char_producer() { };
a6048f29
PC
156
157 virtual void
158 operator()(size_t __start_pos, size_t __len,
159 _CharT* __buffer) = 0;
160 // Buffer should really be an arbitrary output iterator.
161 // That way we could flatten directly into an ostream, etc.
162 // This is thoroughly impossible, since iterator types don't
163 // have runtime descriptions.
164 };
b2acb86f 165
a6048f29
PC
166 // Sequence buffers:
167 //
168 // Sequence must provide an append operation that appends an
169 // array to the sequence. Sequence buffers are useful only if
170 // appending an entire array is cheaper than appending element by element.
171 // This is true for many string representations.
172 // This should perhaps inherit from ostream<sequence::value_type>
173 // and be implemented correspondingly, so that they can be used
174 // for formatted. For the sake of portability, we don't do this yet.
175 //
176 // For now, sequence buffers behave as output iterators. But they also
177 // behave a little like basic_ostringstream<sequence::value_type> and a
178 // little like containers.
179
180 template<class _Sequence, size_t _Buf_sz = 100>
181 class sequence_buffer
c3f824ce 182 : public std::iterator<std::output_iterator_tag, void, void, void, void>
a6048f29 183 {
b2acb86f 184 public:
a6048f29 185 typedef typename _Sequence::value_type value_type;
b2acb86f 186 protected:
a6048f29
PC
187 _Sequence* _M_prefix;
188 value_type _M_buffer[_Buf_sz];
189 size_t _M_buf_count;
b2acb86f 190 public:
a6048f29
PC
191
192 void
193 flush()
194 {
195 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
196 _M_buf_count = 0;
197 }
198
199 ~sequence_buffer()
200 { flush(); }
201
202 sequence_buffer()
203 : _M_prefix(0), _M_buf_count(0) { }
204
205 sequence_buffer(const sequence_buffer& __x)
206 {
207 _M_prefix = __x._M_prefix;
208 _M_buf_count = __x._M_buf_count;
412dc870 209 std::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
a6048f29
PC
210 }
211
212 sequence_buffer(sequence_buffer& __x)
213 {
214 __x.flush();
215 _M_prefix = __x._M_prefix;
216 _M_buf_count = 0;
217 }
218
219 sequence_buffer(_Sequence& __s)
220 : _M_prefix(&__s), _M_buf_count(0) { }
221
222 sequence_buffer&
223 operator=(sequence_buffer& __x)
224 {
225 __x.flush();
226 _M_prefix = __x._M_prefix;
227 _M_buf_count = 0;
228 return *this;
229 }
230
231 sequence_buffer&
232 operator=(const sequence_buffer& __x)
233 {
234 _M_prefix = __x._M_prefix;
235 _M_buf_count = __x._M_buf_count;
412dc870 236 std::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
a6048f29
PC
237 return *this;
238 }
239
240 void
241 push_back(value_type __x)
242 {
243 if (_M_buf_count < _Buf_sz)
244 {
245 _M_buffer[_M_buf_count] = __x;
246 ++_M_buf_count;
247 }
248 else
249 {
250 flush();
251 _M_buffer[0] = __x;
252 _M_buf_count = 1;
253 }
254 }
255
256 void
257 append(value_type* __s, size_t __len)
258 {
259 if (__len + _M_buf_count <= _Buf_sz)
260 {
261 size_t __i = _M_buf_count;
262 for (size_t __j = 0; __j < __len; __i++, __j++)
263 _M_buffer[__i] = __s[__j];
264 _M_buf_count += __len;
265 }
266 else if (0 == _M_buf_count)
267 _M_prefix->append(__s, __s + __len);
268 else
269 {
270 flush();
271 append(__s, __len);
272 }
273 }
274
275 sequence_buffer&
276 write(value_type* __s, size_t __len)
277 {
278 append(__s, __len);
279 return *this;
280 }
281
282 sequence_buffer&
283 put(value_type __x)
284 {
285 push_back(__x);
286 return *this;
287 }
288
289 sequence_buffer&
290 operator=(const value_type& __rhs)
291 {
292 push_back(__rhs);
293 return *this;
294 }
295
296 sequence_buffer&
297 operator*()
298 { return *this; }
299
300 sequence_buffer&
301 operator++()
302 { return *this; }
303
304 sequence_buffer
305 operator++(int)
306 { return *this; }
307 };
308
309 // The following should be treated as private, at least for now.
310 template<class _CharT>
311 class _Rope_char_consumer
312 {
b2acb86f 313 public:
a6048f29
PC
314 // If we had member templates, these should not be virtual.
315 // For now we need to use run-time parametrization where
316 // compile-time would do. Hence this should all be private
317 // for now.
318 // The symmetry with char_producer is accidental and temporary.
46db4159 319 virtual ~_Rope_char_consumer() { };
a6048f29
PC
320
321 virtual bool
322 operator()(const _CharT* __buffer, size_t __len) = 0;
323 };
324
  // First a lot of forward declarations.  The standard seems to require
  // much stricter "declaration before use" than many of the implementations
  // that preceded it.
  template<class _CharT, class _Alloc = allocator<_CharT> >
    class rope;

  template<class _CharT, class _Alloc>
    struct _Rope_RopeConcatenation;

  template<class _CharT, class _Alloc>
    struct _Rope_RopeLeaf;

  template<class _CharT, class _Alloc>
    struct _Rope_RopeFunction;

  template<class _CharT, class _Alloc>
    struct _Rope_RopeSubstring;

  template<class _CharT, class _Alloc>
    class _Rope_iterator;

  template<class _CharT, class _Alloc>
    class _Rope_const_iterator;

  template<class _CharT, class _Alloc>
    class _Rope_char_ref_proxy;

  template<class _CharT, class _Alloc>
    class _Rope_char_ptr_proxy;

  // Equality of char-pointer proxies.
  template<class _CharT, class _Alloc>
    bool
    operator==(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x,
               const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y);

  // Random-access arithmetic and comparisons for const iterators.
  template<class _CharT, class _Alloc>
    _Rope_const_iterator<_CharT, _Alloc>
    operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x,
              ptrdiff_t __n);

  template<class _CharT, class _Alloc>
    _Rope_const_iterator<_CharT, _Alloc>
    operator+(const _Rope_const_iterator<_CharT, _Alloc>& __x,
              ptrdiff_t __n);

  template<class _CharT, class _Alloc>
    _Rope_const_iterator<_CharT, _Alloc>
    operator+(ptrdiff_t __n,
              const _Rope_const_iterator<_CharT, _Alloc>& __x);

  template<class _CharT, class _Alloc>
    bool
    operator==(const _Rope_const_iterator<_CharT, _Alloc>& __x,
               const _Rope_const_iterator<_CharT, _Alloc>& __y);

  template<class _CharT, class _Alloc>
    bool
    operator<(const _Rope_const_iterator<_CharT, _Alloc>& __x,
              const _Rope_const_iterator<_CharT, _Alloc>& __y);

  template<class _CharT, class _Alloc>
    ptrdiff_t
    operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x,
              const _Rope_const_iterator<_CharT, _Alloc>& __y);

  // The same set of operations for mutable iterators.
  template<class _CharT, class _Alloc>
    _Rope_iterator<_CharT, _Alloc>
    operator-(const _Rope_iterator<_CharT, _Alloc>& __x, ptrdiff_t __n);

  template<class _CharT, class _Alloc>
    _Rope_iterator<_CharT, _Alloc>
    operator+(const _Rope_iterator<_CharT, _Alloc>& __x, ptrdiff_t __n);

  template<class _CharT, class _Alloc>
    _Rope_iterator<_CharT, _Alloc>
    operator+(ptrdiff_t __n, const _Rope_iterator<_CharT, _Alloc>& __x);

  template<class _CharT, class _Alloc>
    bool
    operator==(const _Rope_iterator<_CharT, _Alloc>& __x,
               const _Rope_iterator<_CharT, _Alloc>& __y);

  template<class _CharT, class _Alloc>
    bool
    operator<(const _Rope_iterator<_CharT, _Alloc>& __x,
              const _Rope_iterator<_CharT, _Alloc>& __y);

  template<class _CharT, class _Alloc>
    ptrdiff_t
    operator-(const _Rope_iterator<_CharT, _Alloc>& __x,
              const _Rope_iterator<_CharT, _Alloc>& __y);

  // Concatenation: rope + rope, rope + C string, rope + character.
  template<class _CharT, class _Alloc>
    rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left,
              const rope<_CharT, _Alloc>& __right);

  template<class _CharT, class _Alloc>
    rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left, const _CharT* __right);

  template<class _CharT, class _Alloc>
    rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left, _CharT __right);
429
  // Some helpers, so we can use power on ropes.
  // See below for why this isn't local to the implementation.

  // Binary function object that concatenates two ropes, suitable as
  // the operation argument of power().
  // This uses a nonstandard refcount convention.
  // The result has refcount 0.
  template<class _CharT, class _Alloc>
    struct _Rope_Concat_fn
    : public std::binary_function<rope<_CharT, _Alloc>, rope<_CharT, _Alloc>,
                                  rope<_CharT, _Alloc> >
    {
      rope<_CharT, _Alloc>
      operator()(const rope<_CharT, _Alloc>& __x,
                 const rope<_CharT, _Alloc>& __y)
      { return __x + __y; }
    };
b2acb86f 445
a6048f29
PC
  // The identity for rope concatenation is the empty rope.
  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>
    identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
    { return rope<_CharT, _Alloc>(); }
b2acb86f 450
6b5a2662
BK
  // Class _Refcount_Base provides a type, _RC_t, a data member,
  // _M_ref_count, and member functions _M_incr and _M_decr, which perform
  // atomic preincrement/predecrement.  The constructor initializes
  // _M_ref_count.
  struct _Refcount_Base
  {
    // The type _RC_t
    typedef size_t _RC_t;

    // The data member _M_ref_count
    volatile _RC_t _M_ref_count;

    // Mutex guarding _M_ref_count; gthr.h provides the portable
    // mutex abstraction.
    __gthread_mutex_t _M_ref_count_lock;

    // Constructor: set the initial count and initialize the mutex,
    // whichever way this platform's gthr layer supports.
    _Refcount_Base(_RC_t __n) : _M_ref_count(__n), _M_ref_count_lock()
    {
#ifdef __GTHREAD_MUTEX_INIT
      // Static initializer available: copy it in.  The mutex has
      // never been used, so copying the bits is safe here.
      __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
      _M_ref_count_lock = __tmp;
#elif defined(__GTHREAD_MUTEX_INIT_FUNCTION)
      __GTHREAD_MUTEX_INIT_FUNCTION (&_M_ref_count_lock);
#else
#error __GTHREAD_MUTEX_INIT or __GTHREAD_MUTEX_INIT_FUNCTION should be defined by gthr.h abstraction layer, report problem to libstdc++@gcc.gnu.org.
#endif
    }

    // Preincrement under the lock.
    void
    _M_incr()
    {
      __gthread_mutex_lock(&_M_ref_count_lock);
      ++_M_ref_count;
      __gthread_mutex_unlock(&_M_ref_count_lock);
    }

    // Predecrement under the lock; returns the new count so the
    // caller can detect the transition to zero.
    _RC_t
    _M_decr()
    {
      __gthread_mutex_lock(&_M_ref_count_lock);
      volatile _RC_t __tmp = --_M_ref_count;
      __gthread_mutex_unlock(&_M_ref_count_lock);
      return __tmp;
    }
  };
495
a6048f29
PC
496 //
497 // What follows should really be local to rope. Unfortunately,
498 // that doesn't work, since it makes it impossible to define generic
499 // equality on rope iterators. According to the draft standard, the
500 // template parameters for such an equality operator cannot be inferred
501 // from the occurrence of a member class as a parameter.
502 // (SGI compilers in fact allow this, but the __result wouldn't be
503 // portable.)
504 // Similarly, some of the static member functions are member functions
505 // only to avoid polluting the global namespace, and to circumvent
506 // restrictions on type inference for template functions.
507 //
508
509 //
510 // The internal data structure for representing a rope. This is
511 // private to the implementation. A rope is really just a pointer
512 // to one of these.
513 //
514 // A few basic functions for manipulating this data structure
515 // are members of _RopeRep. Most of the more complex algorithms
516 // are implemented as rope members.
517 //
518 // Some of the static member functions of _RopeRep have identically
519 // named functions in rope that simply invoke the _RopeRep versions.
b2acb86f
BK
520
521#define __ROPE_DEFINE_ALLOCS(__a) \
522 __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
523 typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
524 __ROPE_DEFINE_ALLOC(__C,_C) \
525 typedef _Rope_RopeLeaf<_CharT,__a> __L; \
526 __ROPE_DEFINE_ALLOC(__L,_L) \
527 typedef _Rope_RopeFunction<_CharT,__a> __F; \
528 __ROPE_DEFINE_ALLOC(__F,_F) \
529 typedef _Rope_RopeSubstring<_CharT,__a> __S; \
530 __ROPE_DEFINE_ALLOC(__S,_S)
531
a6048f29
PC
532 // Internal rope nodes potentially store a copy of the allocator
533 // instance used to allocate them. This is mostly redundant.
534 // But the alternative would be to pass allocator instances around
535 // in some form to nearly all internal functions, since any pointer
536 // assignment may result in a zero reference count and thus require
537 // deallocation.
b2acb86f
BK
538
539#define __STATIC_IF_SGI_ALLOC /* not static */
540
a6048f29
PC
  // Common base of all rope tree nodes: records the length of the
  // text the node represents and derives from the allocator so an
  // empty allocator need not add to the node size.
  template <class _CharT, class _Alloc>
    struct _Rope_rep_base
    : public _Alloc
    {
      typedef _Alloc allocator_type;

      // Return a copy of the stored allocator.
      allocator_type
      get_allocator() const
      { return *static_cast<const _Alloc*>(this); }

      // Access the stored allocator in place.
      allocator_type&
      _M_get_allocator()
      { return *static_cast<_Alloc*>(this); }

      const allocator_type&
      _M_get_allocator() const
      { return *static_cast<const _Alloc*>(this); }

      _Rope_rep_base(size_t __size, const allocator_type&)
      : _M_size(__size) { }

      size_t _M_size;   // Number of characters in this subtree.

      // For each node type, generate a rebound allocator typedef plus
      // static allocate/deallocate helpers (expanded by
      // __ROPE_DEFINE_ALLOCS above).
# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc::template rebind<_Tp>::other __name##Alloc; \
        static _Tp* __name##_allocate(size_t __n) \
          { return __name##Alloc().allocate(__n); } \
        static void __name##_deallocate(_Tp *__p, size_t __n) \
          { __name##Alloc().deallocate(__p, __n); }
      __ROPE_DEFINE_ALLOCS(_Alloc)
# undef __ROPE_DEFINE_ALLOC
    };
b2acb86f 574
a6048f29
PC
  // The actual rope node.  On top of _Rope_rep_base it adds the tag
  // identifying the concrete node type, depth/balance bookkeeping,
  // an optional cached flat copy of the string, and — unless garbage
  // collection (__GC) is in use — an intrusive reference count.
  template<class _CharT, class _Alloc>
    struct _Rope_RopeRep
    : public _Rope_rep_base<_CharT, _Alloc>
# ifndef __GC
      , _Refcount_Base
# endif
    {
    public:
      __detail::_Tag _M_tag:8;       // Concrete node type.
      bool _M_is_balanced:8;         // Subtree balance flag.
      unsigned char _M_depth;        // Tree depth; 0 for leaves.
      __GC_CONST _CharT* _M_c_string;
      __gthread_mutex_t _M_c_string_lock;  // Guards _M_c_string.
      /* Flattened version of string, if needed.  */
      /* typically 0.                             */
      /* If it's not 0, then the memory is owned  */
      /* by this node.                            */
      /* In the case of a leaf, this may point to */
      /* the same memory as the data field.       */
      typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type
        allocator_type;

      using _Rope_rep_base<_CharT, _Alloc>::get_allocator;
      using _Rope_rep_base<_CharT, _Alloc>::_M_get_allocator;

      // New nodes start with a reference count of one and no cached
      // flat string.
      _Rope_RopeRep(__detail::_Tag __t, int __d, bool __b, size_t __size,
                    const allocator_type& __a)
      : _Rope_rep_base<_CharT, _Alloc>(__size, __a),
#ifndef __GC
        _Refcount_Base(1),
#endif
        _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0)
#ifdef __GTHREAD_MUTEX_INIT
      {
        // Do not copy a POSIX/gthr mutex once in use.  However, bits are bits.
        __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
        _M_c_string_lock = __tmp;
      }
#else
      { __GTHREAD_MUTEX_INIT_FUNCTION (&_M_c_string_lock); }
#endif
#ifdef __GC
      void
      _M_incr () { }
#endif
      // Return a leaf's character array to the allocator (defined
      // elsewhere).
      static void
      _S_free_string(__GC_CONST _CharT*, size_t __len,
                     allocator_type& __a);
#define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
      // Deallocate data section of a leaf.
      // This shouldn't be a member function.
      // But its hard to do anything else at the
      // moment, because it's templatized w.r.t.
      // an allocator.
      // Does nothing if __GC is defined.
#ifndef __GC
      void _M_free_c_string();
      void _M_free_tree();

      // Deallocate t.  Assumes t is not 0.
      void
      _M_unref_nonnil()
      {
        if (0 == _M_decr())
          _M_free_tree();
      }

      void
      _M_ref_nonnil()
      { _M_incr(); }

      // Null-tolerant wrappers over the reference-count primitives.
      static void
      _S_unref(_Rope_RopeRep* __t)
      {
        if (0 != __t)
          __t->_M_unref_nonnil();
      }

      static void
      _S_ref(_Rope_RopeRep* __t)
      {
        if (0 != __t)
          __t->_M_incr();
      }

      // Free the tree only when nobody holds a reference.  Reads the
      // count directly, without taking the lock.
      static void
      _S_free_if_unref(_Rope_RopeRep* __t)
      {
        if (0 != __t && 0 == __t->_M_ref_count)
          __t->_M_free_tree();
      }
# else /* __GC */
      // Under garbage collection all reference counting is a no-op.
      void _M_unref_nonnil() { }
      void _M_ref_nonnil() { }
      static void _S_unref(_Rope_RopeRep*) { }
      static void _S_ref(_Rope_RopeRep*) { }
      static void _S_free_if_unref(_Rope_RopeRep*) { }
# endif
    protected:
      // Nodes are neither assignable nor copyable (declared, never
      // defined).
      _Rope_RopeRep&
      operator=(const _Rope_RopeRep&);

      _Rope_RopeRep(const _Rope_RopeRep&);
    };
b2acb86f 678
a6048f29
PC
  // Leaf node: owns a flat array of characters.
  template<class _CharT, class _Alloc>
    struct _Rope_RopeLeaf
    : public _Rope_RopeRep<_CharT, _Alloc>
    {
    public:
      // Apparently needed by VC++
      // The data fields of leaves are allocated with some
      // extra space, to accommodate future growth and for basic
      // character types, to hold a trailing eos character.
      enum { _S_alloc_granularity = 8 };

      // Allocation size for a leaf of __n characters: add one for
      // the eos terminator of basic character types, then (in the
      // non-GC case) round up to the allocation granularity so the
      // leaf can grow in place.
      static size_t
      _S_rounded_up_size(size_t __n)
      {
        size_t __size_with_eos;

        if (_S_is_basic_char_type((_CharT*)0))
          __size_with_eos = __n + 1;
        else
          __size_with_eos = __n;
#ifdef __GC
        return __size_with_eos;
#else
        // Allow slop for in-place expansion.
        return ((__size_with_eos + size_t(_S_alloc_granularity) - 1)
                &~ (size_t(_S_alloc_granularity) - 1));
#endif
      }
      __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
                                  /* The allocated size is */
                                  /* _S_rounded_up_size(size), except */
                                  /* in the GC case, in which it */
                                  /* doesn't matter. */
      typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type
        allocator_type;

      // Take ownership of __d, which must have been allocated with
      // the proper allocator and the properly padded size.
      _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size,
                     const allocator_type& __a)
      : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_leaf, 0, true,
                                      __size, __a), _M_data(__d)
      {
        if (_S_is_basic_char_type((_CharT *)0))
          {
            // already eos terminated.
            this->_M_c_string = __d;
          }
      }
      // The constructor assumes that d has been allocated with
      // the proper allocator and the properly padded size.
      // In contrast, the destructor deallocates the data:
#ifndef __GC
      ~_Rope_RopeLeaf() throw()
      {
        // Free the cached flat string only when it is separate
        // storage, not an alias of _M_data.
        if (_M_data != this->_M_c_string)
          this->_M_free_c_string();

        __STL_FREE_STRING(_M_data, this->_M_size, this->_M_get_allocator());
      }
#endif
    protected:
      // Not assignable or copyable (declared, never defined).
      _Rope_RopeLeaf&
      operator=(const _Rope_RopeLeaf&);

      _Rope_RopeLeaf(const _Rope_RopeLeaf&);
    };
b2acb86f 744
a6048f29
PC
  // Interior node: the concatenation of two subtrees.  Size and
  // depth are derived from the children; the node holds a reference
  // to each child.
  template<class _CharT, class _Alloc>
    struct _Rope_RopeConcatenation
    : public _Rope_RopeRep<_CharT, _Alloc>
    {
    public:
      _Rope_RopeRep<_CharT, _Alloc>* _M_left;   // Left child (non-null).
      _Rope_RopeRep<_CharT, _Alloc>* _M_right;  // Right child (non-null).

      typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type
        allocator_type;

      // Depth is one more than the deeper child; the node starts out
      // marked unbalanced and adopts the references passed in by the
      // caller (no extra increment here).
      _Rope_RopeConcatenation(_Rope_RopeRep<_CharT, _Alloc>* __l,
                              _Rope_RopeRep<_CharT, _Alloc>* __r,
                              const allocator_type& __a)
      : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_concat,
                                      std::max(__l->_M_depth,
                                               __r->_M_depth) + 1,
                                      false,
                                      __l->_M_size + __r->_M_size, __a),
        _M_left(__l), _M_right(__r)
      { }
#ifndef __GC
      // Release the cached flat string, if any, and drop one
      // reference to each child.
      ~_Rope_RopeConcatenation() throw()
      {
        this->_M_free_c_string();
        _M_left->_M_unref_nonnil();
        _M_right->_M_unref_nonnil();
      }
#endif
    protected:
      // Not assignable or copyable (declared, never defined).
      _Rope_RopeConcatenation&
      operator=(const _Rope_RopeConcatenation&);

      _Rope_RopeConcatenation(const _Rope_RopeConcatenation&);
    };
b2acb86f 780
a6048f29
PC
  // Leaf-like node whose characters are produced on demand by a
  // char_producer instead of being stored.
  template<class _CharT, class _Alloc>
    struct _Rope_RopeFunction
    : public _Rope_RopeRep<_CharT, _Alloc>
    {
    public:
      char_producer<_CharT>* _M_fn;   // Generates the characters.
#ifndef __GC
      bool _M_delete_when_done;       // Char_producer is owned by the
                                      // rope and should be explicitly
                                      // deleted when the rope becomes
                                      // inaccessible.
#else
      // In the GC case, we either register the rope for
      // finalization, or not.  Thus the field is unnecessary;
      // the information is stored in the collector data structures.
      // We do need a finalization procedure to be invoked by the
      // collector.
      static void
      _S_fn_finalization_proc(void * __tree, void *)
      { delete ((_Rope_RopeFunction *)__tree) -> _M_fn; }
#endif
      typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type
        allocator_type;

      // __d: whether the node takes ownership of __f.
      _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
                         bool __d, const allocator_type& __a)
      : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_function, 0, true, __size, __a)
      , _M_fn(__f)
#ifndef __GC
      , _M_delete_when_done(__d)
#endif
      {
#ifdef __GC
        if (__d)
          {
            GC_REGISTER_FINALIZER(this, _Rope_RopeFunction::
                                  _S_fn_finalization_proc, 0, 0, 0);
          }
#endif
      }
#ifndef __GC
      ~_Rope_RopeFunction() throw()
      {
        this->_M_free_c_string();
        if (_M_delete_when_done)
          delete _M_fn;
      }
# endif
    protected:
      // Not assignable or copyable (declared, never defined).
      _Rope_RopeFunction&
      operator=(const _Rope_RopeFunction&);

      _Rope_RopeFunction(const _Rope_RopeFunction&);
    };
  // Substring results are usually represented using just
  // concatenation nodes.  But in the case of very long flat ropes
  // or ropes with a functional representation that isn't practical.
  // In that case, we represent the __result as a special case of
  // RopeFunction, whose char_producer points back to the rope itself.
  // In all cases except repeated substring operations and
  // deallocation, we treat the __result as a RopeFunction.
  template<class _CharT, class _Alloc>
    struct _Rope_RopeSubstring
    : public _Rope_RopeFunction<_CharT, _Alloc>,
      public char_producer<_CharT>
    {
    public:
      // XXX this whole class should be rewritten.
      _Rope_RopeRep<_CharT, _Alloc>* _M_base;  // not 0
      size_t _M_start;                         // Offset into _M_base.

      // Produce characters by delegating to the base rope: for
      // function-backed nodes invoke their producer at the shifted
      // position; for a leaf copy directly out of its data.
      virtual void
      operator()(size_t __start_pos, size_t __req_len,
                 _CharT* __buffer)
      {
        switch(_M_base->_M_tag)
          {
          case __detail::_S_function:
          case __detail::_S_substringfn:
            {
              char_producer<_CharT>* __fn =
                ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
              (*__fn)(__start_pos + _M_start, __req_len, __buffer);
            }
            break;
          case __detail::_S_leaf:
            {
              __GC_CONST _CharT* __s =
                ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
              uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
                                   __buffer);
            }
            break;
          default:
            // Remaining tags produce nothing here.
            break;
          }
      }

      typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type
        allocator_type;

      // Builds a RopeFunction whose producer is this node itself,
      // then re-tags it as a substring-function node.
      _Rope_RopeSubstring(_Rope_RopeRep<_CharT, _Alloc>* __b, size_t __s,
                          size_t __l, const allocator_type& __a)
      : _Rope_RopeFunction<_CharT, _Alloc>(this, __l, false, __a),
        char_producer<_CharT>(), _M_base(__b), _M_start(__s)
      {
#ifndef __GC
        _M_base->_M_ref_nonnil();
#endif
        this->_M_tag = __detail::_S_substringfn;
      }

      virtual ~_Rope_RopeSubstring() throw()
      {
#ifndef __GC
        _M_base->_M_unref_nonnil();
        // _M_free_c_string(); -- done by parent class
#endif
      }
    };
b2acb86f 900
a6048f29
PC
  // Self-destructing pointers to Rope_rep.
  // These are not conventional smart pointers.  Their
  // only purpose in life is to ensure that unref is called
  // on the pointer either at normal exit or if an exception
  // is raised.  It is the caller's responsibility to
  // adjust reference counts when these pointers are initialized
  // or assigned to.  (This convention significantly reduces
  // the number of potentially expensive reference count
  // updates.)
#ifndef __GC
  template<class _CharT, class _Alloc>
    struct _Rope_self_destruct_ptr
    {
      _Rope_RopeRep<_CharT, _Alloc>* _M_ptr;

      // Drop one reference on destruction (_S_unref handles null).
      ~_Rope_self_destruct_ptr()
      { _Rope_RopeRep<_CharT, _Alloc>::_S_unref(_M_ptr); }
#ifdef __EXCEPTIONS
      // With exceptions enabled the destructor may run before the
      // pointer is ever assigned, so start it out null.
      _Rope_self_destruct_ptr() : _M_ptr(0) { };
#else
      _Rope_self_destruct_ptr() { };
#endif
      _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT, _Alloc>* __p)
      : _M_ptr(__p) { }

      _Rope_RopeRep<_CharT, _Alloc>&
      operator*()
      { return *_M_ptr; }

      _Rope_RopeRep<_CharT, _Alloc>*
      operator->()
      { return _M_ptr; }

      operator _Rope_RopeRep<_CharT, _Alloc>*()
      { return _M_ptr; }

      // Plain pointer assignment: deliberately no reference-count
      // adjustment (see class comment).
      _Rope_self_destruct_ptr&
      operator=(_Rope_RopeRep<_CharT, _Alloc>* __x)
      { _M_ptr = __x; return *this; }
    };
#endif
942
a6048f29
PC
  // Dereferencing a nonconst iterator has to return something
  // that behaves almost like a reference.  It's not possible to
  // return an actual reference since assignment requires extra
  // work.  And we would get into the same problems as with the
  // CD2 version of basic_string.
  template<class _CharT, class _Alloc>
    class _Rope_char_ref_proxy
    {
      friend class rope<_CharT, _Alloc>;
      friend class _Rope_iterator<_CharT, _Alloc>;
      friend class _Rope_char_ptr_proxy<_CharT, _Alloc>;
#ifdef __GC
      typedef _Rope_RopeRep<_CharT, _Alloc>* _Self_destruct_ptr;
#else
      typedef _Rope_self_destruct_ptr<_CharT, _Alloc> _Self_destruct_ptr;
#endif
      typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
      typedef rope<_CharT, _Alloc> _My_rope;
      size_t _M_pos;          // Position of the character in the rope.
      _CharT _M_current;      // Cached character value.
      bool _M_current_valid;  // True iff _M_current holds the value.
      _My_rope* _M_root;      // The whole rope.
    public:
      _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
      : _M_pos(__p), _M_current(), _M_current_valid(false), _M_root(__r) { }

      // Copies never trust the source's cache.
      _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
      : _M_pos(__x._M_pos), _M_current(__x._M_current),
        _M_current_valid(false), _M_root(__x._M_root) { }

      // Don't preserve cache if the reference can outlive the
      // expression.  We claim that's not possible without calling
      // a copy constructor or generating reference to a proxy
      // reference.  We declare the latter to have undefined semantics.
      _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
      : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) { }

      // Read the character (defined elsewhere).
      inline operator _CharT () const;

      // Write through the proxy (defined elsewhere).
      _Rope_char_ref_proxy&
      operator=(_CharT __c);

      _Rope_char_ptr_proxy<_CharT, _Alloc> operator&() const;

      _Rope_char_ref_proxy&
      operator=(const _Rope_char_ref_proxy& __c)
      { return operator=((_CharT)__c); }
    };
991
992 template<class _CharT, class __Alloc>
993 inline void
994 swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
995 _Rope_char_ref_proxy <_CharT, __Alloc > __b)
996 {
997 _CharT __tmp = __a;
998 __a = __b;
999 __b = __tmp;
b2acb86f 1000 }
a6048f29
PC
1001
1002 template<class _CharT, class _Alloc>
1003 class _Rope_char_ptr_proxy
1004 {
1005 // XXX this class should be rewritten.
1006 friend class _Rope_char_ref_proxy<_CharT, _Alloc>;
1007 size_t _M_pos;
1008 rope<_CharT,_Alloc>* _M_root; // The whole rope.
1009 public:
1010 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
46db4159 1011 : _M_pos(__x._M_pos), _M_root(__x._M_root) { }
a6048f29
PC
1012
1013 _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
46db4159 1014 : _M_pos(__x._M_pos), _M_root(__x._M_root) { }
a6048f29 1015
46db4159 1016 _Rope_char_ptr_proxy() { }
a6048f29
PC
1017
1018 _Rope_char_ptr_proxy(_CharT* __x)
1019 : _M_root(0), _M_pos(0) { }
1020
1021 _Rope_char_ptr_proxy&
1022 operator=(const _Rope_char_ptr_proxy& __x)
1023 {
b2acb86f
BK
1024 _M_pos = __x._M_pos;
1025 _M_root = __x._M_root;
1026 return *this;
a6048f29 1027 }
b2acb86f 1028
a6048f29
PC
1029 template<class _CharT2, class _Alloc2>
1030 friend bool
1031 operator==(const _Rope_char_ptr_proxy<_CharT2, _Alloc2>& __x,
1032 const _Rope_char_ptr_proxy<_CharT2, _Alloc2>& __y);
b2acb86f 1033
a6048f29
PC
1034 _Rope_char_ref_proxy<_CharT, _Alloc> operator*() const
1035 { return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root, _M_pos); }
1036 };
b2acb86f 1037
a6048f29
PC
1038 // Rope iterators:
1039 // Unlike in the C version, we cache only part of the stack
1040 // for rope iterators, since they must be efficiently copyable.
1041 // When we run out of cache, we have to reconstruct the iterator
1042 // value.
1043 // Pointers from iterators are not included in reference counts.
1044 // Iterators are assumed to be thread private. Ropes can
1045 // be shared.
1046
1047 template<class _CharT, class _Alloc>
1048 class _Rope_iterator_base
c3f824ce 1049 : public std::iterator<std::random_access_iterator_tag, _CharT>
a6048f29
PC
1050 {
1051 friend class rope<_CharT, _Alloc>;
1052 public:
1053 typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
1054 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1055 // Borland doesn't want this to be protected.
1056 protected:
1057 enum { _S_path_cache_len = 4 }; // Must be <= 9.
1058 enum { _S_iterator_buf_len = 15 };
1059 size_t _M_current_pos;
1060 _RopeRep* _M_root; // The whole rope.
1061 size_t _M_leaf_pos; // Starting position for current leaf
1062 __GC_CONST _CharT* _M_buf_start;
1063 // Buffer possibly
1064 // containing current char.
1065 __GC_CONST _CharT* _M_buf_ptr;
1066 // Pointer to current char in buffer.
1067 // != 0 ==> buffer valid.
1068 __GC_CONST _CharT* _M_buf_end;
1069 // One past __last valid char in buffer.
1070 // What follows is the path cache. We go out of our
1071 // way to make this compact.
1072 // Path_end contains the bottom section of the path from
1073 // the root to the current leaf.
1074 const _RopeRep* _M_path_end[_S_path_cache_len];
1075 int _M_leaf_index; // Last valid __pos in path_end;
1076 // _M_path_end[0] ... _M_path_end[leaf_index-1]
1077 // point to concatenation nodes.
1078 unsigned char _M_path_directions;
b2acb86f
BK
1079 // (path_directions >> __i) & 1 is 1
1080 // iff we got from _M_path_end[leaf_index - __i - 1]
1081 // to _M_path_end[leaf_index - __i] by going to the
1082 // __right. Assumes path_cache_len <= 9.
a6048f29 1083 _CharT _M_tmp_buf[_S_iterator_buf_len];
b2acb86f 1084 // Short buffer for surrounding chars.
fa30fe72 1085 // This is useful primarily for
b2acb86f
BK
1086 // RopeFunctions. We put the buffer
1087 // here to avoid locking in the
1088 // multithreaded case.
a6048f29
PC
1089 // The cached path is generally assumed to be valid
1090 // only if the buffer is valid.
1091 static void _S_setbuf(_Rope_iterator_base& __x);
b2acb86f
BK
1092 // Set buffer contents given
1093 // path cache.
a6048f29 1094 static void _S_setcache(_Rope_iterator_base& __x);
b2acb86f
BK
1095 // Set buffer contents and
1096 // path cache.
a6048f29 1097 static void _S_setcache_for_incr(_Rope_iterator_base& __x);
b2acb86f
BK
1098 // As above, but assumes path
1099 // cache is valid for previous posn.
46db4159 1100 _Rope_iterator_base() { }
a6048f29
PC
1101
1102 _Rope_iterator_base(_RopeRep* __root, size_t __pos)
46db4159 1103 : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) { }
a6048f29
PC
1104
1105 void _M_incr(size_t __n);
1106 void _M_decr(size_t __n);
1107 public:
1108 size_t
1109 index() const
1110 { return _M_current_pos; }
1111
1112 _Rope_iterator_base(const _Rope_iterator_base& __x)
1113 {
1114 if (0 != __x._M_buf_ptr)
1115 *this = __x;
1116 else
1117 {
b2acb86f
BK
1118 _M_current_pos = __x._M_current_pos;
1119 _M_root = __x._M_root;
1120 _M_buf_ptr = 0;
a6048f29
PC
1121 }
1122 }
1123 };
b2acb86f 1124
a6048f29
PC
1125 template<class _CharT, class _Alloc>
1126 class _Rope_iterator;
b2acb86f 1127
a6048f29
PC
1128 template<class _CharT, class _Alloc>
1129 class _Rope_const_iterator
1130 : public _Rope_iterator_base<_CharT, _Alloc>
1131 {
1132 friend class rope<_CharT, _Alloc>;
1133 protected:
1134 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
b2acb86f 1135 // The one from the base class may not be directly visible.
a6048f29
PC
1136 _Rope_const_iterator(const _RopeRep* __root, size_t __pos)
1137 : _Rope_iterator_base<_CharT, _Alloc>(const_cast<_RopeRep*>(__root),
1138 __pos)
b2acb86f 1139 // Only nonconst iterators modify root ref count
46db4159 1140 { }
b2acb86f 1141 public:
a6048f29
PC
1142 typedef _CharT reference; // Really a value. Returning a reference
1143 // Would be a mess, since it would have
1144 // to be included in refcount.
1145 typedef const _CharT* pointer;
b2acb86f 1146
a6048f29 1147 public:
46db4159 1148 _Rope_const_iterator() { };
a6048f29
PC
1149
1150 _Rope_const_iterator(const _Rope_const_iterator& __x)
1151 : _Rope_iterator_base<_CharT,_Alloc>(__x) { }
1152
1153 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
1154
1155 _Rope_const_iterator(const rope<_CharT, _Alloc>& __r, size_t __pos)
1156 : _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) { }
1157
1158 _Rope_const_iterator&
1159 operator=(const _Rope_const_iterator& __x)
1160 {
1161 if (0 != __x._M_buf_ptr)
1162 *(static_cast<_Rope_iterator_base<_CharT, _Alloc>*>(this)) = __x;
1163 else
1164 {
b2acb86f
BK
1165 this->_M_current_pos = __x._M_current_pos;
1166 this->_M_root = __x._M_root;
1167 this->_M_buf_ptr = 0;
a6048f29 1168 }
b2acb86f 1169 return(*this);
a6048f29
PC
1170 }
1171
1172 reference
1173 operator*()
1174 {
1175 if (0 == this->_M_buf_ptr)
1176 _S_setcache(*this);
b2acb86f 1177 return *this->_M_buf_ptr;
a6048f29 1178 }
49525c76
DG
1179
1180 // Without this const version, Rope iterators do not meet the
1181 // requirements of an Input Iterator.
1182 reference
1183 operator*() const
1184 {
1185 return *const_cast<_Rope_const_iterator&>(*this);
1186 }
1187
a6048f29
PC
1188 _Rope_const_iterator&
1189 operator++()
1190 {
b2acb86f
BK
1191 __GC_CONST _CharT* __next;
1192 if (0 != this->_M_buf_ptr
a6048f29
PC
1193 && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end)
1194 {
b2acb86f
BK
1195 this->_M_buf_ptr = __next;
1196 ++this->_M_current_pos;
a6048f29
PC
1197 }
1198 else
1199 this->_M_incr(1);
1200 return *this;
1201 }
1202
1203 _Rope_const_iterator&
1204 operator+=(ptrdiff_t __n)
1205 {
1206 if (__n >= 0)
1207 this->_M_incr(__n);
1208 else
1209 this->_M_decr(-__n);
1210 return *this;
1211 }
1212
1213 _Rope_const_iterator&
1214 operator--()
1215 {
f2ffecb1 1216 this->_M_decr(1);
b2acb86f 1217 return *this;
a6048f29
PC
1218 }
1219
1220 _Rope_const_iterator&
1221 operator-=(ptrdiff_t __n)
1222 {
1223 if (__n >= 0)
1224 this->_M_decr(__n);
1225 else
1226 this->_M_incr(-__n);
1227 return *this;
1228 }
1229
1230 _Rope_const_iterator
1231 operator++(int)
1232 {
b2acb86f 1233 size_t __old_pos = this->_M_current_pos;
f2ffecb1 1234 this->_M_incr(1);
b2acb86f
BK
1235 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1236 // This makes a subsequent dereference expensive.
1237 // Perhaps we should instead copy the iterator
1238 // if it has a valid cache?
a6048f29
PC
1239 }
1240
1241 _Rope_const_iterator
1242 operator--(int)
1243 {
b2acb86f 1244 size_t __old_pos = this->_M_current_pos;
f2ffecb1 1245 this->_M_decr(1);
b2acb86f 1246 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
a6048f29
PC
1247 }
1248
1249 template<class _CharT2, class _Alloc2>
1250 friend _Rope_const_iterator<_CharT2, _Alloc2>
1251 operator-(const _Rope_const_iterator<_CharT2, _Alloc2>& __x,
1252 ptrdiff_t __n);
1253
1254 template<class _CharT2, class _Alloc2>
1255 friend _Rope_const_iterator<_CharT2, _Alloc2>
1256 operator+(const _Rope_const_iterator<_CharT2, _Alloc2>& __x,
1257 ptrdiff_t __n);
1258
1259 template<class _CharT2, class _Alloc2>
1260 friend _Rope_const_iterator<_CharT2, _Alloc2>
1261 operator+(ptrdiff_t __n,
1262 const _Rope_const_iterator<_CharT2, _Alloc2>& __x);
1263
1264 reference
1265 operator[](size_t __n)
1266 { return rope<_CharT, _Alloc>::_S_fetch(this->_M_root,
1267 this->_M_current_pos + __n); }
1268
1269 template<class _CharT2, class _Alloc2>
1270 friend bool
1271 operator==(const _Rope_const_iterator<_CharT2, _Alloc2>& __x,
1272 const _Rope_const_iterator<_CharT2, _Alloc2>& __y);
1273
1274 template<class _CharT2, class _Alloc2>
1275 friend bool
1276 operator<(const _Rope_const_iterator<_CharT2, _Alloc2>& __x,
1277 const _Rope_const_iterator<_CharT2, _Alloc2>& __y);
1278
1279 template<class _CharT2, class _Alloc2>
1280 friend ptrdiff_t
1281 operator-(const _Rope_const_iterator<_CharT2, _Alloc2>& __x,
1282 const _Rope_const_iterator<_CharT2, _Alloc2>& __y);
1283 };
b2acb86f 1284
a6048f29
PC
1285 template<class _CharT, class _Alloc>
1286 class _Rope_iterator
1287 : public _Rope_iterator_base<_CharT, _Alloc>
1288 {
1289 friend class rope<_CharT, _Alloc>;
1290 protected:
1291 typedef typename _Rope_iterator_base<_CharT, _Alloc>::_RopeRep _RopeRep;
1292 rope<_CharT, _Alloc>* _M_root_rope;
46db4159
BK
1293
1294 // root is treated as a cached version of this, and is used to
1295 // detect changes to the underlying rope.
1296
1297 // Root is included in the reference count. This is necessary
1298 // so that we can detect changes reliably. Unfortunately, it
1299 // requires careful bookkeeping for the nonGC case.
a6048f29
PC
1300 _Rope_iterator(rope<_CharT, _Alloc>* __r, size_t __pos)
1301 : _Rope_iterator_base<_CharT, _Alloc>(__r->_M_tree_ptr, __pos),
fa30fe72 1302 _M_root_rope(__r)
b2acb86f 1303 { _RopeRep::_S_ref(this->_M_root);
a6048f29
PC
1304 if (!(__r -> empty()))
1305 _S_setcache(*this);
1306 }
b2acb86f 1307
a6048f29
PC
1308 void _M_check();
1309 public:
1310 typedef _Rope_char_ref_proxy<_CharT, _Alloc> reference;
1311 typedef _Rope_char_ref_proxy<_CharT, _Alloc>* pointer;
b2acb86f 1312
a6048f29
PC
1313 rope<_CharT, _Alloc>&
1314 container()
1315 { return *_M_root_rope; }
1316
1317 _Rope_iterator()
1318 {
b2acb86f 1319 this->_M_root = 0; // Needed for reference counting.
a6048f29
PC
1320 };
1321
1322 _Rope_iterator(const _Rope_iterator& __x)
1323 : _Rope_iterator_base<_CharT, _Alloc>(__x)
1324 {
b2acb86f
BK
1325 _M_root_rope = __x._M_root_rope;
1326 _RopeRep::_S_ref(this->_M_root);
a6048f29
PC
1327 }
1328
1329 _Rope_iterator(rope<_CharT, _Alloc>& __r, size_t __pos);
1330
1331 ~_Rope_iterator()
1332 { _RopeRep::_S_unref(this->_M_root); }
b2acb86f 1333
a6048f29
PC
1334 _Rope_iterator&
1335 operator=(const _Rope_iterator& __x)
1336 {
1337 _RopeRep* __old = this->_M_root;
1338
b2acb86f 1339 _RopeRep::_S_ref(__x._M_root);
a6048f29
PC
1340 if (0 != __x._M_buf_ptr)
1341 {
b2acb86f 1342 _M_root_rope = __x._M_root_rope;
a6048f29
PC
1343 *(static_cast<_Rope_iterator_base<_CharT, _Alloc>*>(this)) = __x;
1344 }
1345 else
1346 {
b2acb86f
BK
1347 this->_M_current_pos = __x._M_current_pos;
1348 this->_M_root = __x._M_root;
1349 _M_root_rope = __x._M_root_rope;
1350 this->_M_buf_ptr = 0;
a6048f29 1351 }
b2acb86f
BK
1352 _RopeRep::_S_unref(__old);
1353 return(*this);
a6048f29
PC
1354 }
1355
1356 reference
1357 operator*()
1358 {
b2acb86f 1359 _M_check();
a6048f29
PC
1360 if (0 == this->_M_buf_ptr)
1361 return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope,
1362 this->_M_current_pos);
1363 else
1364 return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope,
1365 this->_M_current_pos,
1366 *this->_M_buf_ptr);
1367 }
1368
49525c76
DG
1369 // See above comment.
1370 reference
1371 operator*() const
1372 {
1373 return *const_cast<_Rope_iterator&>(*this);
1374 }
1375
a6048f29
PC
1376 _Rope_iterator&
1377 operator++()
1378 {
f2ffecb1 1379 this->_M_incr(1);
b2acb86f 1380 return *this;
a6048f29
PC
1381 }
1382
1383 _Rope_iterator&
1384 operator+=(ptrdiff_t __n)
1385 {
1386 if (__n >= 0)
1387 this->_M_incr(__n);
1388 else
1389 this->_M_decr(-__n);
1390 return *this;
1391 }
1392
1393 _Rope_iterator&
1394 operator--()
1395 {
f2ffecb1 1396 this->_M_decr(1);
b2acb86f 1397 return *this;
a6048f29
PC
1398 }
1399
1400 _Rope_iterator&
1401 operator-=(ptrdiff_t __n)
1402 {
1403 if (__n >= 0)
1404 this->_M_decr(__n);
1405 else
1406 this->_M_incr(-__n);
1407 return *this;
1408 }
1409
1410 _Rope_iterator
1411 operator++(int)
1412 {
b2acb86f 1413 size_t __old_pos = this->_M_current_pos;
f2ffecb1 1414 this->_M_incr(1);
b2acb86f 1415 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
a6048f29
PC
1416 }
1417
1418 _Rope_iterator
1419 operator--(int)
1420 {
b2acb86f 1421 size_t __old_pos = this->_M_current_pos;
f2ffecb1 1422 this->_M_decr(1);
b2acb86f 1423 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
a6048f29 1424 }
b2acb86f 1425
a6048f29
PC
1426 reference
1427 operator[](ptrdiff_t __n)
1428 { return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope,
1429 this->_M_current_pos
1430 + __n); }
1431
1432 template<class _CharT2, class _Alloc2>
1433 friend bool
1434 operator==(const _Rope_iterator<_CharT2, _Alloc2>& __x,
1435 const _Rope_iterator<_CharT2, _Alloc2>& __y);
1436
1437 template<class _CharT2, class _Alloc2>
1438 friend bool
1439 operator<(const _Rope_iterator<_CharT2, _Alloc2>& __x,
1440 const _Rope_iterator<_CharT2, _Alloc2>& __y);
1441
1442 template<class _CharT2, class _Alloc2>
1443 friend ptrdiff_t
1444 operator-(const _Rope_iterator<_CharT2, _Alloc2>& __x,
1445 const _Rope_iterator<_CharT2, _Alloc2>& __y);
1446
1447 template<class _CharT2, class _Alloc2>
1448 friend _Rope_iterator<_CharT2, _Alloc2>
1449 operator-(const _Rope_iterator<_CharT2, _Alloc2>& __x, ptrdiff_t __n);
1450
1451 template<class _CharT2, class _Alloc2>
1452 friend _Rope_iterator<_CharT2, _Alloc2>
1453 operator+(const _Rope_iterator<_CharT2, _Alloc2>& __x, ptrdiff_t __n);
1454
1455 template<class _CharT2, class _Alloc2>
1456 friend _Rope_iterator<_CharT2, _Alloc2>
1457 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT2, _Alloc2>& __x);
1458 };
5640461d 1459
5640461d 1460
a6048f29
PC
1461 template <class _CharT, class _Alloc>
1462 struct _Rope_base
1463 : public _Alloc
1464 {
1465 typedef _Alloc allocator_type;
5640461d 1466
a6048f29
PC
1467 allocator_type
1468 get_allocator() const
1469 { return *static_cast<const _Alloc*>(this); }
b2acb86f 1470
c531371e
PC
1471 allocator_type&
1472 _M_get_allocator()
1473 { return *static_cast<_Alloc*>(this); }
1474
1475 const allocator_type&
1476 _M_get_allocator() const
1477 { return *static_cast<const _Alloc*>(this); }
1478
a6048f29
PC
1479 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1480 // The one in _Base may not be visible due to template rules.
b2acb86f 1481
a6048f29 1482 _Rope_base(_RopeRep* __t, const allocator_type&)
46db4159 1483 : _M_tree_ptr(__t) { }
a6048f29 1484
46db4159 1485 _Rope_base(const allocator_type&) { }
a6048f29
PC
1486
1487 // The only data member of a rope:
1488 _RopeRep *_M_tree_ptr;
1489
1490#define __ROPE_DEFINE_ALLOC(_Tp, __name) \
b2acb86f 1491 typedef typename \
5640461d 1492 _Alloc::template rebind<_Tp>::other __name##Alloc; \
b2acb86f 1493 static _Tp* __name##_allocate(size_t __n) \
5640461d 1494 { return __name##Alloc().allocate(__n); } \
b2acb86f 1495 static void __name##_deallocate(_Tp *__p, size_t __n) \
5640461d 1496 { __name##Alloc().deallocate(__p, __n); }
a6048f29
PC
1497 __ROPE_DEFINE_ALLOCS(_Alloc)
1498#undef __ROPE_DEFINE_ALLOC
1499
1500 protected:
1501 _Rope_base&
1502 operator=(const _Rope_base&);
1503
1504 _Rope_base(const _Rope_base&);
1505 };
26c691a8 1506
a6048f29
PC
1507 /**
1508 * This is an SGI extension.
1509 * @ingroup SGIextensions
1510 * @doctodo
1511 */
1512 template <class _CharT, class _Alloc>
1513 class rope : public _Rope_base<_CharT, _Alloc>
1514 {
1515 public:
1516 typedef _CharT value_type;
1517 typedef ptrdiff_t difference_type;
1518 typedef size_t size_type;
1519 typedef _CharT const_reference;
1520 typedef const _CharT* const_pointer;
1521 typedef _Rope_iterator<_CharT, _Alloc> iterator;
1522 typedef _Rope_const_iterator<_CharT, _Alloc> const_iterator;
1523 typedef _Rope_char_ref_proxy<_CharT, _Alloc> reference;
1524 typedef _Rope_char_ptr_proxy<_CharT, _Alloc> pointer;
1525
1526 friend class _Rope_iterator<_CharT, _Alloc>;
1527 friend class _Rope_const_iterator<_CharT, _Alloc>;
1528 friend struct _Rope_RopeRep<_CharT, _Alloc>;
1529 friend class _Rope_iterator_base<_CharT, _Alloc>;
1530 friend class _Rope_char_ptr_proxy<_CharT, _Alloc>;
1531 friend class _Rope_char_ref_proxy<_CharT, _Alloc>;
1532 friend struct _Rope_RopeSubstring<_CharT, _Alloc>;
26c691a8 1533
a6048f29
PC
1534 protected:
1535 typedef _Rope_base<_CharT, _Alloc> _Base;
1536 typedef typename _Base::allocator_type allocator_type;
1537 using _Base::_M_tree_ptr;
1538 using _Base::get_allocator;
c531371e 1539 using _Base::_M_get_allocator;
a6048f29
PC
1540 typedef __GC_CONST _CharT* _Cstrptr;
1541
1542 static _CharT _S_empty_c_str[1];
1543
1544 static bool
1545 _S_is0(_CharT __c)
1546 { return __c == _S_eos((_CharT*)0); }
1547
1548 enum { _S_copy_max = 23 };
1549 // For strings shorter than _S_copy_max, we copy to
1550 // concatenate.
b2acb86f 1551
a6048f29
PC
1552 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1553 typedef _Rope_RopeConcatenation<_CharT, _Alloc> _RopeConcatenation;
1554 typedef _Rope_RopeLeaf<_CharT, _Alloc> _RopeLeaf;
1555 typedef _Rope_RopeFunction<_CharT, _Alloc> _RopeFunction;
1556 typedef _Rope_RopeSubstring<_CharT, _Alloc> _RopeSubstring;
b2acb86f 1557
a6048f29
PC
1558 // Retrieve a character at the indicated position.
1559 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
b2acb86f 1560
a6048f29
PC
1561#ifndef __GC
1562 // Obtain a pointer to the character at the indicated position.
1563 // The pointer can be used to change the character.
1564 // If such a pointer cannot be produced, as is frequently the
1565 // case, 0 is returned instead.
1566 // (Returns nonzero only if all nodes in the path have a refcount
1567 // of 1.)
1568 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1569#endif
b2acb86f 1570
a6048f29
PC
1571 static bool
1572 _S_apply_to_pieces(// should be template parameter
1573 _Rope_char_consumer<_CharT>& __c,
1574 const _RopeRep* __r,
1575 size_t __begin, size_t __end);
1576 // begin and end are assumed to be in range.
b2acb86f 1577
a6048f29
PC
1578#ifndef __GC
1579 static void
1580 _S_unref(_RopeRep* __t)
1581 { _RopeRep::_S_unref(__t); }
1582
1583 static void
1584 _S_ref(_RopeRep* __t)
1585 { _RopeRep::_S_ref(__t); }
1586
1587#else /* __GC */
46db4159
BK
1588 static void _S_unref(_RopeRep*) { }
1589 static void _S_ref(_RopeRep*) { }
a6048f29 1590#endif
b2acb86f 1591
a6048f29
PC
1592#ifdef __GC
1593 typedef _Rope_RopeRep<_CharT, _Alloc>* _Self_destruct_ptr;
1594#else
1595 typedef _Rope_self_destruct_ptr<_CharT, _Alloc> _Self_destruct_ptr;
1596#endif
1597
1598 // _Result is counted in refcount.
1599 static _RopeRep* _S_substring(_RopeRep* __base,
b2acb86f
BK
1600 size_t __start, size_t __endp1);
1601
a6048f29
PC
1602 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1603 const _CharT* __iter, size_t __slen);
1604 // Concatenate rope and char ptr, copying __s.
1605 // Should really take an arbitrary iterator.
1606 // Result is counted in refcount.
1607 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1608 const _CharT* __iter,
1609 size_t __slen)
1610 // As above, but one reference to __r is about to be
1611 // destroyed. Thus the pieces may be recycled if all
1612 // relevant reference counts are 1.
1613#ifdef __GC
1614 // We can't really do anything since refcounts are unavailable.
1615 { return _S_concat_char_iter(__r, __iter, __slen); }
1616#else
1617 ;
1618#endif
b2acb86f 1619
a6048f29
PC
1620 static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
1621 // General concatenation on _RopeRep. _Result
1622 // has refcount of 1. Adjusts argument refcounts.
b2acb86f 1623
a6048f29
PC
1624 public:
1625 void
1626 apply_to_pieces(size_t __begin, size_t __end,
1627 _Rope_char_consumer<_CharT>& __c) const
1628 { _S_apply_to_pieces(__c, this->_M_tree_ptr, __begin, __end); }
b2acb86f
BK
1629
1630 protected:
1631
a6048f29
PC
1632 static size_t
1633 _S_rounded_up_size(size_t __n)
1634 { return _RopeLeaf::_S_rounded_up_size(__n); }
1635
1636 static size_t
1637 _S_allocated_capacity(size_t __n)
1638 {
1639 if (_S_is_basic_char_type((_CharT*)0))
1640 return _S_rounded_up_size(__n) - 1;
1641 else
1642 return _S_rounded_up_size(__n);
1643
1644 }
1645
1646 // Allocate and construct a RopeLeaf using the supplied allocator
1647 // Takes ownership of s instead of copying.
1648 static _RopeLeaf*
1649 _S_new_RopeLeaf(__GC_CONST _CharT *__s,
c531371e 1650 size_t __size, allocator_type& __a)
a6048f29
PC
1651 {
1652 _RopeLeaf* __space = typename _Base::_LAlloc(__a).allocate(1);
1653 return new(__space) _RopeLeaf(__s, __size, __a);
1654 }
1655
1656 static _RopeConcatenation*
1657 _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
c531371e 1658 allocator_type& __a)
a6048f29
PC
1659 {
1660 _RopeConcatenation* __space = typename _Base::_CAlloc(__a).allocate(1);
1661 return new(__space) _RopeConcatenation(__left, __right, __a);
1662 }
1663
1664 static _RopeFunction*
1665 _S_new_RopeFunction(char_producer<_CharT>* __f,
c531371e 1666 size_t __size, bool __d, allocator_type& __a)
a6048f29
PC
1667 {
1668 _RopeFunction* __space = typename _Base::_FAlloc(__a).allocate(1);
1669 return new(__space) _RopeFunction(__f, __size, __d, __a);
1670 }
1671
1672 static _RopeSubstring*
1673 _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
c531371e 1674 size_t __l, allocator_type& __a)
a6048f29
PC
1675 {
1676 _RopeSubstring* __space = typename _Base::_SAlloc(__a).allocate(1);
1677 return new(__space) _RopeSubstring(__b, __s, __l, __a);
1678 }
1679
1680 static _RopeLeaf*
1681 _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
c531371e 1682 size_t __size, allocator_type& __a)
a6048f29 1683#define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
fa30fe72 1684 _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
a6048f29
PC
1685 {
1686 if (0 == __size)
1687 return 0;
1688 _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
1689
1985f1cd 1690 __uninitialized_copy_n_a(__s, __size, __buf, __a);
a6048f29 1691 _S_cond_store_eos(__buf[__size]);
bc2631e0 1692 __try
a6048f29 1693 { return _S_new_RopeLeaf(__buf, __size, __a); }
bc2631e0 1694 __catch(...)
a6048f29
PC
1695 {
1696 _RopeRep::__STL_FREE_STRING(__buf, __size, __a);
1697 __throw_exception_again;
1698 }
1699 }
b2acb86f 1700
a6048f29
PC
1701 // Concatenation of nonempty strings.
1702 // Always builds a concatenation node.
1703 // Rebalances if the result is too deep.
1704 // Result has refcount 1.
1705 // Does not increment left and right ref counts even though
1706 // they are referenced.
1707 static _RopeRep*
1708 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1709
1710 // Concatenation helper functions
1711 static _RopeLeaf*
1712 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1713 const _CharT* __iter, size_t __slen);
1714 // Concatenate by copying leaf.
1715 // should take an arbitrary iterator
1716 // result has refcount 1.
1717#ifndef __GC
1718 static _RopeLeaf*
1719 _S_destr_leaf_concat_char_iter(_RopeLeaf* __r,
1720 const _CharT* __iter, size_t __slen);
1721 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1722#endif
1723
1724 private:
1725
1726 static size_t _S_char_ptr_len(const _CharT* __s);
1727 // slightly generalized strlen
1728
1729 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1730 : _Base(__t, __a) { }
1731
1732
1733 // Copy __r to the _CharT buffer.
1734 // Returns __buffer + __r->_M_size.
1735 // Assumes that buffer is uninitialized.
1736 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1737
1738 // Again, with explicit starting position and length.
1739 // Assumes that buffer is uninitialized.
1740 static _CharT* _S_flatten(_RopeRep* __r,
1741 size_t __start, size_t __len,
1742 _CharT* __buffer);
1743
1744 static const unsigned long
46db4159 1745 _S_min_len[__detail::_S_max_rope_depth + 1];
a6048f29
PC
1746
1747 static bool
1748 _S_is_balanced(_RopeRep* __r)
1749 { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
1750
1751 static bool
1752 _S_is_almost_balanced(_RopeRep* __r)
1753 { return (__r->_M_depth == 0
1754 || __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
1755
1756 static bool
1757 _S_is_roughly_balanced(_RopeRep* __r)
1758 { return (__r->_M_depth <= 1
1759 || __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
1760
1761 // Assumes the result is not empty.
1762 static _RopeRep*
1763 _S_concat_and_set_balanced(_RopeRep* __left, _RopeRep* __right)
1764 {
1765 _RopeRep* __result = _S_concat(__left, __right);
1766 if (_S_is_balanced(__result))
1767 __result->_M_is_balanced = true;
1768 return __result;
1769 }
1770
1771 // The basic rebalancing operation. Logically copies the
1772 // rope. The result has refcount of 1. The client will
1773 // usually decrement the reference count of __r.
1774 // The result is within height 2 of balanced by the above
1775 // definition.
1776 static _RopeRep* _S_balance(_RopeRep* __r);
1777
28dac70a 1778 // Add all unbalanced subtrees to the forest of balanced trees.
a6048f29
PC
1779 // Used only by balance.
1780 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1781
1782 // Add __r to forest, assuming __r is already balanced.
1783 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1784
1785 // Print to stdout, exposing structure
1786 static void _S_dump(_RopeRep* __r, int __indent = 0);
1787
1788 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1789 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1790
1791 public:
1792 bool
1793 empty() const
1794 { return 0 == this->_M_tree_ptr; }
1795
1796 // Comparison member function. This is public only for those
1797 // clients that need a ternary comparison. Others
1798 // should use the comparison operators below.
1799 int
1800 compare(const rope& __y) const
1801 { return _S_compare(this->_M_tree_ptr, __y._M_tree_ptr); }
1802
1803 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
c531371e
PC
1804 : _Base(__a)
1805 {
1806 this->_M_tree_ptr =
1807 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
1808 _M_get_allocator());
1809 }
a6048f29
PC
1810
1811 rope(const _CharT* __s, size_t __len,
1812 const allocator_type& __a = allocator_type())
c531371e
PC
1813 : _Base(__a)
1814 {
1815 this->_M_tree_ptr =
1816 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, _M_get_allocator());
1817 }
a6048f29
PC
1818
1819 // Should perhaps be templatized with respect to the iterator type
1820 // and use Sequence_buffer. (It should perhaps use sequence_buffer
1821 // even now.)
c531371e 1822 rope(const _CharT* __s, const _CharT* __e,
a6048f29 1823 const allocator_type& __a = allocator_type())
c531371e
PC
1824 : _Base(__a)
1825 {
1826 this->_M_tree_ptr =
1827 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, _M_get_allocator());
1828 }
a6048f29
PC
1829
1830 rope(const const_iterator& __s, const const_iterator& __e,
1831 const allocator_type& __a = allocator_type())
1832 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1833 __e._M_current_pos), __a)
1834 { }
1835
1836 rope(const iterator& __s, const iterator& __e,
1837 const allocator_type& __a = allocator_type())
1838 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1839 __e._M_current_pos), __a)
1840 { }
1841
1842 rope(_CharT __c, const allocator_type& __a = allocator_type())
1843 : _Base(__a)
1844 {
1845 _CharT* __buf = this->_Data_allocate(_S_rounded_up_size(1));
1846
c531371e 1847 _M_get_allocator().construct(__buf, __c);
bc2631e0 1848 __try
c531371e
PC
1849 {
1850 this->_M_tree_ptr = _S_new_RopeLeaf(__buf, 1,
1851 _M_get_allocator());
1852 }
bc2631e0 1853 __catch(...)
a6048f29 1854 {
c531371e 1855 _RopeRep::__STL_FREE_STRING(__buf, 1, _M_get_allocator());
a6048f29
PC
1856 __throw_exception_again;
1857 }
1858 }
1859
1860 rope(size_t __n, _CharT __c,
1861 const allocator_type& __a = allocator_type());
1862
1863 rope(const allocator_type& __a = allocator_type())
46db4159 1864 : _Base(0, __a) { }
b2acb86f 1865
46db4159 1866 // Construct a rope from a function that can compute its members
a6048f29
PC
1867 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1868 const allocator_type& __a = allocator_type())
1869 : _Base(__a)
1870 {
1871 this->_M_tree_ptr = (0 == __len) ?
1872 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1873 }
1874
1875 rope(const rope& __x, const allocator_type& __a = allocator_type())
1876 : _Base(__x._M_tree_ptr, __a)
1877 { _S_ref(this->_M_tree_ptr); }
b2acb86f 1878
26c691a8 1879 ~rope() throw()
a6048f29
PC
1880 { _S_unref(this->_M_tree_ptr); }
1881
1882 rope&
1883 operator=(const rope& __x)
1884 {
1885 _RopeRep* __old = this->_M_tree_ptr;
1886 this->_M_tree_ptr = __x._M_tree_ptr;
1887 _S_ref(this->_M_tree_ptr);
1888 _S_unref(__old);
1889 return *this;
1890 }
1891
1892 void
1893 clear()
1894 {
1895 _S_unref(this->_M_tree_ptr);
1896 this->_M_tree_ptr = 0;
1897 }
1898
1899 void
1900 push_back(_CharT __x)
1901 {
1902 _RopeRep* __old = this->_M_tree_ptr;
1903 this->_M_tree_ptr
1904 = _S_destr_concat_char_iter(this->_M_tree_ptr, &__x, 1);
1905 _S_unref(__old);
1906 }
1907
1908 void
1909 pop_back()
1910 {
1911 _RopeRep* __old = this->_M_tree_ptr;
46db4159
BK
1912 this->_M_tree_ptr = _S_substring(this->_M_tree_ptr,
1913 0, this->_M_tree_ptr->_M_size - 1);
a6048f29
PC
1914 _S_unref(__old);
1915 }
1916
1917 _CharT
1918 back() const
1919 { return _S_fetch(this->_M_tree_ptr, this->_M_tree_ptr->_M_size - 1); }
1920
1921 void
1922 push_front(_CharT __x)
1923 {
1924 _RopeRep* __old = this->_M_tree_ptr;
1925 _RopeRep* __left =
c531371e 1926 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, _M_get_allocator());
bc2631e0 1927 __try
a6048f29
PC
1928 {
1929 this->_M_tree_ptr = _S_concat(__left, this->_M_tree_ptr);
1930 _S_unref(__old);
1931 _S_unref(__left);
1932 }
bc2631e0 1933 __catch(...)
a6048f29
PC
1934 {
1935 _S_unref(__left);
1936 __throw_exception_again;
1937 }
1938 }
1939
1940 void
1941 pop_front()
1942 {
1943 _RopeRep* __old = this->_M_tree_ptr;
1944 this->_M_tree_ptr
1945 = _S_substring(this->_M_tree_ptr, 1, this->_M_tree_ptr->_M_size);
1946 _S_unref(__old);
1947 }
1948
1949 _CharT
1950 front() const
1951 { return _S_fetch(this->_M_tree_ptr, 0); }
1952
1953 void
1954 balance()
1955 {
1956 _RopeRep* __old = this->_M_tree_ptr;
1957 this->_M_tree_ptr = _S_balance(this->_M_tree_ptr);
1958 _S_unref(__old);
1959 }
91efdb82 1960
a6048f29
PC
1961 void
1962 copy(_CharT* __buffer) const
1963 {
91efdb82 1964 _Destroy_const(__buffer, __buffer + size(), _M_get_allocator());
a6048f29
PC
1965 _S_flatten(this->_M_tree_ptr, __buffer);
1966 }
1967
1968 // This is the copy function from the standard, but
1969 // with the arguments reordered to make it consistent with the
1970 // rest of the interface.
1971 // Note that this guaranteed not to compile if the draft standard
1972 // order is assumed.
1973 size_type
1974 copy(size_type __pos, size_type __n, _CharT* __buffer) const
1975 {
1976 size_t __size = size();
1977 size_t __len = (__pos + __n > __size? __size - __pos : __n);
91efdb82
PC
1978
1979 _Destroy_const(__buffer, __buffer + __len, _M_get_allocator());
a6048f29
PC
1980 _S_flatten(this->_M_tree_ptr, __pos, __len, __buffer);
1981 return __len;
1982 }
      // Print to stdout, exposing structure.  May be useful for
      // performance debugging.
      void
      dump()
      { _S_dump(this->_M_tree_ptr); }

      // Convert to 0 terminated string in new allocated memory.
      // Embedded 0s in the input do not terminate the copy.
      const _CharT* c_str() const;

      // As above, but also use the flattened representation as
      // the new rope representation.
      const _CharT* replace_with_c_str();

      // Reclaim memory for the c_str generated flattened string.
      // Intentionally undocumented, since it's hard to say when this
      // is safe for multiple threads.
      void
      delete_c_str ()
      {
	if (0 == this->_M_tree_ptr)
	  return;
	// If the root is a leaf whose data buffer *is* the cached C string
	// (as replace_with_c_str() arranges), the storage is shared with
	// the rope itself and must not be freed here.
	if (__detail::_S_leaf == this->_M_tree_ptr->_M_tag &&
	    ((_RopeLeaf*)this->_M_tree_ptr)->_M_data ==
	    this->_M_tree_ptr->_M_c_string)
	  {
	    // Representation shared
	    return;
	  }
#ifndef __GC
	// With garbage collection the string is reclaimed automatically.
	this->_M_tree_ptr->_M_free_c_string();
#endif
	this->_M_tree_ptr->_M_c_string = 0;
      }
2018
      // Element access returns by value (a rope cannot hand out a real
      // reference to an element of its immutable shared tree).
      _CharT
      operator[] (size_type __pos) const
      { return _S_fetch(this->_M_tree_ptr, __pos); }

      // NOTE(review): unlike std::string::at, no range check is performed
      // (the throw was never implemented — see the XXX comment).
      _CharT
      at(size_type __pos) const
      {
	// if (__pos >= size()) throw out_of_range; // XXX
	return (*this)[__pos];
      }

      const_iterator
      begin() const
      { return(const_iterator(this->_M_tree_ptr, 0)); }

      // An easy way to get a const iterator from a non-const container.
      const_iterator
      const_begin() const
      { return(const_iterator(this->_M_tree_ptr, 0)); }

      const_iterator
      end() const
      { return(const_iterator(this->_M_tree_ptr, size())); }

      const_iterator
      const_end() const
      { return(const_iterator(this->_M_tree_ptr, size())); }

      // Empty rope is represented by a null tree pointer.
      size_type
      size() const
      { return(0 == this->_M_tree_ptr? 0 : this->_M_tree_ptr->_M_size); }

      size_type
      length() const
      { return size(); }

      size_type
      max_size() const
      {
	return _S_min_len[int(__detail::_S_max_rope_depth) - 1] - 1;
	// Guarantees that the result can be sufficiently
	// balanced.  Longer ropes will probably still work,
	// but it's harder to make guarantees.
      }

      typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

      const_reverse_iterator
      rbegin() const
      { return const_reverse_iterator(end()); }

      const_reverse_iterator
      const_rbegin() const
      { return const_reverse_iterator(end()); }

      const_reverse_iterator
      rend() const
      { return const_reverse_iterator(begin()); }

      const_reverse_iterator
      const_rend() const
      { return const_reverse_iterator(begin()); }

      // Concatenation operators need access to _M_tree_ptr.
      template<class _CharT2, class _Alloc2>
	friend rope<_CharT2, _Alloc2>
	operator+(const rope<_CharT2, _Alloc2>& __left,
		  const rope<_CharT2, _Alloc2>& __right);

      template<class _CharT2, class _Alloc2>
	friend rope<_CharT2, _Alloc2>
	operator+(const rope<_CharT2, _Alloc2>& __left, const _CharT2* __right);

      template<class _CharT2, class _Alloc2>
	friend rope<_CharT2, _Alloc2>
	operator+(const rope<_CharT2, _Alloc2>& __left, _CharT2 __right);

      // The symmetric cases are intentionally omitted, since they're
      // presumed to be less common, and we don't handle them as well.

      // The following should really be templatized.  The first
      // argument should be an input iterator or forward iterator with
      // value_type _CharT.
      rope&
      append(const _CharT* __iter, size_t __n)
      {
	_RopeRep* __result =
	  _S_destr_concat_char_iter(this->_M_tree_ptr, __iter, __n);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
	return *this;
      }

      // Append a NUL-terminated string.
      rope&
      append(const _CharT* __c_string)
      {
	size_t __len = _S_char_ptr_len(__c_string);
	append(__c_string, __len);
	return(*this);
      }

      // Append the character range [__s, __e).
      rope&
      append(const _CharT* __s, const _CharT* __e)
      {
	_RopeRep* __result =
	  _S_destr_concat_char_iter(this->_M_tree_ptr, __s, __e - __s);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
	return *this;
      }

      // Append the rope slice [__s, __e); _Self_destruct_ptr unrefs the
      // temporary substring node on scope exit (RAII, exception-safe).
      rope&
      append(const_iterator __s, const_iterator __e)
      {
	_Self_destruct_ptr __appendee(_S_substring(__s._M_root,
						   __s._M_current_pos,
						   __e._M_current_pos));
	_RopeRep* __result = _S_concat(this->_M_tree_ptr,
				       (_RopeRep*)__appendee);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
	return *this;
      }

      // Append a single character.
      rope&
      append(_CharT __c)
      {
	_RopeRep* __result =
	  _S_destr_concat_char_iter(this->_M_tree_ptr, &__c, 1);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
	return *this;
      }

      // Append one default-constructed character.
      rope&
      append()
      { return append(_CharT()); } // XXX why?

      // Append another rope; trees are shared, so this is cheap.
      rope&
      append(const rope& __y)
      {
	_RopeRep* __result = _S_concat(this->_M_tree_ptr, __y._M_tree_ptr);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
	return *this;
      }

      // Append __n copies of __c (builds a temporary rope first).
      rope&
      append(size_t __n, _CharT __c)
      {
	rope<_CharT,_Alloc> __last(__n, __c);
	return append(__last);
      }

      // O(1) swap: just exchange tree pointers; refcounts are unaffected.
      void
      swap(rope& __b)
      {
	_RopeRep* __tmp = this->_M_tree_ptr;
	this->_M_tree_ptr = __b._M_tree_ptr;
	__b._M_tree_ptr = __tmp;
      }
    protected:
      // Core splice: build a tree equal to __old with [__pos1, __pos2)
      // replaced by __r (or deleted when __r is null).
      // Result is included in refcount.
      static _RopeRep*
      replace(_RopeRep* __old, size_t __pos1,
	      size_t __pos2, _RopeRep* __r)
      {
	if (0 == __old)
	  {
	    // Empty original: the result is just __r, with our reference.
	    _S_ref(__r);
	    return __r;
	  }
	// _Self_destruct_ptr releases the temporary substring nodes on
	// scope exit, even if a later _S_concat throws.
	_Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
	_Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size));
	_RopeRep* __result;

	if (0 == __r)
	  __result = _S_concat(__left, __right);
	else
	  {
	    _Self_destruct_ptr __left_result(_S_concat(__left, __r));
	    __result = _S_concat(__left_result, __right);
	  }
	return __result;
      }
    public:
      // Insert rope __r at position __p (a zero-width replace).
      void
      insert(size_t __p, const rope& __r)
      {
	_RopeRep* __result =
	  replace(this->_M_tree_ptr, __p, __p, __r._M_tree_ptr);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
      }

      // Insert __n copies of __c at position __p.
      void
      insert(size_t __p, size_t __n, _CharT __c)
      {
	rope<_CharT,_Alloc> __r(__n,__c);
	insert(__p, __r);
      }

      // Insert __n characters from __i at position __p, splicing the
      // original tree around a freshly built character leaf.
      void
      insert(size_t __p, const _CharT* __i, size_t __n)
      {
	_Self_destruct_ptr __left(_S_substring(this->_M_tree_ptr, 0, __p));
	_Self_destruct_ptr __right(_S_substring(this->_M_tree_ptr,
						__p, size()));
	_Self_destruct_ptr __left_result(_S_concat_char_iter(__left, __i, __n));
	// _S_ destr_concat_char_iter should be safe here.
	// But as it stands it's probably not a win, since __left
	// is likely to have additional references.
	_RopeRep* __result = _S_concat(__left_result, __right);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
      }

      // Insert a NUL-terminated string at __p.
      void
      insert(size_t __p, const _CharT* __c_string)
      { insert(__p, __c_string, _S_char_ptr_len(__c_string)); }

      // Insert a single character at __p.
      void
      insert(size_t __p, _CharT __c)
      { insert(__p, &__c, 1); }

      // Insert one default-constructed character at __p.
      void
      insert(size_t __p)
      {
	_CharT __c = _CharT();
	insert(__p, &__c, 1);
      }

      // Insert the character range [__i, __j) at __p.
      void
      insert(size_t __p, const _CharT* __i, const _CharT* __j)
      {
	rope __r(__i, __j);
	insert(__p, __r);
      }

      // Insert the const_iterator range [__i, __j) at __p.
      void
      insert(size_t __p, const const_iterator& __i,
	     const const_iterator& __j)
      {
	rope __r(__i, __j);
	insert(__p, __r);
      }

      // Insert the iterator range [__i, __j) at __p.
      void
      insert(size_t __p, const iterator& __i,
	     const iterator& __j)
      {
	rope __r(__i, __j);
	insert(__p, __r);
      }

      // (position, length) versions of replace operations:

      // Replace [__p, __p + __n) with rope __r.
      void
      replace(size_t __p, size_t __n, const rope& __r)
      {
	_RopeRep* __result =
	  replace(this->_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
	_S_unref(this->_M_tree_ptr);
	this->_M_tree_ptr = __result;
      }

      void
      replace(size_t __p, size_t __n,
	      const _CharT* __i, size_t __i_len)
      {
	rope __r(__i, __i_len);
	replace(__p, __n, __r);
      }

      void
      replace(size_t __p, size_t __n, _CharT __c)
      {
	rope __r(__c);
	replace(__p, __n, __r);
      }

      void
      replace(size_t __p, size_t __n, const _CharT* __c_string)
      {
	rope __r(__c_string);
	replace(__p, __n, __r);
      }

      void
      replace(size_t __p, size_t __n,
	      const _CharT* __i, const _CharT* __j)
      {
	rope __r(__i, __j);
	replace(__p, __n, __r);
      }

      void
      replace(size_t __p, size_t __n,
	      const const_iterator& __i, const const_iterator& __j)
      {
	rope __r(__i, __j);
	replace(__p, __n, __r);
      }

      void
      replace(size_t __p, size_t __n,
	      const iterator& __i, const iterator& __j)
      {
	rope __r(__i, __j);
	replace(__p, __n, __r);
      }

      // Single character variants:
      // Overwrite the character at __p via a mutable iterator proxy.
      void
      replace(size_t __p, _CharT __c)
      {
	iterator __i(this, __p);
	*__i = __c;
      }

      // The following all replace exactly one character at __p.
      void
      replace(size_t __p, const rope& __r)
      { replace(__p, 1, __r); }

      void
      replace(size_t __p, const _CharT* __i, size_t __i_len)
      { replace(__p, 1, __i, __i_len); }

      void
      replace(size_t __p, const _CharT* __c_string)
      { replace(__p, 1, __c_string); }

      void
      replace(size_t __p, const _CharT* __i, const _CharT* __j)
      { replace(__p, 1, __i, __j); }

      void
      replace(size_t __p, const const_iterator& __i,
	      const const_iterator& __j)
      { replace(__p, 1, __i, __j); }

      void
      replace(size_t __p, const iterator& __i,
	      const iterator& __j)
      { replace(__p, 1, __i, __j); }
2365
2366 // Erase, (position, size) variant.
2367 void
2368 erase(size_t __p, size_t __n)
2369 {
2370 _RopeRep* __result = replace(this->_M_tree_ptr, __p,
2371 __p + __n, 0);
2372 _S_unref(this->_M_tree_ptr);
2373 this->_M_tree_ptr = __result;
2374 }
2375
2376 // Erase, single character
2377 void
2378 erase(size_t __p)
2379 { erase(__p, __p + 1); }
      // Insert, iterator variants.  Each delegates to the positional
      // overload and returns the (unmoved) insertion iterator __p.
      iterator
      insert(const iterator& __p, const rope& __r)
      {
	insert(__p.index(), __r);
	return __p;
      }

      iterator
      insert(const iterator& __p, size_t __n, _CharT __c)
      {
	insert(__p.index(), __n, __c);
	return __p;
      }

      iterator insert(const iterator& __p, _CharT __c)
      {
	insert(__p.index(), __c);
	return __p;
      }

      // Insert one default-constructed character.
      iterator
      insert(const iterator& __p )
      {
	insert(__p.index());
	return __p;
      }

      iterator
      insert(const iterator& __p, const _CharT* c_string)
      {
	insert(__p.index(), c_string);
	return __p;
      }

      iterator
      insert(const iterator& __p, const _CharT* __i, size_t __n)
      {
	insert(__p.index(), __i, __n);
	return __p;
      }

      iterator
      insert(const iterator& __p, const _CharT* __i,
	     const _CharT* __j)
      {
	insert(__p.index(), __i, __j);
	return __p;
      }

      iterator
      insert(const iterator& __p,
	     const const_iterator& __i, const const_iterator& __j)
      {
	insert(__p.index(), __i, __j);
	return __p;
      }

      iterator
      insert(const iterator& __p,
	     const iterator& __i, const iterator& __j)
      {
	insert(__p.index(), __i, __j);
	return __p;
      }

      // Replace, range variants.  [__p, __q) is replaced; all delegate to
      // the (position, length) overloads.
      void
      replace(const iterator& __p, const iterator& __q, const rope& __r)
      { replace(__p.index(), __q.index() - __p.index(), __r); }

      void
      replace(const iterator& __p, const iterator& __q, _CharT __c)
      { replace(__p.index(), __q.index() - __p.index(), __c); }

      void
      replace(const iterator& __p, const iterator& __q,
	      const _CharT* __c_string)
      { replace(__p.index(), __q.index() - __p.index(), __c_string); }

      void
      replace(const iterator& __p, const iterator& __q,
	      const _CharT* __i, size_t __n)
      { replace(__p.index(), __q.index() - __p.index(), __i, __n); }

      void
      replace(const iterator& __p, const iterator& __q,
	      const _CharT* __i, const _CharT* __j)
      { replace(__p.index(), __q.index() - __p.index(), __i, __j); }

      void
      replace(const iterator& __p, const iterator& __q,
	      const const_iterator& __i, const const_iterator& __j)
      { replace(__p.index(), __q.index() - __p.index(), __i, __j); }

      void
      replace(const iterator& __p, const iterator& __q,
	      const iterator& __i, const iterator& __j)
      { replace(__p.index(), __q.index() - __p.index(), __i, __j); }

      // Replace, iterator variants.  Single-position forms (replace the
      // one character at __p).
      void
      replace(const iterator& __p, const rope& __r)
      { replace(__p.index(), __r); }

      void
      replace(const iterator& __p, _CharT __c)
      { replace(__p.index(), __c); }

      void
      replace(const iterator& __p, const _CharT* __c_string)
      { replace(__p.index(), __c_string); }

      void
      replace(const iterator& __p, const _CharT* __i, size_t __n)
      { replace(__p.index(), __i, __n); }

      void
      replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
      { replace(__p.index(), __i, __j); }

      void
      replace(const iterator& __p, const_iterator __i, const_iterator __j)
      { replace(__p.index(), __i, __j); }

      void
      replace(const iterator& __p, iterator __i, iterator __j)
      { replace(__p.index(), __i, __j); }

      // Iterator and range variants of erase.
      iterator
      erase(const iterator& __p, const iterator& __q)
      {
	// Save the index first: __p may reference this rope's old tree.
	size_t __p_index = __p.index();
	erase(__p_index, __q.index() - __p_index);
	return iterator(this, __p_index);
      }

      iterator
      erase(const iterator& __p)
      {
	size_t __p_index = __p.index();
	erase(__p_index, 1);
	return iterator(this, __p_index);
      }
      // NOTE(review): unlike std::string::substr, the default length
      // here is 1, not npos — a deliberate SGI quirk.
      rope
      substr(size_t __start, size_t __len = 1) const
      {
	return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr,
						 __start,
						 __start + __len));
      }

      rope
      substr(iterator __start, iterator __end) const
      {
	return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr,
						 __start.index(),
						 __end.index()));
      }

      // One-character substring at __start.
      rope
      substr(iterator __start) const
      {
	size_t __pos = __start.index();
	return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr,
						 __pos, __pos + 1));
      }

      rope
      substr(const_iterator __start, const_iterator __end) const
      {
	// This might eventually take advantage of the cache in the
	// iterator.
	return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr,
						 __start.index(),
						 __end.index()));
      }

      // One-character substring at __start.
      rope<_CharT, _Alloc>
      substr(const_iterator __start)
      {
	size_t __pos = __start.index();
	return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr,
						 __pos, __pos + 1));
      }

      // Sentinel return for "not found"; defined after the class.
      static const size_type npos;

      size_type find(_CharT __c, size_type __pos = 0) const;

      // Find the first occurrence of C string __s at or after __pos;
      // returns npos (or size(), under old SGI semantics) if absent.
      size_type
      find(const _CharT* __s, size_type __pos = 0) const
      {
	size_type __result_pos;
	const_iterator __result =
	  std::search(const_begin() + __pos, const_end(),
		      __s, __s + _S_char_ptr_len(__s));
	__result_pos = __result.index();
#ifndef __STL_OLD_ROPE_SEMANTICS
	if (__result_pos == size())
	  __result_pos = npos;
#endif
	return __result_pos;
      }

      // Mutable iterators track the rope object itself, not just the
      // tree, so writes through them update this rope.
      iterator
      mutable_begin()
      { return(iterator(this, 0)); }

      iterator
      mutable_end()
      { return(iterator(this, size())); }

      typedef std::reverse_iterator<iterator> reverse_iterator;

      reverse_iterator
      mutable_rbegin()
      { return reverse_iterator(mutable_end()); }

      reverse_iterator
      mutable_rend()
      { return reverse_iterator(mutable_begin()); }

      // Proxy reference allowing assignment to a single position.
      reference
      mutable_reference_at(size_type __pos)
      { return reference(this, __pos); }

#ifdef __STD_STUFF
      // Standard-container-flavoured interface (mutating operator[],
      // resize/reserve no-ops, mutable begin/end).  Off by default.
      reference
      operator[] (size_type __pos)
      { return _char_ref_proxy(this, __pos); }

      reference
      at(size_type __pos)
      {
	// if (__pos >= size()) throw out_of_range; // XXX
	return (*this)[__pos];
      }

      // Ropes are immutable trees: resize/reserve are meaningless no-ops.
      void resize(size_type __n, _CharT __c) { }
      void resize(size_type __n) { }
      void reserve(size_type __res_arg = 0) { }

      size_type
      capacity() const
      { return max_size(); }

      // Stuff below this line is dangerous because it's error prone.
      // I would really like to get rid of it.
      // copy function with funny arg ordering.
      size_type
      copy(_CharT* __buffer, size_type __n,
	   size_type __pos = 0) const
      { return copy(__pos, __n, __buffer); }

      iterator
      end()
      { return mutable_end(); }

      iterator
      begin()
      { return mutable_begin(); }

      reverse_iterator
      rend()
      { return mutable_rend(); }

      reverse_iterator
      rbegin()
      { return mutable_rbegin(); }

#else
      // Default: non-const begin/end still yield const iterators.
      const_iterator
      end()
      { return const_end(); }

      const_iterator
      begin()
      { return const_begin(); }

      const_reverse_iterator
      rend()
      { return const_rend(); }

      const_reverse_iterator
      rbegin()
      { return const_rbegin(); }

#endif
2672 };
2673
  // Out-of-class definition of the "not found" sentinel: the maximum
  // value representable by size_type.
  template <class _CharT, class _Alloc>
    const typename rope<_CharT, _Alloc>::size_type
    rope<_CharT, _Alloc>::npos = (size_type)(-1);
2677
  // Comparison and arithmetic for _Rope_const_iterator.  Equality
  // requires the same rope (root) AND the same position; the ordering
  // operators compare position only and are all derived from
  // operator< in the canonical way.
  template <class _CharT, class _Alloc>
    inline bool operator==(const _Rope_const_iterator<_CharT, _Alloc>& __x,
			   const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return (__x._M_current_pos == __y._M_current_pos
	      && __x._M_root == __y._M_root); }

  template <class _CharT, class _Alloc>
    inline bool operator<(const _Rope_const_iterator<_CharT, _Alloc>& __x,
			  const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return (__x._M_current_pos < __y._M_current_pos); }

  template <class _CharT, class _Alloc>
    inline bool operator!=(const _Rope_const_iterator<_CharT, _Alloc>& __x,
			   const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return !(__x == __y); }

  template <class _CharT, class _Alloc>
    inline bool operator>(const _Rope_const_iterator<_CharT, _Alloc>& __x,
			  const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return __y < __x; }

  template <class _CharT, class _Alloc>
    inline bool
    operator<=(const _Rope_const_iterator<_CharT, _Alloc>& __x,
	       const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return !(__y < __x); }

  template <class _CharT, class _Alloc>
    inline bool
    operator>=(const _Rope_const_iterator<_CharT, _Alloc>& __x,
	       const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return !(__x < __y); }

  // Random-access arithmetic: distance and offset by position.
  template <class _CharT, class _Alloc>
    inline ptrdiff_t
    operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x,
	      const _Rope_const_iterator<_CharT, _Alloc>& __y)
    { return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

  template <class _CharT, class _Alloc>
    inline _Rope_const_iterator<_CharT, _Alloc>
    operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x, ptrdiff_t __n)
    { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root,
						  __x._M_current_pos - __n); }

  template <class _CharT, class _Alloc>
    inline _Rope_const_iterator<_CharT, _Alloc>
    operator+(const _Rope_const_iterator<_CharT, _Alloc>& __x, ptrdiff_t __n)
    { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root,
						  __x._M_current_pos + __n); }

  template <class _CharT, class _Alloc>
    inline _Rope_const_iterator<_CharT, _Alloc>
    operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT, _Alloc>& __x)
    { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root,
						  __x._M_current_pos + __n); }

  // Same operator set for the mutable _Rope_iterator; equality compares
  // the owning rope (_M_root_rope) rather than the tree root.
  template <class _CharT, class _Alloc>
    inline bool
    operator==(const _Rope_iterator<_CharT, _Alloc>& __x,
	       const _Rope_iterator<_CharT, _Alloc>& __y)
    {return (__x._M_current_pos == __y._M_current_pos
	     && __x._M_root_rope == __y._M_root_rope); }

  template <class _CharT, class _Alloc>
    inline bool
    operator<(const _Rope_iterator<_CharT, _Alloc>& __x,
	      const _Rope_iterator<_CharT, _Alloc>& __y)
    { return (__x._M_current_pos < __y._M_current_pos); }

  template <class _CharT, class _Alloc>
    inline bool
    operator!=(const _Rope_iterator<_CharT, _Alloc>& __x,
	       const _Rope_iterator<_CharT, _Alloc>& __y)
    { return !(__x == __y); }

  template <class _CharT, class _Alloc>
    inline bool
    operator>(const _Rope_iterator<_CharT, _Alloc>& __x,
	      const _Rope_iterator<_CharT, _Alloc>& __y)
    { return __y < __x; }

  template <class _CharT, class _Alloc>
    inline bool
    operator<=(const _Rope_iterator<_CharT, _Alloc>& __x,
	       const _Rope_iterator<_CharT, _Alloc>& __y)
    { return !(__y < __x); }

  template <class _CharT, class _Alloc>
    inline bool
    operator>=(const _Rope_iterator<_CharT, _Alloc>& __x,
	       const _Rope_iterator<_CharT, _Alloc>& __y)
    { return !(__x < __y); }

  template <class _CharT, class _Alloc>
    inline ptrdiff_t
    operator-(const _Rope_iterator<_CharT, _Alloc>& __x,
	      const _Rope_iterator<_CharT, _Alloc>& __y)
    { return ((ptrdiff_t)__x._M_current_pos
	      - (ptrdiff_t)__y._M_current_pos); }

  template <class _CharT, class _Alloc>
    inline _Rope_iterator<_CharT, _Alloc>
    operator-(const _Rope_iterator<_CharT, _Alloc>& __x,
	      ptrdiff_t __n)
    { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope,
					    __x._M_current_pos - __n); }

  template <class _CharT, class _Alloc>
    inline _Rope_iterator<_CharT, _Alloc>
    operator+(const _Rope_iterator<_CharT, _Alloc>& __x, ptrdiff_t __n)
    { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope,
					    __x._M_current_pos + __n); }

  template <class _CharT, class _Alloc>
    inline _Rope_iterator<_CharT, _Alloc>
    operator+(ptrdiff_t __n, const _Rope_iterator<_CharT, _Alloc>& __x)
    { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope,
					    __x._M_current_pos + __n); }
2797
  // Concatenate two ropes.  O(1) tree sharing: no characters are copied.
  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left,
	      const rope<_CharT, _Alloc>& __right)
    {
      // Inlining this should make it possible to keep __left and
      // __right in registers.
      typedef rope<_CharT, _Alloc> rope_type;
      return rope_type(rope_type::_S_concat(__left._M_tree_ptr,
					    __right._M_tree_ptr));
    }

  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>&
    operator+=(rope<_CharT, _Alloc>& __left,
	       const rope<_CharT, _Alloc>& __right)
    {
      __left.append(__right);
      return __left;
    }

  // Concatenate a rope and a NUL-terminated string.
  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left,
	      const _CharT* __right)
    {
      typedef rope<_CharT, _Alloc> rope_type;
      size_t __rlen = rope_type::_S_char_ptr_len(__right);
      return rope_type(rope_type::_S_concat_char_iter(__left._M_tree_ptr,
						      __right, __rlen));
    }

  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>&
    operator+=(rope<_CharT, _Alloc>& __left,
	       const _CharT* __right)
    {
      __left.append(__right);
      return __left;
    }

  // Concatenate a rope and a single character.
  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>
    operator+(const rope<_CharT, _Alloc>& __left, _CharT __right)
    {
      typedef rope<_CharT, _Alloc> rope_type;
      return rope_type(rope_type::_S_concat_char_iter(__left._M_tree_ptr,
						      &__right, 1));
    }

  template <class _CharT, class _Alloc>
    inline rope<_CharT, _Alloc>&
    operator+=(rope<_CharT, _Alloc>& __left, _CharT __right)
    {
      __left.append(__right);
      return __left;
    }

  // Lexicographic comparison built on rope::compare (declared elsewhere);
  // the remaining orderings are derived from < and ==.
  template <class _CharT, class _Alloc>
    bool
    operator<(const rope<_CharT, _Alloc>& __left,
	      const rope<_CharT, _Alloc>& __right)
    { return __left.compare(__right) < 0; }

  template <class _CharT, class _Alloc>
    bool
    operator==(const rope<_CharT, _Alloc>& __left,
	       const rope<_CharT, _Alloc>& __right)
    { return __left.compare(__right) == 0; }

  // Char-pointer proxies compare equal when they designate the same
  // position in the same rope.
  template <class _CharT, class _Alloc>
    inline bool
    operator==(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x,
	       const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y)
    { return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root); }

  template <class _CharT, class _Alloc>
    inline bool
    operator!=(const rope<_CharT, _Alloc>& __x,
	       const rope<_CharT, _Alloc>& __y)
    { return !(__x == __y); }

  template <class _CharT, class _Alloc>
    inline bool
    operator>(const rope<_CharT, _Alloc>& __x,
	      const rope<_CharT, _Alloc>& __y)
    { return __y < __x; }

  template <class _CharT, class _Alloc>
    inline bool
    operator<=(const rope<_CharT, _Alloc>& __x,
	       const rope<_CharT, _Alloc>& __y)
    { return !(__y < __x); }

  template <class _CharT, class _Alloc>
    inline bool
    operator>=(const rope<_CharT, _Alloc>& __x,
	       const rope<_CharT, _Alloc>& __y)
    { return !(__x < __y); }

  template <class _CharT, class _Alloc>
    inline bool
    operator!=(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x,
	       const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y)
    { return !(__x == __y); }

  // Stream insertion; defined in ropeimpl.h.
  template<class _CharT, class _Traits, class _Alloc>
    std::basic_ostream<_CharT, _Traits>&
    operator<<(std::basic_ostream<_CharT, _Traits>& __o,
	       const rope<_CharT, _Alloc>& __r);

  // Convenience typedefs for the common character types.
  typedef rope<char> crope;
  typedef rope<wchar_t> wrope;

  // Helpers returning a writable proxy for position __i of __c.
  inline crope::reference
  __mutable_reference_at(crope& __c, size_t __i)
  { return __c.mutable_reference_at(__i); }

  inline wrope::reference
  __mutable_reference_at(wrope& __c, size_t __i)
  { return __c.mutable_reference_at(__i); }

  // O(1) swap of the two ropes' tree pointers.
  template <class _CharT, class _Alloc>
    inline void
    swap(rope<_CharT, _Alloc>& __x, rope<_CharT, _Alloc>& __y)
    { __x.swap(__y); }
2924
0c3de900
BK
2925_GLIBCXX_END_NAMESPACE
2926
2927
namespace std
{
namespace tr1
{
  // Cheap O(1) hash for char ropes: mixes only the first character,
  // the last character and the length (avoids flattening the tree).
  template<>
    struct hash<__gnu_cxx::crope>
    {
      size_t
      operator()(const __gnu_cxx::crope& __str) const
      {
	size_t __size = __str.size();
	if (0 == __size)
	  return 0;
	return 13 * __str[0] + 5 * __str[__size - 1] + __size;
      }
    };


  // Same scheme for wide-character ropes.
  template<>
    struct hash<__gnu_cxx::wrope>
    {
      size_t
      operator()(const __gnu_cxx::wrope& __str) const
      {
	size_t __size = __str.size();
	if (0 == __size)
	  return 0;
	return 13 * __str[0] + 5 * __str[__size - 1] + __size;
      }
    };
} // namespace tr1
} // namespace std
b2acb86f
BK
2960
2961# include <ext/ropeimpl.h>
2962
2963#endif