Instead of just omitting the definition of __unicode::_Utf8_view when
char8_t is disabled (e.g. with -fno-char8_t), we can define it in terms
of char instead, matching the char-based u8 literals used in that mode.
libstdc++-v3/ChangeLog:

	PR libstdc++/114519
	* include/bits/unicode.h (_Utf8_view) [!__cpp_char8_t]: Define
	using char instead of char8_t.
	* testsuite/ext/unicode/view.cc: Use u8""sv literals to create
	string views, instead of std::u8string_view.
--- a/libstdc++-v3/include/bits/unicode.h
+++ b/libstdc++-v3/include/bits/unicode.h
 #ifdef __cpp_char8_t
   template<typename _View>
     using _Utf8_view = _Utf_view<char8_t, _View>;
+#else
+  template<typename _View>
+    using _Utf8_view = _Utf_view<char, _View>;
 #endif
 
   template<typename _View>
     using _Utf16_view = _Utf_view<char16_t, _View>;
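
For illustration, not part of the patch: with char8_t disabled, u8 string
literals have type const char[], so the new char-based alias is what lets
_Utf8_view adapt such ranges. A minimal sketch, assuming a libstdc++ where
the internal <bits/unicode.h> header can be included directly (as the
testsuite does); _Utf8_view and std::__unicode are internal names, not
public API. Compile with g++ -std=c++23, optionally adding -fno-char8_t:

    #include <bits/unicode.h>  // internal libstdc++ header (assumption)
    #include <iterator>
    #include <string_view>

    int main()
    {
      namespace uc = std::__unicode;
      using namespace std::string_view_literals;
      // std::u8string_view normally; std::string_view under -fno-char8_t.
      auto s8 = u8"£"sv;
      uc::_Utf8_view v(s8);  // iterates UTF-8 code units either way
      // U+00A3 encodes as two UTF-8 code units (0xC2 0xA3).
      return std::ranges::distance(v) == 2 ? 0 : 1;
    }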
--- a/libstdc++-v3/testsuite/ext/unicode/view.cc
+++ b/libstdc++-v3/testsuite/ext/unicode/view.cc
 constexpr void
 test_utf8_to_utf8()
 {
-  const std::u8string_view s8 = u8"£🇬🇧 €🇪🇺 æбçδé ♠♥♦♣ 🤡";
+  const auto s8 = u8"£🇬🇧 €🇪🇺 æбçδé ♠♥♦♣ 🤡"sv;
   uc::_Utf8_view v(s8);
   VERIFY( std::ranges::distance(v) == s8.size() );
   VERIFY( std::ranges::equal(v, s8) );
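
Why u8""sv compiles in both modes (standard behaviour, nothing
libstdc++-specific): the sv suffix deduces its result type from the
literal's character type, so the same line yields std::u8string_view when
char8_t exists and std::string_view when it does not. The test file is
assumed to have std::string_view_literals in scope for the suffix; that
hunk is not shown here. A self-contained sketch:

    #include <string_view>
    #include <type_traits>

    using namespace std::string_view_literals;

    #ifdef __cpp_char8_t
    static_assert(std::is_same_v<decltype(u8"x"sv), std::u8string_view>);
    #else
    static_assert(std::is_same_v<decltype(u8"x"sv), std::string_view>);
    #endif

    int main() { }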
 constexpr void
 test_utf8_to_utf16()
 {
-  const std::u8string_view s8 = u8"£🇬🇧 €🇪🇺 æбçδé ♠♥♦♣ 🤡";
+  const auto s8 = u8"£🇬🇧 €🇪🇺 æбçδé ♠♥♦♣ 🤡"sv;
   const std::u16string_view s16 = u"£🇬🇧 €🇪🇺 æбçδé ♠♥♦♣ 🤡";
   uc::_Utf16_view v(s8);
   VERIFY( std::ranges::distance(v) == s16.size() );
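
The expected count comes from s16 rather than s8 because _Utf16_view
transcodes to char16_t code units on the fly, so code points above U+FFFF
become surrogate pairs. A sketch under the same assumptions as above
(internal header and names):

    #include <bits/unicode.h>  // internal libstdc++ header (assumption)
    #include <iterator>
    #include <string_view>

    int main()
    {
      namespace uc = std::__unicode;
      using namespace std::string_view_literals;
      auto s8 = u8"🤡"sv;     // U+1F921: four UTF-8 code units
      uc::_Utf16_view v(s8);  // transcodes to UTF-16 while iterating
      // One code point above U+FFFF is one surrogate pair: two code units.
      return std::ranges::distance(v) == 2 ? 0 : 1;
    }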