Go to the documentation of this file.
89 #ifndef LLVM_SUPPORT_CONVERTUTF_H
90 #define LLVM_SUPPORT_CONVERTUTF_H
96 #include <system_error>
118 #define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD
119 #define UNI_MAX_BMP (UTF32)0x0000FFFF
120 #define UNI_MAX_UTF16 (UTF32)0x0010FFFF
121 #define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF
122 #define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF
124 #define UNI_MAX_UTF8_BYTES_PER_CODE_POINT 4
126 #define UNI_UTF16_BYTE_ORDER_MARK_NATIVE 0xFEFF
127 #define UNI_UTF16_BYTE_ORDER_MARK_SWAPPED 0xFFFE
129 #define UNI_UTF32_BYTE_ORDER_MARK_NATIVE 0x0000FEFF
130 #define UNI_UTF32_BYTE_ORDER_MARK_SWAPPED 0xFFFE0000
145 const UTF8** sourceStart,
const UTF8* sourceEnd,
153 const UTF8** sourceStart,
const UTF8* sourceEnd,
161 const UTF8** sourceStart,
const UTF8* sourceEnd,
165 const UTF16** sourceStart,
const UTF16* sourceEnd,
169 const UTF32** sourceStart,
const UTF32* sourceEnd,
173 const UTF16** sourceStart,
const UTF16* sourceEnd,
177 const UTF32** sourceStart,
const UTF32* sourceEnd,
189 template <
typename T>
class ArrayRef;
190 template <
typename T>
class SmallVectorImpl;
203 char *&ResultPtr,
const UTF8 *&ErrorPtr);
252 const UTF8 *sourceEnd,
255 if (*source == sourceEnd)
311 SmallVectorImpl<UTF16> &DstUTF16);
316 std::error_code UTF8ToUTF16(StringRef utf8, SmallVectorImpl<wchar_t> &utf16);
318 std::error_code CurCPToUTF16(StringRef utf8, SmallVectorImpl<wchar_t> &utf16);
319 std::error_code UTF16ToUTF8(
const wchar_t *utf16,
size_t utf16_len,
320 SmallVectorImpl<char> &utf8);
322 std::error_code UTF16ToCurCP(
const wchar_t *utf16,
size_t utf16_len,
323 SmallVectorImpl<char> &utf8);
ConvertUTF.h declares routines for converting between the UTF-8, UTF-16, and UTF-32 Unicode encodings.
ConversionResult ConvertUTF8toUTF32Partial(const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
Convert a partial UTF8 sequence to UTF32.
bool ConvertCodePointToUTF8(unsigned Source, char *&ResultPtr)
Convert a Unicode code point to a UTF8 sequence.
bool hasUTF16ByteOrderMark(ArrayRef< char > SrcBytes)
Returns true if a blob of text starts with a UTF-16 big or little endian byte order mark.
ConversionResult ConvertUTF32toUTF8(const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)
ConversionResult convertUTF8Sequence(const UTF8 **source, const UTF8 *sourceEnd, UTF32 *target, ConversionFlags flags)
Convert the first UTF8 sequence in the given source buffer to a UTF32 code point.
bool convertUTF16ToUTF8String(ArrayRef< char > SrcBytes, std::string &Out)
Converts a stream of raw bytes assumed to be UTF16 into a UTF8 std::string.
Boolean isLegalUTF8Sequence(const UTF8 *source, const UTF8 *sourceEnd)
ConversionResult ConvertUTF32toUTF16(const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)
ConversionResult ConvertUTF8toUTF32(const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
Convert a UTF8 sequence to UTF32.
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
StringRef - Represent a constant reference to a string, i.e. a character array and a length, which need not be null terminated.
bool convertWideToUTF8(const std::wstring &Source, std::string &Result)
Converts a std::wstring to a UTF-8 encoded std::string.
unsigned getNumBytesForUTF8(UTF8 firstByte)
bool convertUTF8ToUTF16String(StringRef SrcUTF8, SmallVectorImpl< UTF16 > &DstUTF16)
Converts a UTF-8 string into a UTF-16 string with native endianness.
ConversionResult ConvertUTF8toUTF16(const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)
ConversionResult ConvertUTF16toUTF8(const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)
Boolean isLegalUTF8String(const UTF8 **source, const UTF8 *sourceEnd)
bool ConvertUTF8toWide(unsigned WideCharWidth, llvm::StringRef Source, char *&ResultPtr, const UTF8 *&ErrorPtr)
Convert a UTF8 StringRef to UTF8, UTF16, or UTF32 depending on WideCharWidth.
ConversionResult ConvertUTF16toUTF32(const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
bool convertUTF32ToUTF8String(ArrayRef< char > SrcBytes, std::string &Out)
Converts a stream of raw bytes assumed to be UTF32 into a UTF8 std::string.