Modified StringUtil to use a u16string instead of a wstring in the non-win32 UTF16BEToUTF8 method.

This commit is contained in:
34will 2018-01-10 14:49:35 +00:00
parent 41fe276323
commit ba111959fa
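Context for the change: outside Windows, wchar_t is typically 32 bits wide, so a std::wstring is the wrong container for 16-bit UTF-16 code units, whereas char16_t is always exactly one code unit. A minimal sketch of the size mismatch (illustrative only, not part of the commit):

#include <string>

// Sketch only: on Linux/macOS, wchar_t is usually 4 bytes, so a wstring
// of N "UTF-16" units occupies 4*N bytes and an iconv conversion labelled
// "UTF-16LE" would read it with the wrong element size. char16_t is
// guaranteed to be one 16-bit code unit everywhere.
static_assert(sizeof(char16_t) == 2, "char16_t is always one UTF-16 code unit");

int main()
{
  std::u16string units(4, u'\0');  // always 4 * 2 = 8 bytes of code-unit payload
  std::wstring wide(4, L'\0');     // 4 * 4 = 16 bytes on a typical Linux build
  // Returns 1 on platforms where the two sizes diverge, 0 where they match.
  return units.size() * sizeof(char16_t) == wide.size() * sizeof(wchar_t) ? 0 : 1;
}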


@@ -472,6 +472,14 @@ std::string CP1252ToUTF8(const std::string& input)
   return UTF16ToUTF8(CPToUTF16(CODEPAGE_WINDOWS_1252, input));
 }
+
+std::string UTF16BEToUTF8(const char16_t* str, size_t max_size)
+{
+  const char16_t* str_end = std::find(str, str + max_size, '\0');
+  std::wstring result(static_cast<size_t>(str_end - str), '\0');
+  std::transform(str, str_end, result.begin(), static_cast<u16 (&)(u16)>(Common::swap16));
+  return UTF16ToUTF8(result);
+}
 
 #else
 
 template <typename T>
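The win32 branch above byte-swaps each big-endian code unit and hands the result to the existing UTF16ToUTF8 overload. A self-contained sketch of that swap step, with a local swap16 standing in for Common::swap16 (assumed here to be an unconditional two-byte swap):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <string>

// Stand-in for Common::swap16 (assumption: an unconditional byte swap).
static std::uint16_t swap16(std::uint16_t value)
{
  return static_cast<std::uint16_t>((value >> 8) | (value << 8));
}

// Illustrative copy of the pattern: stop at the first NUL, since
// fixed-size game data is typically NUL-padded, then byte-swap each
// code unit into the output string.
std::u16string SwapUTF16BE(const char16_t* str, std::size_t max_size)
{
  const char16_t* str_end = std::find(str, str + max_size, u'\0');
  std::u16string result(static_cast<std::size_t>(str_end - str), u'\0');
  std::transform(str, str_end, result.begin(), [](char16_t c) {
    return static_cast<char16_t>(swap16(static_cast<std::uint16_t>(c)));
  });
  return result;
}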
@@ -559,12 +567,12 @@ std::string UTF16ToUTF8(const std::wstring& input)
   return CodeToUTF8("UTF-16LE", input);
 }
 #endif
 
 std::string UTF16BEToUTF8(const char16_t* str, size_t max_size)
 {
   const char16_t* str_end = std::find(str, str + max_size, '\0');
-  std::wstring result(static_cast<size_t>(str_end - str), '\0');
+  std::u16string result(static_cast<size_t>(str_end - str), '\0');
   std::transform(str, str_end, result.begin(), static_cast<u16 (&)(u16)>(Common::swap16));
-  return UTF16ToUTF8(result);
+  return CodeToUTF8("UTF-16LE", result);
 }
 #endif
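Note that pairing an unconditional byte swap with the "UTF-16LE" label is consistent: iconv interprets the buffer's bytes rather than its char16_t values. A self-contained demo of the end-to-end idea for the ASCII subset (the real code delegates the UTF-16 to UTF-8 step to iconv via CodeToUTF8; the value-based shortcut below assumes a little-endian host):

#include <algorithm>
#include <cstring>
#include <string>

int main()
{
  // Raw big-endian UTF-16 bytes for "Hi", NUL-padded the way
  // fixed-width fields in disc metadata often are.
  const unsigned char raw[8] = {0x00, 'H', 0x00, 'i', 0x00, 0x00, 0x00, 0x00};
  char16_t buffer[4];
  std::memcpy(buffer, raw, sizeof(raw));

  const char16_t* end = std::find(buffer, buffer + 4, u'\0');
  std::string utf8;
  for (const char16_t* p = buffer; p != end; ++p)
  {
    // Byte swap, as Common::swap16 would (little-endian host assumed).
    const char16_t native = static_cast<char16_t>((*p >> 8) | (*p << 8));
    utf8 += static_cast<char>(native);  // ASCII-only shortcut for the demo
  }
  return utf8 == "Hi" ? 0 : 1;  // returns 0 when the conversion round-trips
}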