How to convert a UTF-16 string to a UTF-8 string in C++

6

Consider the following:

STDMETHODIMP CFileSystemAPI::setRRConfig( BSTR config_str, VARIANT* ret )
{
    mReportReaderFactory.reset( new sbis::report_reader::ReportReaderFactory() );

    USES_CONVERSION;
    std::string configuration_str = W2A( config_str );

But in config_str I receive a UTF-16 string. How can I convert it to UTF-8 in this code?

4 Answers

5

You can do it like this:

#include <windows.h>   // WideCharToMultiByte, GetLastError
#include <string>
#include <vector>
#include <sstream>
#include <stdexcept>

std::string WstrToUtf8Str(const std::wstring& wstr)
{
  std::string retStr;
  if (!wstr.empty())
  {
    int sizeRequired = WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(), -1, NULL, 0, NULL, NULL);

    if (sizeRequired > 0)
    {
      std::vector<char> utf8String(sizeRequired);
      int bytesConverted = WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(),
                           -1, &utf8String[0], static_cast<int>(utf8String.size()),
                           NULL, NULL);
      if (bytesConverted != 0)
      {
        retStr = &utf8String[0];
      }
      else
      {
        std::stringstream err;
        // Note: streaming a wchar_t* into a narrow stream would print a
        // pointer value, not the string, so report the error code instead.
        err << __FUNCTION__
            << " failed to convert wstring to UTF-8 (error code "
            << GetLastError() << ")";
        throw std::runtime_error( err.str() );
      }
    }
  }
  return retStr;
}

You can pass your BSTR to the function as a std::wstring.
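For example, at the call site from the question, a minimal sketch (SysStringLen is the Win32 API that returns a BSTR's length; note that a NULL BSTR is a legal "empty string" in COM, which the sketch handles explicitly):

STDMETHODIMP CFileSystemAPI::setRRConfig( BSTR config_str, VARIANT* ret )
{
    // A BSTR is a length-prefixed UTF-16 string; construct the wstring
    // with an explicit length from SysStringLen.
    std::wstring wstr = config_str
        ? std::wstring(config_str, SysStringLen(config_str))
        : std::wstring();  // NULL BSTR means "empty string" in COM
    std::string configuration_str = WstrToUtf8Str(wstr);
    // ... use configuration_str ...
    return S_OK;
}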

5
I implemented two variants of conversion between UTF-8 <-> UTF-16 <-> UTF-32. The first variant implements all conversions entirely from scratch; the second uses the standard std::codecvt and std::wstring_convert facilities (both classes are deprecated since C++17, but they still exist and are guaranteed to be present in C++11/C++14).
If you don't like my code, you can use the almost-single-header C++ library utfcpp, which should be well tested by many users.
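For comparison, a minimal sketch of the utfcpp route (this assumes utfcpp's iterator-based utf8::utf16to8 API and its header utf8.h; check the library's documentation for your version):

#include <string>
#include <iterator>
#include "utf8.h"  // utfcpp

// UTF-16 -> UTF-8 using utfcpp's iterator interface.
std::string Utf16ToUtf8(const std::u16string& s)
{
    std::string result;
    utf8::utf16to8(s.begin(), s.end(), std::back_inserter(result));
    return result;
}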
With my functions, to convert UTF-8 to UTF-16 just call Utf32To16(Utf8To32(str)), and to convert UTF-16 to UTF-8 call Utf32To8(Utf16To32(str)). Alternatively, you can use my convenience helper UtfConv<std::wstring>(std::string("abc")) for UTF-8 to UTF-16, or UtfConv<std::string>(std::wstring(L"abc")) for UTF-16 to UTF-8; in fact, UtfConv can convert from any UTF-encoded string to any other. See the Test(cs) macro for these and other usage examples.
Both variants conform to the C++11 standard. They also compile with Clang/GCC/MSVC (see the "Try it online!" links below) and have been tested on Windows and Linux.
You need to save both of my code snippets in files encoded as UTF-8, and pass the options -finput-charset=UTF-8 -fexec-charset=UTF-8 to Clang/GCC, or /utf-8 to MSVC. This UTF-8 saving and these options are only needed if you put string literals with non-ASCII characters in the code (as I did for testing purposes); if you only use the functions themselves, none of this is necessary.
Including <windows.h>, <clocale>, and <iostream>, and calling SetConsoleOutputCP(65001) or std::setlocale(LC_ALL, "en_US.UTF-8"), is only there for testing purposes, to set up UTF-8 console output correctly. None of this is needed for the conversion functions.

Part of the code is not strictly necessary: the UtfHelper-related structs and functions are just conversion helpers, created mainly to handle std::wstring in a cross-platform way, because wchar_t is usually 32-bit on Linux and 16-bit on Windows. Only the low-level functions Utf8To32, Utf32To8, Utf16To32, and Utf32To16 are actually needed for the conversions.

Variant 1 was written from scratch based on Wikipedia's descriptions of the UTF-8 and UTF-16 encodings.

If you find bugs or possible improvements (especially in Variant 1), please let me know and I will fix them.


Variant 1

Try it online!

#include <string>
#include <iostream>
#include <stdexcept>
#include <type_traits>
#include <cstdint>

#ifdef _WIN32
    #include <windows.h>
#else
    #include <clocale>
#endif

#define ASSERT_MSG(cond, msg) { if (!(cond)) throw std::runtime_error("Assertion (" #cond ") failed at line " + std::to_string(__LINE__) + "! Msg: " + std::string(msg)); }
#define ASSERT(cond) ASSERT_MSG(cond, "")

template <typename U8StrT = std::string>
inline static U8StrT Utf32To8(std::u32string const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    typedef typename U8StrT::value_type VT;
    typedef uint8_t u8;
    U8StrT r;
    for (auto c: s) {
        size_t nby = c <= 0x7FU ? 1 : c <= 0x7FFU ? 2 : c <= 0xFFFFU ? 3 : c <= 0x1FFFFFU ? 4 : c <= 0x3FFFFFFU ? 5 : c <= 0x7FFFFFFFU ? 6 : 7;
        r.push_back(VT(
            nby <= 1 ? u8(c) : (
                (u8(0xFFU) << (8 - nby)) |
                u8(c >> (6 * (nby - 1)))
            )
        ));
        for (size_t i = 1; i < nby; ++i)
            r.push_back(VT(u8(0x80U | (u8(0x3FU) & u8(c >> (6 * (nby - 1 - i)))))));
    }
    return r;
}

template <typename U8StrT>
inline static std::u32string Utf8To32(U8StrT const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    typedef uint8_t u8;
    std::u32string r;
    auto it = (u8 const *)s.c_str(), end = (u8 const *)(s.c_str() + s.length());
    while (it < end) {
        char32_t c = 0;
        if (*it <= 0x7FU) {
            c = *it;
            ++it;
        } else {
            ASSERT((*it & 0xC0U) == 0xC0U);
            size_t nby = 0;
            for (u8 b = *it; (b & 0x80U) != 0; b <<= 1, ++nby) {(void)0;}
            ASSERT(nby <= 7);
            ASSERT(size_t(end - it) >= nby); // cast avoids a signed/unsigned comparison
            c = *it & (u8(0xFFU) >> (nby + 1));
            for (size_t i = 1; i < nby; ++i) {
                ASSERT((it[i] & 0xC0U) == 0x80U);
                c = (c << 6) | (it[i] & 0x3FU);
            }
            it += nby;
        }
        r.push_back(c);
    }
    return r;
}


template <typename U16StrT = std::u16string>
inline static U16StrT Utf32To16(std::u32string const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    typedef typename U16StrT::value_type VT;
    typedef uint16_t u16;
    U16StrT r;
    for (auto c: s) {
        if (c <= 0xFFFFU)
            r.push_back(VT(c));
        else {
            ASSERT(c <= 0x10FFFFU);
            c -= 0x10000U;
            r.push_back(VT(u16(0xD800U | ((c >> 10) & 0x3FFU))));
            r.push_back(VT(u16(0xDC00U | (c & 0x3FFU))));
        }
    }
    return r;
}

template <typename U16StrT>
inline static std::u32string Utf16To32(U16StrT const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    typedef uint16_t u16;
    std::u32string r;
    auto it = (u16 const *)s.c_str(), end = (u16 const *)(s.c_str() + s.length());
    while (it < end) {
        char32_t c = 0;
        if (*it < 0xD800U || *it > 0xDFFFU) {
            c = *it;
            ++it;
        } else if (*it >= 0xDC00U) {
            ASSERT_MSG(false, "Unallowed UTF-16 sequence!");
        } else {
            ASSERT(end - it >= 2);
            c = (*it & 0x3FFU) << 10;
            if ((it[1] < 0xDC00U) || (it[1] > 0xDFFFU)) {
                ASSERT_MSG(false, "Unallowed UTF-16 sequence!");
            } else {
                c |= it[1] & 0x3FFU;
                c += 0x10000U;
            }
            it += 2;
        }
        r.push_back(c);
    }
    return r;
}


template <typename StrT, size_t NumBytes = sizeof(typename StrT::value_type)> struct UtfHelper;
template <typename StrT> struct UtfHelper<StrT, 1> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf8To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To8<StrT>(s); }
};
template <typename StrT> struct UtfHelper<StrT, 2> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf16To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To16<StrT>(s); }
};
template <typename StrT> struct UtfHelper<StrT, 4> {
    inline static std::u32string UtfTo32(StrT const & s) {
        return std::u32string((char32_t const *)(s.c_str()), (char32_t const *)(s.c_str() + s.length()));
    }
    inline static StrT UtfFrom32(std::u32string const & s) {
        return StrT((typename StrT::value_type const *)(s.c_str()),
            (typename StrT::value_type const *)(s.c_str() + s.length()));
    }
};
template <typename StrT> inline static std::u32string UtfTo32(StrT const & s) {
    return UtfHelper<StrT>::UtfTo32(s);
}
template <typename StrT> inline static StrT UtfFrom32(std::u32string const & s) {
    return UtfHelper<StrT>::UtfFrom32(s);
}
template <typename StrToT, typename StrFromT> inline static StrToT UtfConv(StrFromT const & s) {
    return UtfFrom32<StrToT>(UtfTo32(s));
}

#define Test(cs) \
    std::cout << Utf32To8(Utf8To32(std::string(cs))) << ", "; \
    std::cout << Utf32To8(Utf16To32(Utf32To16(Utf8To32(std::string(cs))))) << ", "; \
    std::cout << Utf32To8(Utf16To32(std::u16string(u##cs))) << ", "; \
    std::cout << Utf32To8(std::u32string(U##cs)) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::u16string>(UtfConv<std::u32string>(UtfConv<std::u32string>(UtfConv<std::u16string>(std::string(cs)))))) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::wstring>(UtfConv<std::string>(UtfConv<std::u32string>(UtfConv<std::u32string>(std::string(cs)))))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::string(cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u16string(u##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::wstring(L##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u32string(U##cs))) << std::endl; \
    std::cout << "UTF-8 num bytes: " << std::dec << Utf32To8(std::u32string(U##cs)).size() << ", "; \
    std::cout << "UTF-16 num bytes: " << std::dec << (Utf32To16(std::u32string(U##cs)).size() * 2) << std::endl;

int main() {
    #ifdef _WIN32
        SetConsoleOutputCP(65001);
    #else
        std::setlocale(LC_ALL, "en_US.UTF-8");
    #endif
    try {
        Test("World");
        Test("Привет");
        Test("");
        Test("");
        return 0;
    } catch (std::exception const & ex) {
        std::cout << "Exception: " << ex.what() << std::endl;
        return -1;
    }
}

Output:

World, World, World, World, World, World, World, World, World, World
UTF-8 num bytes: 5, UTF-16 num bytes: 10
Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет
UTF-8 num bytes: 12, UTF-16 num bytes: 12
, , , , , , , , , 
UTF-8 num bytes: 8, UTF-16 num bytes: 8
, , , , , , , , , 
UTF-8 num bytes: 4, UTF-16 num bytes: 4

Variant 2

Try it online!

#include <string>
#include <iostream>
#include <stdexcept>
#include <type_traits>
#include <locale>
#include <codecvt>
#include <cstdint>

#ifdef _WIN32
    #include <windows.h>
#else
    #include <clocale>
#endif

#define ASSERT(cond) { if (!(cond)) throw std::runtime_error("Assertion (" #cond ") failed at line " + std::to_string(__LINE__) + "!"); }

// Workaround for some of MSVC compilers.
#if defined(_MSC_VER) && (!_DLL) && (_MSC_VER >= 1900 /* VS 2015*/) && (_MSC_VER <= 1914 /* VS 2017 */)
std::locale::id std::codecvt<char16_t, char, _Mbstatet>::id;
std::locale::id std::codecvt<char32_t, char, _Mbstatet>::id;
#endif

template <typename U8StrT>
inline static std::u32string Utf8To32(U8StrT const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> utf_8_32_conv_;
    return utf_8_32_conv_.from_bytes((char const *)s.c_str(), (char const *)(s.c_str() + s.length()));
}

template <typename U8StrT = std::string>
inline static U8StrT Utf32To8(std::u32string const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> utf_8_32_conv_;
    std::string res = utf_8_32_conv_.to_bytes(s.c_str(), s.c_str() + s.length());
    return U8StrT(
        (typename U8StrT::value_type const *)(res.c_str()),
        (typename U8StrT::value_type const *)(res.c_str() + res.length()));
}

template <typename U16StrT>
inline static std::u32string Utf16To32(U16StrT const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    std::wstring_convert<std::codecvt_utf16<char32_t, 0x10ffff, std::little_endian>, char32_t> utf_16_32_conv_;
    return utf_16_32_conv_.from_bytes((char const *)s.c_str(), (char const *)(s.c_str() + s.length()));
}

template <typename U16StrT = std::u16string>
inline static U16StrT Utf32To16(std::u32string const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    std::wstring_convert<std::codecvt_utf16<char32_t, 0x10ffff, std::little_endian>, char32_t> utf_16_32_conv_;
    std::string res = utf_16_32_conv_.to_bytes(s.c_str(), s.c_str() + s.length());
    return U16StrT(
        (typename U16StrT::value_type const *)(res.c_str()),
        (typename U16StrT::value_type const *)(res.c_str() + res.length()));
}


template <typename StrT, size_t NumBytes = sizeof(typename StrT::value_type)> struct UtfHelper;
template <typename StrT> struct UtfHelper<StrT, 1> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf8To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To8<StrT>(s); }
};
template <typename StrT> struct UtfHelper<StrT, 2> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf16To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To16<StrT>(s); }
};
template <typename StrT> struct UtfHelper<StrT, 4> {
    inline static std::u32string UtfTo32(StrT const & s) {
        return std::u32string((char32_t const *)(s.c_str()), (char32_t const *)(s.c_str() + s.length()));
    }
    inline static StrT UtfFrom32(std::u32string const & s) {
        return StrT((typename StrT::value_type const *)(s.c_str()),
            (typename StrT::value_type const *)(s.c_str() + s.length()));
    }
};
template <typename StrT> inline static std::u32string UtfTo32(StrT const & s) {
    return UtfHelper<StrT>::UtfTo32(s);
}
template <typename StrT> inline static StrT UtfFrom32(std::u32string const & s) {
    return UtfHelper<StrT>::UtfFrom32(s);
}
template <typename StrToT, typename StrFromT> inline static StrToT UtfConv(StrFromT const & s) {
    return UtfFrom32<StrToT>(UtfTo32(s));
}

#define Test(cs) \
    std::cout << Utf32To8(Utf8To32(std::string(cs))) << ", "; \
    std::cout << Utf32To8(Utf16To32(Utf32To16(Utf8To32(std::string(cs))))) << ", "; \
    std::cout << Utf32To8(Utf16To32(std::u16string(u##cs))) << ", "; \
    std::cout << Utf32To8(std::u32string(U##cs)) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::u16string>(UtfConv<std::u32string>(UtfConv<std::u32string>(UtfConv<std::u16string>(std::string(cs)))))) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::wstring>(UtfConv<std::string>(UtfConv<std::u32string>(UtfConv<std::u32string>(std::string(cs)))))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::string(cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u16string(u##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::wstring(L##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u32string(U##cs))) << std::endl; \
    std::cout << "UTF-8 num bytes: " << std::dec << Utf32To8(std::u32string(U##cs)).size() << ", "; \
    std::cout << "UTF-16 num bytes: " << std::dec << (Utf32To16(std::u32string(U##cs)).size() * 2) << std::endl;

int main() {
    #ifdef _WIN32
        SetConsoleOutputCP(65001);
    #else
        std::setlocale(LC_ALL, "en_US.UTF-8");
    #endif
    try {
        Test("World");
        Test("Привет");
        Test("");
        Test("");
        return 0;
    } catch (std::exception const & ex) {
        std::cout << "Exception: " << ex.what() << std::endl;
        return -1;
    }
}

Output:

World, World, World, World, World, World, World, World, World, World
UTF-8 num bytes: 5, UTF-16 num bytes: 10
Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет
UTF-8 num bytes: 12, UTF-16 num bytes: 12
, , , , , , , , , 
UTF-8 num bytes: 8, UTF-16 num bytes: 8
, , , , , , , , , 
UTF-8 num bytes: 4, UTF-16 num bytes: 4

2

Can you give me an example? Because I don't understand how to use it. The BSTR input parameter is UTF-16LE encoded. - user3252635
1
I don't have time to create one myself, but I found this link, where the author covers the topic in great detail. Hope it helps. - beardedN5rd
@user3252635 there is an example in the documentation. There are better examples on cppreference.com, and have a look at std::wstring_convert as well. - Remy Lebeau
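A minimal sketch of the std::wstring_convert approach mentioned in the comment above, using std::codecvt_utf8_utf16 (deprecated since C++17 but available in C++11/14; some MSVC versions also need the std::locale::id workaround shown in Variant 2 above):

#include <string>
#include <locale>
#include <codecvt>

// Convert a UTF-16 string directly to UTF-8.
std::string Utf16ToUtf8(const std::u16string& utf16)
{
    std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> conv;
    return conv.to_bytes(utf16);
}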

1
void encode_unicode_character(char* buffer, int* offset, wchar_t ucs_character)
{
    if (ucs_character <= 0x7F)
    {
        // Plain single-byte ASCII.
        buffer[(*offset)++] = (char) ucs_character;
    }
    else if (ucs_character <= 0x7FF)
    {
        // Two bytes.
        buffer[(*offset)++] = (char) (0xC0 | (ucs_character >> 6));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 0) & 0x3F));
    }
    else if (ucs_character <= 0xFFFF)
    {
        // Three bytes.
        buffer[(*offset)++] = (char) (0xE0 | (ucs_character >> 12));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 6) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 0) & 0x3F));
    }
    else if (ucs_character <= 0x1FFFFF)
    {
        // Four bytes.
        buffer[(*offset)++] = (char) (0xF0 | (ucs_character >> 18));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 12) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 6) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 0) & 0x3F));
    }
    else if (ucs_character <= 0x3FFFFFF)
    {
        // Five bytes.
        buffer[(*offset)++] = (char) (0xF8 | (ucs_character >> 24));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 18) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 12) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 6) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 0) & 0x3F));
    }
    else if (ucs_character <= 0x7FFFFFFF)
    {
        // Six bytes.
        buffer[(*offset)++] = (char) (0xFC | (ucs_character >> 30));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 24) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 18) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 12) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 6) & 0x3F));
        buffer[(*offset)++] = (char) (0x80 | ((ucs_character >> 0) & 0x3F));
    }
    else
    {
        // Invalid character; don't encode anything.
    }
}

ISO 10646:2012 is all you need in order to understand UCS.


5
UCS is not the problem here. UCS is also not equivalent to UTF-16. Does your code work for UTF-16? - rubenvb
1
We don't understand why you talk about UCS when the question is about UTF-16. We also don't understand why you show code that converts UTF-32 to UTF-8 when the question is about UTF-16. Besides, the assumption that wchar_t can hold UTF-32 code units is wrong; it may hold on some systems, but not on all of them. - David Heffernan
@DavidHeffernan Really? Hmm, I had no idea... How am I supposed to decode the UTF-16 first, I mean from what into what? Very funny... hmm... - kvv
1
@kvv No. The question asks for a conversion starting from UTF-16. That means the input is UTF-16. Your code converts starting from UTF-32. So to use it, the UTF-16 must first be converted to UTF-32, and then to UTF-8. - David Heffernan
1
Thanks for listening. - David Heffernan
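For illustration, a minimal sketch of the missing UTF-16 → UTF-32 step David Heffernan describes above, whose result could then be fed to encode_unicode_character; it assumes well-formed input, and decode_utf16_character is a hypothetical helper name, not part of the answer:

#include <stdint.h>

// Decode one code point from a UTF-16 sequence; advances *offset past the
// consumed code units (1 for BMP characters, 2 for a surrogate pair).
uint32_t decode_utf16_character(const uint16_t* units, int* offset)
{
    uint16_t lead = units[(*offset)++];
    if (lead < 0xD800 || lead > 0xDBFF)
        return lead;                      // BMP character (validity unchecked here)
    uint16_t trail = units[(*offset)++];  // assumes a valid trail surrogate follows
    return 0x10000 + (((uint32_t)(lead & 0x3FF) << 10) | (trail & 0x3FF));
}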
