Fix UTF32ToUTF16Char() conversion. Fixes issue #99. Reviewed by bryner.
git-svn-id: http://google-breakpad.googlecode.com/svn/trunk@97 4c0a9323-5329-0410-9bdc-e9ce6186880e
commit c4d599912c
parent 70b5efdda7
1 changed file with 2 additions and 2 deletions
@@ -98,8 +98,8 @@ void UTF32ToUTF16(const wchar_t *in, vector<u_int16_t> *out) {
 }
 
 void UTF32ToUTF16Char(wchar_t in, u_int16_t out[2]) {
-  const UTF32 *source_ptr = reinterpret_cast<const UTF32 *>(in);
-  const UTF32 *source_end_ptr = source_ptr + sizeof(wchar_t);
+  const UTF32 *source_ptr = reinterpret_cast<const UTF32 *>(&in);
+  const UTF32 *source_end_ptr = source_ptr + 1;
   u_int16_t *target_ptr = out;
   u_int16_t *target_end_ptr = target_ptr + 2 * sizeof(u_int16_t);
   out[0] = out[1] = 0;
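The two changed lines fix separate bugs. The old code reinterpret_cast the value of `in` into a pointer, so the conversion read from whatever address the code point happened to name; and it placed `source_end_ptr` sizeof(wchar_t) UTF32 elements (not bytes) past the start, rather than one element past the single input character. Below is a minimal sketch of the corrected pointer setup, not Breakpad's actual implementation: it assumes a platform where wchar_t is 32 bits wide (as on the Mac targets this file serves), substitutes <cstdint> types for the ConvertUTF typedefs, and reduces the real surrogate-pair conversion to a BMP-only stub so the pointer arithmetic stays the focus.

    // Minimal sketch of the fixed pointer setup (assumes 32-bit wchar_t).
    #include <cassert>
    #include <cstdint>

    typedef uint32_t UTF32;  // stand-in for the ConvertUTF typedef

    void UTF32ToUTF16Char(wchar_t in, uint16_t out[2]) {
      // Take the address of the parameter; the pre-fix code cast the
      // value of `in` to a pointer and read from an arbitrary address.
      const UTF32 *source_ptr = reinterpret_cast<const UTF32 *>(&in);
      // One element past the single input code point; the pre-fix code
      // added sizeof(wchar_t) elements, i.e. 16 bytes past a 4-byte object.
      const UTF32 *source_end_ptr = source_ptr + 1;
      out[0] = out[1] = 0;
      // BMP-only stub; the real code converts code points above U+FFFF
      // to a surrogate pair in out[0] and out[1].
      if (source_ptr < source_end_ptr && *source_ptr <= 0xFFFF)
        out[0] = static_cast<uint16_t>(*source_ptr);
    }

    int main() {
      uint16_t out[2];
      UTF32ToUTF16Char(L'A', out);
      assert(out[0] == 0x0041 && out[1] == 0);
      return 0;
    }

Note that the unchanged context line `target_ptr + 2 * sizeof(u_int16_t)` has the same elements-versus-bytes confusion (it spans four u_int16_t, not two); it is harmless here only because a single UTF-32 code point never produces more than the two UTF-16 units that out[2] can hold.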