diff --git a/imgui.cpp b/imgui.cpp
index 726b05f5..6a984849 100644
--- a/imgui.cpp
+++ b/imgui.cpp
@@ -1142,17 +1142,21 @@ ImGuiIO::ImGuiIO()
 // - on Windows you can get those using ToAscii+keyboard state, or via the WM_CHAR message
 void ImGuiIO::AddInputCharacter(unsigned int c)
 {
-    InputQueueCharacters.push_back(c > 0 && c <= IM_UNICODE_CODEPOINT_MAX ? (ImWchar)c : IM_UNICODE_CODEPOINT_INVALID);
+    if (c != 0)
+        InputQueueCharacters.push_back(c <= IM_UNICODE_CODEPOINT_MAX ? (ImWchar)c : IM_UNICODE_CODEPOINT_INVALID);
 }
 
 // UTF16 strings use surrogate pairs to encode codepoints >= 0x10000, so
 // we should save the high surrogate.
 void ImGuiIO::AddInputCharacterUTF16(ImWchar16 c)
 {
+    if (c == 0 && InputQueueSurrogate == 0)
+        return;
+
     if ((c & 0xFC00) == 0xD800) // High surrogate, must save
     {
         if (InputQueueSurrogate != 0)
-            InputQueueCharacters.push_back(0xFFFD);
+            InputQueueCharacters.push_back(IM_UNICODE_CODEPOINT_INVALID);
         InputQueueSurrogate = c;
         return;
     }
@@ -1177,7 +1181,7 @@ void ImGuiIO::AddInputCharactersUTF8(const char* utf8_chars)
     {
         unsigned int c = 0;
         utf8_chars += ImTextCharFromUtf8(&c, utf8_chars, NULL);
-        if (c > 0)
+        if (c != 0)
            InputQueueCharacters.push_back((ImWchar)c);
     }
 }
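
For reference, a minimal sketch (not part of the patch) of how a backend might exercise AddInputCharacterUTF16() with a surrogate pair; the specific codepoint and call site are illustrative only, and an ImGui context is assumed to exist.

    #include "imgui.h"

    void FeedSurrogatePairExample()
    {
        ImGuiIO& io = ImGui::GetIO();
        // U+1F600 is >= 0x10000, so in UTF-16 it arrives as two code units
        // (on Windows, typically two consecutive WM_CHAR messages).
        io.AddInputCharacterUTF16(0xD83D); // high surrogate: stored in InputQueueSurrogate
        io.AddInputCharacterUTF16(0xDE00); // low surrogate: completes the pair, queues U+1F600
        io.AddInputCharacterUTF16(0);      // with this patch, a stray zero with no pending surrogate is ignored
    }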