Unicode: UTF32 support improvements (#2541, #2538, #2815)

- Make ImWchar32 unsigned.
- Fix Win32 version of ImFileOpen by including windows.h sooner.
- Make ImGuiIO::AddInputCharacterUTF16() more robust by disallowing illegal surrogate pairs (see the sketch after this list).
- Allow pushing higher plane codepoints through ImGuiIO::AddInputCharacter().
- Minor cleanup of the high-plane Unicode support.
- Fix Clang -Wunreachable-code warning.
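
For context on the surrogate-pair item above: a UTF-16 stream delivers codepoints beyond U+FFFF as a high surrogate (U+D800..U+DBFF) followed by a low surrogate (U+DC00..U+DFFF); any other surrogate sequence is illegal. The following is a minimal sketch of that validation, not imgui's implementation; Utf16InputDecoder and Feed() are hypothetical names standing in for the state ImGuiIO keeps in InputQueueSurrogate.

// Minimal sketch (hypothetical helper, not imgui's code): decoding a UTF-16
// input stream one code unit at a time while rejecting illegal surrogate pairs.
#include <cstdint>
#include <vector>

static const uint32_t CODEPOINT_INVALID = 0xFFFD; // Mirrors IM_UNICODE_CODEPOINT_INVALID.

struct Utf16InputDecoder
{
    uint16_t Surrogate = 0; // Pending high surrogate, 0 if none (like ImGuiIO::InputQueueSurrogate).

    // Feed one UTF-16 code unit; decoded codepoints (or U+FFFD for illegal
    // sequences) are appended to 'out'.
    void Feed(uint16_t c, std::vector<uint32_t>& out)
    {
        if (Surrogate != 0) // A high surrogate is pending: only a low surrogate may follow.
        {
            if (c >= 0xDC00 && c <= 0xDFFF)
            {
                // Valid pair -> codepoint in the U+10000..U+10FFFF range.
                out.push_back(0x10000 + (((uint32_t)(Surrogate - 0xD800) << 10) | (uint32_t)(c - 0xDC00)));
                Surrogate = 0;
                return;
            }
            out.push_back(CODEPOINT_INVALID); // Dangling high surrogate is illegal.
            Surrogate = 0;                    // Fall through and handle 'c' on its own.
        }
        if (c >= 0xD800 && c <= 0xDBFF) { Surrogate = c; return; }                    // High surrogate: wait for the low half.
        if (c >= 0xDC00 && c <= 0xDFFF) { out.push_back(CODEPOINT_INVALID); return; } // Unpaired low surrogate is illegal.
        out.push_back(c);                                                             // Plain BMP code unit.
    }
};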
Authored by Sam Hocevar on 2019-09-29 12:15:13 +02:00, committed by ocornut
parent 6d59653e82
commit c8ea0a017d
3 changed files with 46 additions and 35 deletions

imgui.h

@@ -92,7 +92,7 @@ Index of this file:
#else
#define IM_OFFSETOF(_TYPE,_MEMBER) ((size_t)&(((_TYPE*)0)->_MEMBER)) // Offset of _MEMBER within _TYPE. Old style macro.
#endif
-#define IM_UNICODE_CODEPOINT_MAX 0xFFFF // Last Unicode code point supported by this build.
+#define IM_UNICODE_CODEPOINT_MAX (sizeof(ImWchar) == 2 ? 0xFFFF : 0x10FFFF) // Last Unicode code point supported by this build.
#define IM_UNICODE_CODEPOINT_INVALID 0xFFFD // Standard invalid Unicode code point.
// Warnings
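
With IM_UNICODE_CODEPOINT_MAX now depending on sizeof(ImWchar), input paths can clamp codepoints the build cannot represent. A hedged sketch of that pattern follows; queue_character is a placeholder, not an imgui function, and the typedefs/macros are inlined from the hunk above so it stands alone.

// Sketch: replace codepoints above the build's IM_UNICODE_CODEPOINT_MAX with
// the standard replacement character instead of truncating them.
#include <cstdio>

typedef unsigned short ImWchar16;
typedef unsigned int   ImWchar32;
typedef ImWchar16      ImWchar;   // Default build: 16-bit characters.

#define IM_UNICODE_CODEPOINT_INVALID 0xFFFD
#define IM_UNICODE_CODEPOINT_MAX     (sizeof(ImWchar) == 2 ? 0xFFFF : 0x10FFFF)

static void queue_character(unsigned int c)   // Placeholder for the real input queue.
{
    if (c > IM_UNICODE_CODEPOINT_MAX)         // e.g. U+1F600 in a 16-bit build...
        c = IM_UNICODE_CODEPOINT_INVALID;     // ...becomes U+FFFD rather than a garbled value.
    printf("queued U+%04X\n", c);
}

int main()
{
    queue_character(0x20AC);   // U+20AC EURO SIGN: representable in both builds.
    queue_character(0x1F600);  // Higher-plane codepoint: only kept when ImWchar is ImWchar32.
    return 0;
}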
@@ -147,7 +147,7 @@ typedef unsigned int ImGuiID; // Unique ID used by widgets (typically hash
#define ImWchar ImWchar16
#endif
typedef unsigned short ImWchar16; // A single U16 character for keyboard input/display. We encode them as multi bytes UTF-8 when used in strings.
-typedef int ImWchar32; // A single 32bit character for keyboard input/display, define ImWchar to ImWchar32 to use it. See imconfig.h .
+typedef unsigned int ImWchar32; // A single U32 character for keyboard input/display. Define ImWchar to ImWchar32 to use it. See imconfig.h .
typedef int ImGuiCol; // -> enum ImGuiCol_ // Enum: A color identifier for styling
typedef int ImGuiCond; // -> enum ImGuiCond_ // Enum: A condition for many Set*() functions
typedef int ImGuiDataType; // -> enum ImGuiDataType_ // Enum: A primary data type
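
The new ImWchar32 comment above points at imconfig.h for opting into 32-bit characters. Below is a minimal compile-time sketch of that opt-in and of the "unsigned" change, with the two typedefs inlined so it stands alone; in a real project the override would live in imconfig.h before imgui.h is included.

// Sketch of the opt-in hinted at above ("define ImWchar to ImWchar32 to use it").
typedef unsigned short ImWchar16;   // A single U16 character.
typedef unsigned int   ImWchar32;   // A single U32 character (made unsigned by this commit).

#define ImWchar ImWchar32           // Normally placed in imconfig.h.

static_assert(sizeof(ImWchar) == 4, "ImWchar can now hold any code point up to U+10FFFF");
static_assert(ImWchar32(-1) > 0, "ImWchar32 is unsigned, so code points never compare as negative");

int main() { return 0; }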
@@ -1512,7 +1512,7 @@ struct ImGuiIO
float KeysDownDurationPrev[512]; // Previous duration the key has been down
float NavInputsDownDuration[ImGuiNavInput_COUNT];
float NavInputsDownDurationPrev[ImGuiNavInput_COUNT];
-ImWchar16 Surrogate; // For AddInputCharacterUTF16
+ImWchar16 InputQueueSurrogate; // For AddInputCharacterUTF16
ImVector<ImWchar> InputQueueCharacters; // Queue of _characters_ input (obtained by platform back-end). Fill using AddInputCharacter() helper.
IMGUI_API ImGuiIO();
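
The rename above makes it explicit that the pending surrogate belongs to the input queue. Here is a sketch of how a Win32 back-end might feed it through the usual WM_CHAR path; this is an illustration, not the actual imgui_impl_win32 handler.

// Sketch (not imgui_impl_win32): forwarding WM_CHAR code units so that
// AddInputCharacterUTF16() can pair them via InputQueueSurrogate.
#include <windows.h>
#include "imgui.h"

LRESULT WINAPI SketchWndProc(HWND hwnd, UINT msg, WPARAM wparam, LPARAM lparam)
{
    if (msg == WM_CHAR)
    {
        // WM_CHAR delivers UTF-16 code units; a character outside the BMP arrives
        // as two messages (high surrogate, then low surrogate). The pending high
        // half is buffered in io.InputQueueSurrogate and illegal pairs are rejected.
        ImGuiIO& io = ImGui::GetIO();
        io.AddInputCharacterUTF16((ImWchar16)wparam);
        return 0;
    }
    return DefWindowProc(hwnd, msg, wparam, lparam);
}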
@@ -2097,15 +2097,11 @@ struct ImFontGlyphRangesBuilder
{
ImVector<ImU32> UsedChars; // Store 1-bit per Unicode code point (0=unused, 1=used)
-ImFontGlyphRangesBuilder() { Clear(); }
-inline void Clear()
-{
-int MaxUnicode = sizeof(ImWchar) == 2 ? 0x10000 : 0x110000;
-UsedChars.resize(MaxUnicode / sizeof(int)); memset(UsedChars.Data, 0, MaxUnicode / sizeof(int));
-}
-inline bool GetBit(int n) const { int off = (n >> 5); ImU32 mask = 1u << (n & 31); return (UsedChars[off] & mask) != 0; } // Get bit n in the array
-inline void SetBit(int n) { int off = (n >> 5); ImU32 mask = 1u << (n & 31); UsedChars[off] |= mask; } // Set bit n in the array
-inline void AddChar(ImWchar c) { SetBit(c); } // Add character
+ImFontGlyphRangesBuilder() { Clear(); }
+inline void Clear() { int size_in_bytes = (IM_UNICODE_CODEPOINT_MAX + 1) / 8; UsedChars.resize(size_in_bytes / (int)sizeof(ImU32)); memset(UsedChars.Data, 0, (size_t)size_in_bytes); }
+inline bool GetBit(size_t n) const { int off = (int)(n >> 5); ImU32 mask = 1u << (n & 31); return (UsedChars[off] & mask) != 0; } // Get bit n in the array
+inline void SetBit(size_t n) { int off = (int)(n >> 5); ImU32 mask = 1u << (n & 31); UsedChars[off] |= mask; } // Set bit n in the array
+inline void AddChar(ImWchar c) { SetBit(c); } // Add character
IMGUI_API void AddText(const char* text, const char* text_end = NULL); // Add string (each character of the UTF-8 string are added)
IMGUI_API void AddRanges(const ImWchar* ranges); // Add ranges, e.g. builder.AddRanges(ImFontAtlas::GetGlyphRangesDefault()) to force add all of ASCII/Latin+Ext
IMGUI_API void BuildRanges(ImVector<ImWchar>* out_ranges); // Output new ranges
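
For reference, typical usage of the builder whose bit storage the hunk above resizes to cover the whole supported codepoint range. The font path and sample text are placeholders; the calls themselves match the declarations shown in the diff.

// Sketch: build custom glyph ranges, then load a font covering them.
// Source file assumed UTF-8 encoded; "font.ttf" is a placeholder path.
#include "imgui.h"

static void LoadFontWithCustomRanges(ImGuiIO& io)
{
    ImFontGlyphRangesBuilder builder;
    builder.AddText("Grüße, Straße");                       // Mark every codepoint used by a UTF-8 string.
    builder.AddRanges(io.Fonts->GetGlyphRangesDefault());   // Also force-add ASCII/Latin+Ext.
    builder.AddChar((ImWchar)0x2122);                       // And one extra character (U+2122 TRADE MARK SIGN).

    static ImVector<ImWchar> ranges;                        // Must outlive the font atlas build.
    builder.BuildRanges(&ranges);
    io.Fonts->AddFontFromFileTTF("font.ttf", 18.0f, NULL, ranges.Data);
}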