win32: handle unicode when loading glsl shaders
commit ba76c1c2be (parent 4b770e0997)
@@ -14,10 +14,13 @@
#include "gl_core_3_1.h"
#include <direct.h>

#ifdef UNICODE
#define chdir(dir) _wchdir(Utf8ToWide(dir))
#define realpath(src, resolved) _twfullpath(resolved, src, PATH_MAX)
#else
#define chdir(dir) _chdir(dir)
#define realpath(src, resolved) _fullpath(resolved, src, PATH_MAX)
#endif

#endif
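The chdir/realpath shims above lean on a Utf8ToWide helper defined elsewhere in the codebase. As a rough sketch of what such a helper typically looks like on Win32 (the project's actual implementation, and in particular who owns the returned buffer, may differ), the conversion is a two-call MultiByteToWideChar pattern:

#include <windows.h>
#include <stdlib.h>

/* Sketch only: convert a NUL-terminated UTF-8 string to UTF-16. */
static wchar_t *Utf8ToWide(const char *utf8) {
    /* First call with a NULL buffer returns the required length in
     * wchar_t units, including the terminator (cbMultiByte == -1). */
    int len = MultiByteToWideChar(CP_UTF8, 0, utf8, -1, NULL, 0);
    wchar_t *wide = (wchar_t *)malloc(len * sizeof(wchar_t));
    if (wide != NULL)
        MultiByteToWideChar(CP_UTF8, 0, utf8, -1, wide, len);
    return wide;
}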
@@ -229,4 +229,11 @@ extern "C" int _twopen(const char *filename, int oflag, int pmode) {
    return _wopen(Utf8ToWide(filename), oflag, pmode);
}

extern "C" void _twfullpath(char* dst, const char* src, int len) {
    wchar_t *resolved = _wfullpath(NULL, Utf8ToWide(src), MAX_PATH);
    strncpy(dst, WideToUtf8(resolved), len);
    dst[len - 1] = '\0';
    return;
}

#endif // UNICODE
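A usage sketch of the new shim (the directory, shader name, and surrounding code are illustrative, not part of this commit): on UNICODE builds, realpath now expands to _twfullpath, so a UTF-8 shader path round-trips through the wide-character CRT intact. One CRT detail worth noting: when its first argument is NULL, _wfullpath allocates the result with malloc, so the intermediate wide buffer above is owned by the caller.

#include <stdio.h>

int main(void) {
    /* "shaders" and "blur.glsl" are hypothetical names; PATH_MAX and the
     * chdir/realpath macros come from the patched header above. */
    char resolved[PATH_MAX];
    chdir("shaders");                /* -> _wchdir(Utf8ToWide("shaders")) */
    realpath("blur.glsl", resolved); /* -> _twfullpath(resolved, "blur.glsl", PATH_MAX) */
    printf("loading shader from %s\n", resolved); /* resolved is UTF-8 again */
    return 0;
}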
@@ -198,6 +198,7 @@ extern "C" {
FILE *_tfwopen(const char *filename, const char *mode );
int _twremove(const char *filename );
int _twopen(const char *filename, int oflag, int pmode);
void _twfullpath(char* dst, const char* src, int len);

#ifdef __cplusplus
}
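For completeness, the WideToUtf8 counterpart that _twfullpath calls would be the mirror image, built on WideCharToMultiByte; again a sketch under the assumption that the helper allocates its result:

#include <windows.h>
#include <stdlib.h>

/* Sketch only: convert a NUL-terminated UTF-16 string to UTF-8. */
static char *WideToUtf8(const wchar_t *wide) {
    /* Query the required UTF-8 byte count, including the terminator.
     * For CP_UTF8 the last two arguments must be NULL. */
    int len = WideCharToMultiByte(CP_UTF8, 0, wide, -1, NULL, 0, NULL, NULL);
    char *utf8 = (char *)malloc(len);
    if (utf8 != NULL)
        WideCharToMultiByte(CP_UTF8, 0, wide, -1, utf8, len, NULL, NULL);
    return utf8;
}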