07-clip-unicode.diff (1.96 KB)
diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp
index 14e02c8d..6e849d8e 100644
--- a/examples/llava/clip.cpp
+++ b/examples/llava/clip.cpp
@@ -44,6 +44,19 @@
 #define LOG_ERR(...) do { fprintf(stderr, __VA_ARGS__); } while (0)
 #define LOG_DBG(...) do { fprintf(stderr, __VA_ARGS__); } while (0)
 
+#if defined(_WIN32)
+#define WIN32_LEAN_AND_MEAN
+#ifndef NOMINMAX
+    #define NOMINMAX
+#endif
+#include <windows.h>
+#if __GLIBCXX__
+#include <cstdio>
+#include <ext/stdio_filebuf.h>
+#include <fcntl.h>
+#endif
+#endif
+
 //#define CLIP_DEBUG_FUNCTIONS
 
 // RGB uint8 image
@@ -1225,8 +1238,29 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {
             gguf_free(ctx);
             return nullptr;
         }
-
+#ifdef _WIN32
+        int wlen = MultiByteToWideChar(CP_UTF8, 0, fname, -1, NULL, 0);
+        if (!wlen) {
+            return NULL;
+        }
+        wchar_t * wbuf = (wchar_t *) malloc(wlen * sizeof(wchar_t));
+        wlen = MultiByteToWideChar(CP_UTF8, 0, fname, -1, wbuf, wlen);
+        if (!wlen) {
+            free(wbuf);
+            return NULL;
+        }
+#if __GLIBCXX__
+        int fd = _wopen(wbuf, _O_RDONLY | _O_BINARY);
+        __gnu_cxx::stdio_filebuf<char> buffer(fd, std::ios_base::in);
+        std::istream fin(&buffer);
+#else // MSVC
+        // unused in our current build
+        auto fin = std::ifstream(wbuf, std::ios::binary);
+#endif
+        free(wbuf);
+#else
         auto fin = std::ifstream(fname, std::ios::binary);
+#endif
         if (!fin) {
             LOG_ERR("cannot open model file for loading tensors\n");
             clip_free(new_clip);
@@ -1266,7 +1300,11 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {
                 ggml_backend_tensor_set(cur, read_buf.data(), 0, num_bytes);
             }
         }
+#if defined(_WIN32) && defined(__GLIBCXX__)
+        close(fd);
+#else
         fin.close();
+#endif
     }
 
     // vision model