Author: Danny Milosavljevic <[email protected]>
Date: 2025-01-29
License: Expat
Subject: Make Vulkan optional

See also: <https://github.com/ggerganov/llama.cpp/pull/11494>

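The hunk below wraps the Vulkan instance initialization inside the backend
registration function, so that on machines without a usable Vulkan driver
registration returns nullptr instead of throwing out of the loader.  As an
illustration only (not part of the patch), a minimal caller-side sketch of
handling that nullptr result; it assumes the upstream registration entry
point ggml_backend_vk_reg() and the ggml_backend_reg_t type from the ggml
backend headers:

    #include "ggml-vulkan.h"   // assumed to declare ggml_backend_vk_reg()
    #include <cstdio>

    int main(void) {
        // With this patch, registration fails gracefully when no Vulkan
        // driver/ICD is present instead of aborting the whole process.
        ggml_backend_reg_t reg = ggml_backend_vk_reg();
        if (reg == nullptr) {
            std::fprintf(stderr, "Vulkan backend unavailable; falling back\n");
            // ... fall back to another backend (e.g. CPU) here ...
            return 0;
        }
        // ... proceed to use the Vulkan backend ...
        return 0;
    }
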
diff -ru orig/llama.cpp/ggml/include/ggml-vulkan.h llama.cpp/ggml/include/ggml-vulkan.h
--- orig/llama.cpp/ggml/include/ggml-vulkan.h	2025-01-29 10:24:10.894476682 +0100
+++ llama.cpp/ggml/include/ggml-vulkan.h	2025-02-07 18:28:34.509509638 +0100
@@ -10,8 +10,6 @@
 #define GGML_VK_NAME "Vulkan"
 #define GGML_VK_MAX_DEVICES 16
 
-GGML_BACKEND_API void ggml_vk_instance_init(void);
-
 // backend API
 GGML_BACKEND_API ggml_backend_t ggml_backend_vk_init(size_t dev_num);
 
diff -ru orig/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp
--- orig/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp	2025-01-29 10:24:10.922476480 +0100
+++ llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp	2025-01-29 22:33:19.955087552 +0100
@@ -8174,8 +8174,13 @@
         /* .iface       = */ ggml_backend_vk_reg_i,
         /* .context     = */ nullptr,
     };
-
-    return &reg;
+    try {
+        ggml_vk_instance_init();
+        return &reg;
+    } catch (const vk::SystemError& e) {
+        VK_LOG_DEBUG("ggml_vk_get_device_count() -> Error: System error: " << e.what());
+        return nullptr;
+    }
 }
 
 // Extension availability