From 2f031fc5342de3c235831803d80fdf6cfc2428f5 Mon Sep 17 00:00:00 2001
From: zhengkunwang <31820853+zhengkunwang223@users.noreply.github.com>
Date: Mon, 17 Feb 2025 17:39:16 +0800
Subject: [PATCH] feat: Add gpuSupport Config (#3195)

---
 maxkb/data.yml  | 1 +
 ollama/data.yml | 1 +
 vllm/data.yml   | 1 +
 3 files changed, 3 insertions(+)

diff --git a/maxkb/data.yml b/maxkb/data.yml
index aa84e88db..4a016289e 100644
--- a/maxkb/data.yml
+++ b/maxkb/data.yml
@@ -26,3 +26,4 @@ additionalProperties:
   website: https://github.com/1Panel-dev/MaxKB
   github: https://github.com/1Panel-dev/MaxKB
   document: https://github.com/1Panel-dev/MaxKB/wiki
+  gpuSupport: true
diff --git a/ollama/data.yml b/ollama/data.yml
index d7a60625b..499f60fe3 100644
--- a/ollama/data.yml
+++ b/ollama/data.yml
@@ -26,3 +26,4 @@ additionalProperties:
   website: https://ollama.com/
   github: https://github.com/ollama/ollama
   document: https://github.com/ollama/ollama/blob/main/docs/README.md
+  gpuSupport: true
diff --git a/vllm/data.yml b/vllm/data.yml
index bbc517054..90d5264cc 100644
--- a/vllm/data.yml
+++ b/vllm/data.yml
@@ -26,3 +26,4 @@ additionalProperties:
   website: https://github.com/vllm-project/vllm
   github: https://github.com/vllm-project/vllm
   document: https://docs.vllm.ai/en/latest/
+  gpuSupport: true