From 01050f3103acb5bf15e41909628504993400b31f Mon Sep 17 00:00:00 2001
From: Akarshan Biswas
Date: Thu, 9 Oct 2025 07:21:53 +0530
Subject: [PATCH] fix: Gracefully handle offline mode during backend check
 (#6767)

The `listSupportedBackends` function now includes error handling for the
`fetchRemoteSupportedBackends` call. This addresses an issue where an error
thrown during the remote fetch (e.g., no network connection in offline mode)
would prevent the subsequent loading of locally installed or manually
provided llama.cpp backends. The remote backend versions array now defaults
to empty if the fetch fails, allowing the rest of the backend initialization
process to proceed as expected.
---
 extensions/llamacpp-extension/src/backend.ts | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/extensions/llamacpp-extension/src/backend.ts b/extensions/llamacpp-extension/src/backend.ts
index a313e01c6..bd0543227 100644
--- a/extensions/llamacpp-extension/src/backend.ts
+++ b/extensions/llamacpp-extension/src/backend.ts
@@ -156,8 +156,13 @@ export async function listSupportedBackends(): Promise<
     supportedBackends.push('macos-arm64')
   }
   // get latest backends from Github
-  const remoteBackendVersions =
+  let remoteBackendVersions = []
+  try {
+    remoteBackendVersions =
       await fetchRemoteSupportedBackends(supportedBackends)
+  } catch (e) {
+    console.debug(`Unable to fetch remote backends; Jan might be offline or there may be a network problem: ${String(e)}`)
+  }
   // Get locally installed versions
   const localBackendVersions = await getLocalInstalledBackends()

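For context, here is a minimal sketch of the offline-tolerant fallback pattern this patch introduces. It is not the actual Jan extension code: `fetchRemoteSupportedBackends` and `getLocalInstalledBackends` are reimplemented as stand-in stubs with assumed signatures and return types, and `listBackendsWithOfflineFallback` plus the `BackendVersion` type are hypothetical names used only for illustration.

```typescript
// Minimal sketch (not the actual Jan extension code): stand-in types and stubs
// illustrating how a failed remote fetch falls back to local backends.
type BackendVersion = { backend: string; version: string }

// Stub standing in for the real network call; throws to simulate offline mode.
async function fetchRemoteSupportedBackends(
  supported: string[]
): Promise<BackendVersion[]> {
  throw new Error(`network unreachable (requested: ${supported.join(', ')})`)
}

// Stub standing in for reading backends already installed on disk.
async function getLocalInstalledBackends(): Promise<BackendVersion[]> {
  return [{ backend: 'macos-arm64', version: 'b4500' }]
}

async function listBackendsWithOfflineFallback(
  supportedBackends: string[]
): Promise<BackendVersion[]> {
  // Default to an empty array so a failed remote fetch cannot abort the flow.
  let remoteBackendVersions: BackendVersion[] = []
  try {
    remoteBackendVersions = await fetchRemoteSupportedBackends(supportedBackends)
  } catch (e) {
    // Log and continue; locally installed backends remain usable offline.
    console.debug(`Unable to fetch remote backends: ${String(e)}`)
  }

  const localBackendVersions = await getLocalInstalledBackends()
  // Combine both sources; the real merge/dedup logic lives in backend.ts.
  return [...remoteBackendVersions, ...localBackendVersions]
}

// Usage: resolves to only the locally installed backend when the fetch throws.
listBackendsWithOfflineFallback(['macos-arm64']).then((backends) =>
  console.log(backends)
)
```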