fix: local API server auto-start falls back to the first available model when the last used model is missing

Faisal Amir 2025-10-01 11:04:28 +07:00
parent c5a5968bf8
commit 2679b19e32
11 changed files with 168 additions and 128 deletions

View File

@@ -24,6 +24,7 @@ import { predefinedProviders } from '@/consts/providers'
import { useServiceHub } from '@/hooks/useServiceHub'
import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import { getLastUsedModel } from '@/utils/getModelToStart'
type DropdownModelProviderProps = {
model?: ThreadModel
@@ -39,16 +40,6 @@ interface SearchableModel {
}
// Helper functions for localStorage
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
const setLastUsedModel = (provider: string, model: string) => {
try {
localStorage.setItem(

View File

@@ -169,6 +169,12 @@
"serverLogs": "Server Logs",
"serverLogsDesc": "Zeige detaillierte Logs des lokalen API-Servers an.",
"openLogs": "Logs öffnen",
"swaggerDocs": "API-Dokumentation",
"swaggerDocsDesc": "Zeige interaktive API-Dokumentation (Swagger UI) an.",
"openDocs": "Dokumentation öffnen",
"startupConfiguration": "Startkonfiguration",
"runOnStartup": "Standardmäßig beim Start aktivieren",
"runOnStartupDesc": "Starte den lokalen API-Server automatisch beim Anwendungsstart. Verwendet das zuletzt verwendete Modell oder wählt das erste verfügbare Modell, falls nicht verfügbar.",
"serverConfiguration": "Server Konfiguration",
"serverHost": "Server Host",
"serverHostDesc": "Netzwerkadresse für den Server.",

View File

@@ -169,9 +169,12 @@
"serverLogs": "Server Logs",
"serverLogsDesc": "View detailed logs of the local API server.",
"openLogs": "Open Logs",
"swaggerDocs": "API Documentation",
"swaggerDocsDesc": "View interactive API documentation (Swagger UI).",
"openDocs": "Open Docs",
"startupConfiguration": "Startup Configuration",
"runOnStartup": "Enable by default on startup",
"runOnStartupDesc": "Automatically start the Local API Server when the application launches.",
"runOnStartupDesc": "Automatically start the Local API Server when the application launches. Uses last used model, or picks the first available model if unavailable.",
"serverConfiguration": "Server Configuration",
"serverHost": "Server Host",
"serverHostDesc": "Network address for the server.",

View File

@@ -167,6 +167,12 @@
"serverLogs": "Log Server",
"serverLogsDesc": "Lihat log terperinci dari server API lokal.",
"openLogs": "Buka Log",
"swaggerDocs": "Dokumentasi API",
"swaggerDocsDesc": "Lihat dokumentasi API interaktif (Swagger UI).",
"openDocs": "Buka Dokumentasi",
"startupConfiguration": "Konfigurasi Startup",
"runOnStartup": "Aktifkan secara default saat startup",
"runOnStartupDesc": "Mulai Server API Lokal secara otomatis saat aplikasi diluncurkan. Menggunakan model terakhir yang digunakan, atau memilih model pertama yang tersedia jika tidak tersedia.",
"serverConfiguration": "Konfigurasi Server",
"serverHost": "Host Server",
"serverHostDesc": "Alamat jaringan untuk server.",

View File

@@ -167,9 +167,12 @@
"serverLogs": "Dzienniki Serwera",
"serverLogsDesc": "Wyświetl szczegółowe dzienniki lokalnego serwera API.",
"openLogs": "Otwórz Dzienniki",
"swaggerDocs": "Dokumentacja API",
"swaggerDocsDesc": "Wyświetl interaktywną dokumentację API (Swagger UI).",
"openDocs": "Otwórz Dokumentację",
"startupConfiguration": "Konfiguracja Startowa",
"runOnStartup": "Domyślnie włączaj przy starcie",
"runOnStartupDesc": "Automatycznie uruchamiaj lokalny serwer API podczas uruchamiania aplikacji.",
"runOnStartupDesc": "Automatycznie uruchamiaj lokalny serwer API podczas uruchamiania aplikacji. Używa ostatnio używanego modelu lub wybiera pierwszy dostępny model, jeśli nie jest dostępny.",
"serverConfiguration": "Konfiguracja Serwera",
"serverHost": "Host",
"serverHostDesc": "Adres sieciowy serwera.",

View File

@@ -169,6 +169,12 @@
"serverLogs": "Nhật ký máy chủ",
"serverLogsDesc": "Xem nhật ký chi tiết của máy chủ API cục bộ.",
"openLogs": "Mở nhật ký",
"swaggerDocs": "Tài liệu API",
"swaggerDocsDesc": "Xem tài liệu API tương tác (Swagger UI).",
"openDocs": "Mở tài liệu",
"startupConfiguration": "Cấu hình khởi động",
"runOnStartup": "Bật mặc định khi khởi động",
"runOnStartupDesc": "Tự động khởi động Máy chủ API Cục bộ khi ứng dụng khởi chạy. Sử dụng mô hình đã dùng gần nhất hoặc chọn mô hình đầu tiên có sẵn nếu không khả dụng.",
"serverConfiguration": "Cấu hình máy chủ",
"serverHost": "Máy chủ lưu trữ",
"serverHostDesc": "Địa chỉ mạng cho máy chủ.",

View File

@@ -169,6 +169,12 @@
"serverLogs": "服务器日志",
"serverLogsDesc": "查看本地 API 服务器的详细日志。",
"openLogs": "打开日志",
"swaggerDocs": "API 文档",
"swaggerDocsDesc": "查看交互式 API 文档Swagger UI。",
"openDocs": "打开文档",
"startupConfiguration": "启动配置",
"runOnStartup": "默认在启动时启用",
"runOnStartupDesc": "应用程序启动时自动启动本地 API 服务器。使用上次使用的模型,如果不可用则选择第一个可用模型。",
"serverConfiguration": "服务器配置",
"serverHost": "服务器主机",
"serverHostDesc": "服务器的网络地址。",

View File

@@ -167,6 +167,12 @@
"serverLogs": "伺服器日誌",
"serverLogsDesc": "檢視本機 API 伺服器的詳細日誌。",
"openLogs": "開啟日誌",
"swaggerDocs": "API 文件",
"swaggerDocsDesc": "查看互動式 API 文件Swagger UI。",
"openDocs": "開啟文件",
"startupConfiguration": "啟動設定",
"runOnStartup": "預設在啟動時啟用",
"runOnStartupDesc": "應用程式啟動時自動啟動本機 API 伺服器。使用上次使用的模型,如果不可用則選擇第一個可用模型。",
"serverConfiguration": "伺服器設定",
"serverHost": "伺服器主機",
"serverHostDesc": "伺服器的網路位址。",

View File

@@ -12,8 +12,8 @@ import { useThreads } from '@/hooks/useThreads'
import { useLocalApiServer } from '@/hooks/useLocalApiServer'
import { useAppState } from '@/hooks/useAppState'
import { AppEvent, events } from '@janhq/core'
import { localStorageKey } from '@/constants/localStorage'
import { SystemEvent } from '@/types/events'
import { getModelToStart } from '@/utils/getModelToStart'
export function DataProvider() {
const { setProviders, selectedModel, selectedProvider, getProviderByName } =
@@ -66,12 +66,15 @@ export function DataProvider() {
// Listen for deep link events
let unsubscribe = () => {}
serviceHub.events().listen(SystemEvent.DEEP_LINK, (event) => {
const deep_link = event.payload as string
handleDeepLink([deep_link])
}).then((unsub) => {
unsubscribe = unsub
})
serviceHub
.events()
.listen(SystemEvent.DEEP_LINK, (event) => {
const deep_link = event.payload as string
handleDeepLink([deep_link])
})
.then((unsub) => {
unsubscribe = unsub
})
return () => {
unsubscribe()
}
@@ -109,54 +112,6 @@ export function DataProvider() {
})
}, [serviceHub, setProviders])
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
const getModelToStart = () => {
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (
provider &&
provider.models.some((m) => m.id === lastUsedModel.model)
) {
return { model: lastUsedModel.model, provider }
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}
// Auto-start Local API Server on app startup if enabled
useEffect(() => {
if (enableOnStartup) {
@@ -166,7 +121,11 @@ export function DataProvider() {
return
}
const modelToStart = getModelToStart()
const modelToStart = getModelToStart({
selectedModel,
selectedProvider,
getProviderByName,
})
// Only start server if we have a model to load
if (!modelToStart) {

View File

@@ -15,7 +15,6 @@ import { useLocalApiServer } from '@/hooks/useLocalApiServer'
import { useAppState } from '@/hooks/useAppState'
import { useModelProvider } from '@/hooks/useModelProvider'
import { useServiceHub } from '@/hooks/useServiceHub'
import { localStorageKey } from '@/constants/localStorage'
import { IconLogs } from '@tabler/icons-react'
import { cn } from '@/lib/utils'
import { ApiKeyInput } from '@/containers/ApiKeyInput'
@@ -23,6 +22,7 @@ import { useEffect, useState } from 'react'
import { PlatformGuard } from '@/lib/platform/PlatformGuard'
import { PlatformFeature } from '@/lib/platform'
import { toast } from 'sonner'
import { getModelToStart } from '@/utils/getModelToStart'
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const Route = createFileRoute(route.settings.local_api_server as any)({
@@ -81,54 +81,6 @@ function LocalAPIServerContent() {
setIsApiKeyEmpty(!isValid)
}
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
const getModelToStart = () => {
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (
provider &&
provider.models.some((m) => m.id === lastUsedModel.model)
) {
return { model: lastUsedModel.model, provider }
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}
const [isModelLoading, setIsModelLoading] = useState(false)
const toggleAPIServer = async () => {
@@ -136,7 +88,7 @@ function LocalAPIServerContent() {
if (serverStatus === 'stopped') {
console.log('Starting server with port:', serverPort)
toast.info('Starting server...', {
description: `Attempting to start server on port ${serverPort}`
description: `Attempting to start server on port ${serverPort}`,
})
if (!apiKey || apiKey.toString().trim().length === 0) {
@@ -145,7 +97,11 @@ function LocalAPIServerContent() {
}
setShowApiKeyError(false)
const modelToStart = getModelToStart()
const modelToStart = getModelToStart({
selectedModel,
selectedProvider,
getProviderByName,
})
// Only start server if we have a model to load
if (!modelToStart) {
console.warn(
@@ -191,31 +147,31 @@ function LocalAPIServerContent() {
toast.dismiss()
// Extract error message from various error formats
const errorMsg = error && typeof error === 'object' && 'message' in error
? String(error.message)
: String(error)
const errorMsg =
error && typeof error === 'object' && 'message' in error
? String(error.message)
: String(error)
// Port-related errors (highest priority)
if (errorMsg.includes('Address already in use')) {
toast.error('Port has been occupied', {
description: `Port ${serverPort} is already in use. Please try a different port.`
description: `Port ${serverPort} is already in use. Please try a different port.`,
})
}
// Model-related errors
else if (errorMsg.includes('Invalid or inaccessible model path')) {
toast.error('Invalid or inaccessible model path', {
description: errorMsg
description: errorMsg,
})
}
else if (errorMsg.includes('model')) {
} else if (errorMsg.includes('model')) {
toast.error('Failed to start model', {
description: errorMsg
description: errorMsg,
})
}
// Generic server errors
else {
toast.error('Failed to start server', {
description: errorMsg
description: errorMsg,
})
}
})
@@ -307,6 +263,35 @@ function LocalAPIServerContent() {
</Button>
}
/>
<CardItem
title={t('settings:localApiServer.swaggerDocs')}
description={t('settings:localApiServer.swaggerDocsDesc')}
actions={
<a
href={`http://${serverHost}:${serverPort}`}
target="_blank"
rel="noopener noreferrer"
>
<Button
asChild
variant="link"
size="sm"
className="p-0 text-main-view-fg/80"
disabled={!isServerRunning}
title={t('settings:localApiServer.swaggerDocs')}
>
<div
className={cn(
'cursor-pointer flex items-center justify-center rounded-sm hover:bg-main-view-fg/15 bg-main-view-fg/10 transition-all duration-200 ease-in-out px-2 py-1 gap-1',
!isServerRunning && 'opacity-50 cursor-not-allowed'
)}
>
<span>{t('settings:localApiServer.openDocs')}</span>
</div>
</Button>
</a>
}
/>
</Card>
{/* Startup Configuration */}

View File

@@ -0,0 +1,69 @@
import { localStorageKey } from '@/constants/localStorage'
import type { ModelInfo } from '@janhq/core'
export const getLastUsedModel = (): {
provider: string
model: string
} | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
export const getModelToStart = (params: {
selectedModel?: ModelInfo | null
selectedProvider?: string | null
getProviderByName: (name: string) => ModelProvider | undefined
}): { model: string; provider: ModelProvider } | null => {
const { selectedModel, selectedProvider, getProviderByName } = params
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (provider && provider.models.some((m) => m.id === lastUsedModel.model)) {
return { model: lastUsedModel.model, provider }
} else {
// Last used model not found under provider, fallback to first llamacpp model
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}
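
For reference, a minimal sketch of how a caller is expected to use the new helper. It is not part of the commit: the hook name useStartupModel is illustrative, and it assumes useModelProvider exposes selectedModel, selectedProvider, and getProviderByName, as the call sites in DataProvider.tsx and the local-api-server settings route suggest.

import { getModelToStart } from '@/utils/getModelToStart'
import { useModelProvider } from '@/hooks/useModelProvider'

// Illustrative only: wraps getModelToStart so a component can resolve which
// model the Local API Server should load on startup or when toggled on.
export function useStartupModel() {
  const { selectedModel, selectedProvider, getProviderByName } = useModelProvider()

  // Resolution order implemented by utils/getModelToStart.ts:
  // - the last used model from localStorage, if its provider still lists it
  // - if a stale last-used entry exists, the first llamacpp model
  // - otherwise the currently selected model, then the first llamacpp model
  // Returns null when no model is available, in which case the server is not started.
  return getModelToStart({ selectedModel, selectedProvider, getProviderByName })
}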