Merge branch 'dev' into release/v0.6.8
Commit: 55d998ea99
.github/ISSUE_TEMPLATE/3-epic.md (vendored, 23 lines changed)

@@ -1,12 +1,27 @@
---
name: 🌟 Epic
about: Major building block that advances Jan's goals
about: User stories and specs
title: 'epic: '
type: Epic
---

## Goal
## User Stories

## Tasklist
- As a [user type], I can [do something] so that [outcome]

## Out of scope
## Not in scope

-

## User Flows & Designs

- Key user flows
- Figma link
- Edge cases
- Error states

## Engineering Decisions

- **Technical Approach:** Brief outline of the solution.
- **Key Trade-offs:** What’s been considered/rejected and why.
- **Dependencies:** APIs, services, libraries, teams.
.github/ISSUE_TEMPLATE/4-goal.md (vendored, 31 lines changed)

@@ -1,13 +1,38 @@
---
name: 🎯 Goal
about: External communication of Jan's roadmap and objectives
about: Roadmap goals for our users
title: 'goal: '
type: Goal
---

## Goal

## Tasklist
> Why are we doing this? 1 liner value proposition

## Out of scope
_e.g. Make onboarding to Jan 3x easier_

## Success Criteria

> When do we consider this done? Limit to 3.

1. _e.g. Redesign onboarding flow to remove redundant steps._
2. _e.g. Add a “getting started” guide_
3. _e.g. Make local model setup more “normie” friendly_

## Non Goals

> What is out of scope?

- _e.g. Take advanced users through customizing settings_

## User research (if any)

> Links to user messages and interviews

## Design inspo

> Links

## Open questions

> What are we not sure about?
.github/workflows/jan-tauri-build-nightly.yaml (vendored, 17 lines changed)

@@ -16,6 +16,23 @@ on:
    branches:
      - release/**
      - dev
    paths:
      - '.github/workflows/jan-tauri-build-nightly.yaml'
      - '.github/workflows/template-get-update-version.yml'
      - '.github/workflows/template-tauri-build-macos.yml'
      - '.github/workflows/template-tauri-build-windows-x64.yml'
      - '.github/workflows/template-tauri-build-linux-x64.yml'
      - '.github/workflows/template-noti-discord-and-update-url-readme.yml'
      - 'src-tauri/**'
      - 'core/**'
      - 'web-app/**'
      - 'extensions/**'
      - 'scripts/**'
      - 'pre-install/**'
      - 'Makefile'
      - 'package.json'
      - 'mise.toml'

jobs:
  set-public-provider:
@@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
use std::{fs, io, path::PathBuf};
use tauri::{AppHandle, Manager, Runtime, State};

use crate::core::utils::extensions::inference_llamacpp_extension::cleanup::cleanup_processes;
use crate::core::{mcp::clean_up_mcp_servers, utils::extensions::inference_llamacpp_extension::cleanup::cleanup_processes};

use super::{server, setup, state::AppState};

@@ -125,6 +125,7 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) {
    log::info!("Factory reset, removing data folder: {:?}", data_folder);

    tauri::async_runtime::block_on(async {
        clean_up_mcp_servers(state.clone()).await;
        cleanup_processes(state).await;

    if data_folder.exists() {

@@ -138,7 +139,7 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) {
    let _ = fs::create_dir_all(&data_folder).map_err(|e| e.to_string());

    // Reset the configuration
    let mut default_config = AppConfiguration::default();
    let mut default_config: AppConfiguration = AppConfiguration::default();
    default_config.data_folder = default_data_folder_path(app_handle.clone());
    let _ = update_app_configuration(app_handle.clone(), default_config);
@@ -751,6 +751,26 @@ pub async fn reset_mcp_restart_count(state: State<'_, AppState>, server_name: St
    Ok(())
}

pub async fn clean_up_mcp_servers(
    state: State<'_, AppState>,
) {
    log::info!("Cleaning up MCP servers");

    // Stop all running MCP servers
    let _ = stop_mcp_servers(state.mcp_servers.clone()).await;

    // Clear active servers and restart counts
    {
        let mut active_servers = state.mcp_active_servers.lock().await;
        active_servers.clear();
    }
    {
        let mut restart_counts = state.mcp_restart_counts.lock().await;
        restart_counts.clear();
    }
    log::info!("MCP servers cleaned up successfully");
}

pub async fn stop_mcp_servers(
    servers_state: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>>,
) -> Result<(), String> {
@@ -199,30 +199,6 @@ pub fn setup_mcp(app: &App) {
    let state = app.state::<AppState>();
    let servers = state.mcp_servers.clone();
    let app_handle: tauri::AppHandle = app.handle().clone();
    // Setup kill-mcp-servers event listener (similar to kill-sidecar)
    let app_handle_for_kill = app_handle.clone();
    app_handle.listen("kill-mcp-servers", move |_event| {
        let app_handle = app_handle_for_kill.clone();
        tauri::async_runtime::spawn(async move {
            log::info!("Received kill-mcp-servers event - cleaning up MCP servers");
            let app_state = app_handle.state::<AppState>();
            // Stop all running MCP servers
            if let Err(e) = super::mcp::stop_mcp_servers(app_state.mcp_servers.clone()).await {
                log::error!("Failed to stop MCP servers: {}", e);
                return;
            }
            // Clear active servers and restart counts
            {
                let mut active_servers = app_state.mcp_active_servers.lock().await;
                active_servers.clear();
            }
            {
                let mut restart_counts = app_state.mcp_restart_counts.lock().await;
                restart_counts.clear();
            }
            log::info!("MCP servers cleaned up successfully");
        });
    });
    tauri::async_runtime::spawn(async move {
        if let Err(e) = run_mcp_commands(&app_handle, servers).await {
            log::error!("Failed to run mcp commands: {}", e);
@@ -10,6 +10,8 @@ use std::{collections::HashMap, sync::Arc};
use tauri::{Emitter, Manager, RunEvent};
use tokio::sync::Mutex;

use crate::core::mcp::clean_up_mcp_servers;

#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    let mut builder = tauri::Builder::default();

@@ -143,10 +145,10 @@ pub fn run() {
        .on_window_event(|window, event| match event {
            tauri::WindowEvent::CloseRequested { .. } => {
                if window.label() == "main" {
                    window.emit("kill-mcp-servers", ()).unwrap();
                    let state = window.app_handle().state::<AppState>();

                    tauri::async_runtime::block_on(async {
                        clean_up_mcp_servers(state.clone()).await;
                        cleanup_processes(state).await;
                    });
                }

@@ -173,6 +175,7 @@ pub fn run() {
            }

            // Quick cleanup with shorter timeout
            clean_up_mcp_servers(state.clone()).await;
            cleanup_processes(state).await;
        });
    });
@@ -49,7 +49,6 @@ describe('useModelSources', () => {
    expect(result.current.error).toBe(null)
    expect(result.current.loading).toBe(false)
    expect(typeof result.current.fetchSources).toBe('function')
    expect(typeof result.current.addSource).toBe('function')
  })

  describe('fetchSources', () => {
@@ -225,153 +224,6 @@ describe('useModelSources', () => {
    })
  })

  describe('addSource', () => {
    it('should add a new source to the store', () => {
      const { result } = renderHook(() => useModelSources())

      const testModel: CatalogModel = {
        model_name: 'test-model',
        description: 'Test model description',
        developer: 'test-developer',
        downloads: 100,
        num_quants: 2,
        quants: [
          {
            model_id: 'test-model-q4',
            path: 'https://example.com/test-model-q4.gguf',
            file_size: '2.0 GB',
          },
        ],
        created_at: '2023-01-01T00:00:00Z',
      }

      act(() => {
        result.current.addSource(testModel)
      })

      expect(result.current.sources).toHaveLength(1)
      expect(result.current.sources[0]).toEqual(testModel)
    })

    it('should replace existing source with same model_name', () => {
      const { result } = renderHook(() => useModelSources())

      const originalModel: CatalogModel = {
        model_name: 'duplicate-model',
        description: 'Original description',
        developer: 'original-developer',
        downloads: 50,
        num_quants: 1,
        quants: [],
        created_at: '2023-01-01T00:00:00Z',
      }

      const updatedModel: CatalogModel = {
        model_name: 'duplicate-model',
        description: 'Updated description',
        developer: 'updated-developer',
        downloads: 150,
        num_quants: 2,
        quants: [
          {
            model_id: 'duplicate-model-q4',
            path: 'https://example.com/duplicate-model-q4.gguf',
            file_size: '3.0 GB',
          },
        ],
        created_at: '2023-02-01T00:00:00Z',
      }

      act(() => {
        result.current.addSource(originalModel)
      })

      expect(result.current.sources).toHaveLength(1)

      act(() => {
        result.current.addSource(updatedModel)
      })

      expect(result.current.sources).toHaveLength(1)
      expect(result.current.sources[0]).toEqual(updatedModel)
    })

    it('should handle multiple different sources', () => {
      const { result } = renderHook(() => useModelSources())

      const model1: CatalogModel = {
        model_name: 'model-1',
        description: 'First model',
        developer: 'developer-1',
        downloads: 100,
        num_quants: 1,
        quants: [],
        created_at: '2023-01-01T00:00:00Z',
      }

      const model2: CatalogModel = {
        model_name: 'model-2',
        description: 'Second model',
        developer: 'developer-2',
        downloads: 200,
        num_quants: 1,
        quants: [],
        created_at: '2023-01-02T00:00:00Z',
      }

      act(() => {
        result.current.addSource(model1)
      })

      act(() => {
        result.current.addSource(model2)
      })

      expect(result.current.sources).toHaveLength(2)
      expect(result.current.sources).toContainEqual(model1)
      expect(result.current.sources).toContainEqual(model2)
    })

    it('should handle CatalogModel with complete quants data', () => {
      const { result } = renderHook(() => useModelSources())

      const modelWithQuants: CatalogModel = {
        model_name: 'model-with-quants',
        description: 'Model with quantizations',
        developer: 'quant-developer',
        downloads: 500,
        num_quants: 3,
        quants: [
          {
            model_id: 'model-q4_k_m',
            path: 'https://example.com/model-q4_k_m.gguf',
            file_size: '2.0 GB',
          },
          {
            model_id: 'model-q8_0',
            path: 'https://example.com/model-q8_0.gguf',
            file_size: '4.0 GB',
          },
          {
            model_id: 'model-f16',
            path: 'https://example.com/model-f16.gguf',
            file_size: '8.0 GB',
          },
        ],
        created_at: '2023-01-01T00:00:00Z',
        readme: 'https://example.com/readme.md',
      }

      act(() => {
        result.current.addSource(modelWithQuants)
      })

      expect(result.current.sources).toHaveLength(1)
      expect(result.current.sources[0]).toEqual(modelWithQuants)
      expect(result.current.sources[0].quants).toHaveLength(3)
    })
  })

  describe('state management', () => {
    it('should maintain state across multiple hook instances', () => {
      const { result: result1 } = renderHook(() => useModelSources())
@@ -8,7 +8,6 @@ type ModelSourcesState = {
  sources: CatalogModel[]
  error: Error | null
  loading: boolean
  addSource: (source: CatalogModel) => void
  fetchSources: () => Promise<void>
}
@@ -18,15 +17,6 @@ export const useModelSources = create<ModelSourcesState>()(
    sources: [],
    error: null,
    loading: false,

    addSource: (source: CatalogModel) => {
      set((state) => ({
        sources: [
          ...state.sources.filter((e) => e.model_name !== source.model_name),
          source,
        ],
      }))
    },
    fetchSources: async () => {
      set({ loading: true, error: null })
      try {
@@ -27,6 +27,7 @@ import {
import { Progress } from '@/components/ui/progress'
import { Button } from '@/components/ui/button'
import { cn } from '@/lib/utils'
import { useGeneralSetting } from '@/hooks/useGeneralSetting'

type SearchParams = {
  repo: string
@@ -42,6 +43,7 @@ export const Route = createFileRoute('/hub/$modelId')({
function HubModelDetail() {
  const { modelId } = useParams({ from: Route.id })
  const navigate = useNavigate()
  const { huggingfaceToken } = useGeneralSetting()
  const { sources, fetchSources } = useModelSources()
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const search = useSearch({ from: Route.id as any })
@@ -60,12 +62,15 @@ function HubModelDetail() {
  }, [fetchSources])

  const fetchRepo = useCallback(async () => {
    const repoInfo = await fetchHuggingFaceRepo(search.repo || modelId)
    const repoInfo = await fetchHuggingFaceRepo(
      search.repo || modelId,
      huggingfaceToken
    )
    if (repoInfo) {
      const repoDetail = convertHfRepoToCatalogModel(repoInfo)
      setRepoData(repoDetail)
    }
  }, [modelId, search])
  }, [modelId, search, huggingfaceToken])

  useEffect(() => {
    fetchRepo()
@@ -151,7 +156,20 @@ function HubModelDetail() {
  useEffect(() => {
    if (modelData?.readme) {
      setIsLoadingReadme(true)
      // Try fetching without headers first.
      // There is a weird issue where this HF link returns an error when a public repo is accessed with an auth header.
      fetch(modelData.readme)
        .then((response) => {
          if (!response.ok && huggingfaceToken && modelData?.readme) {
            // Retry with the Authorization header if the first fetch failed
            return fetch(modelData.readme, {
              headers: {
                Authorization: `Bearer ${huggingfaceToken}`,
              },
            })
          }
          return response
        })
        .then((response) => response.text())
        .then((content) => {
          setReadmeContent(content)
@@ -162,7 +180,7 @@ function HubModelDetail() {
          setIsLoadingReadme(false)
        })
    }
  }, [modelData?.readme])
  }, [modelData?.readme, huggingfaceToken])

  if (!modelData) {
    return (
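
Editor's note: the README effect above tries an unauthenticated fetch first and only retries with the Hugging Face token when the first response is not ok. Below is a minimal standalone sketch of that fallback pattern; the helper name and shape are illustrative assumptions, not code from the repository.

// Illustrative sketch only: fetch without auth first, retry with a Bearer token
// if the unauthenticated request fails. `fetchWithOptionalAuth` is hypothetical.
async function fetchWithOptionalAuth(url: string, token?: string): Promise<string> {
  let response = await fetch(url)
  if (!response.ok && token) {
    // Some Hugging Face URLs reject an Authorization header on public repos,
    // so the token is only sent on a second attempt.
    response = await fetch(url, {
      headers: { Authorization: `Bearer ${token}` },
    })
  }
  return response.text()
}
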
@@ -39,6 +39,7 @@ import HeaderPage from '@/containers/HeaderPage'
import { Loader } from 'lucide-react'
import { useTranslation } from '@/i18n/react-i18next-compat'
import Fuse from 'fuse.js'
import { useGeneralSetting } from '@/hooks/useGeneralSetting'

type ModelProps = {
  model: CatalogModel
@@ -57,6 +58,7 @@ export const Route = createFileRoute(route.hub.index as any)({

function Hub() {
  const parentRef = useRef(null)
  const { huggingfaceToken } = useGeneralSetting()

  const { t } = useTranslation()
  const sortOptions = [
@@ -71,7 +73,7 @@ function Hub() {
    }
  }, [])

  const { sources, addSource, fetchSources, loading } = useModelSources()
  const { sources, fetchSources, loading } = useModelSources()

  const [searchValue, setSearchValue] = useState('')
  const [sortSelected, setSortSelected] = useState('newest')
@@ -185,14 +187,16 @@ function Hub() {
    addModelSourceTimeoutRef.current = setTimeout(async () => {
      try {
        // Fetch HuggingFace repository information
        const repoInfo = await fetchHuggingFaceRepo(e.target.value)
        const repoInfo = await fetchHuggingFaceRepo(
          e.target.value,
          huggingfaceToken
        )
        if (repoInfo) {
          const catalogModel = convertHfRepoToCatalogModel(repoInfo)
          if (
            !sources.some((s) => s.model_name === catalogModel.model_name)
          ) {
            setHuggingFaceRepo(catalogModel)
            addSource(catalogModel)
          }
        }
      } catch (error) {
@@ -325,7 +325,10 @@ describe('models service', () => {

      expect(result).toEqual(mockRepoData)
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
        {
          headers: {},
        }
      )
    })
@@ -341,19 +344,28 @@ describe('models service', () => {
        'https://huggingface.co/microsoft/DialoGPT-medium'
      )
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
        {
          headers: {},
        }
      )

      // Test with domain prefix
      await fetchHuggingFaceRepo('huggingface.co/microsoft/DialoGPT-medium')
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
        {
          headers: {},
        }
      )

      // Test with trailing slash
      await fetchHuggingFaceRepo('microsoft/DialoGPT-medium/')
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
        {
          headers: {},
        }
      )
    })
@@ -379,7 +391,10 @@ describe('models service', () => {

      expect(result).toBeNull()
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/nonexistent/model?blobs=true'
        'https://huggingface.co/api/models/nonexistent/model?blobs=true',
        {
          headers: {},
        }
      )
    })
@@ -99,7 +99,8 @@ export const fetchModelCatalog = async (): Promise<ModelCatalog> => {
 * @returns A promise that resolves to the repository information.
 */
export const fetchHuggingFaceRepo = async (
  repoId: string
  repoId: string,
  hfToken?: string
): Promise<HuggingFaceRepo | null> => {
  try {
    // Clean the repo ID to handle various input formats
@@ -114,7 +115,14 @@ export const fetchHuggingFaceRepo = async (
    }

    const response = await fetch(
      `https://huggingface.co/api/models/${cleanRepoId}?blobs=true`
      `https://huggingface.co/api/models/${cleanRepoId}?blobs=true`,
      {
        headers: hfToken
          ? {
              Authorization: `Bearer ${hfToken}`,
            }
          : {},
      }
    )

    if (!response.ok) {
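
Editor's note: `fetchHuggingFaceRepo` now accepts an optional Hugging Face token and only attaches an Authorization header when one is provided. A minimal usage sketch of the updated signature follows; the import path and the caller function are assumptions for illustration, while `fetchHuggingFaceRepo` and `convertHfRepoToCatalogModel` come from the hunks above.

// Illustrative caller sketch, not code from the repository.
// The import path is assumed; only the two function names come from the diff.
import { fetchHuggingFaceRepo, convertHfRepoToCatalogModel } from '@/services/models'

async function loadRepo(repoId: string, huggingfaceToken?: string) {
  // Without a token the request is sent with empty headers;
  // with a token an `Authorization: Bearer <token>` header is attached.
  const repoInfo = await fetchHuggingFaceRepo(repoId, huggingfaceToken)
  if (!repoInfo) return null
  return convertHfRepoToCatalogModel(repoInfo)
}
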
@@ -17,7 +17,7 @@ export enum ModelCapabilities {

// TODO: Remove this enum when we integrate llama.cpp extension
export enum DefaultToolUseSupportedModels {
  JanNano = 'jan-nano',
  JanNano = 'jan-',
  Qwen3 = 'qwen3',
  Lucy = 'lucy',
}
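
Editor's note: `JanNano` changes from the full id `'jan-nano'` to the prefix `'jan-'`, which suggests these enum values are matched as prefixes against model ids. A hedged sketch of such a check follows; the `supportsToolUse` helper is hypothetical, and only the enum itself comes from the diff.

// Hypothetical helper, assuming tool-use support is decided by checking whether
// a model id starts with one of the enum values (implied by 'jan-nano' -> 'jan-').
export enum DefaultToolUseSupportedModels {
  JanNano = 'jan-',
  Qwen3 = 'qwen3',
  Lucy = 'lucy',
}

export const supportsToolUse = (modelId: string): boolean =>
  Object.values(DefaultToolUseSupportedModels).some((prefix) =>
    modelId.toLowerCase().startsWith(prefix)
  )
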