Merge pull request #6024 from menloresearch/fix/jan-hub-repo-data-and-deeplink
fix: Jan hub model detail and deep link
Commit c642076ec3
@@ -129,6 +129,12 @@ pub fn run() {
             if let Err(e) = setup::install_extensions(app.handle().clone(), false) {
                 log::error!("Failed to install extensions: {}", e);
             }
+
+            #[cfg(any(windows, target_os = "linux"))]
+            {
+                use tauri_plugin_deep_link::DeepLinkExt;
+                app.deep_link().register_all()?;
+            }
             setup_mcp(app);
             Ok(())
         })
@@ -62,7 +62,12 @@ export function DataProvider() {
   // Check for app updates
   useEffect(() => {
-    checkForUpdate()
+    // Only check for updates if the auto updater is not disabled
+    // App might be distributed via other package managers
+    // or methods that handle updates differently
+    if (!AUTO_UPDATER_DISABLED) {
+      checkForUpdate()
+    }
   }, [checkForUpdate])

   const handleDeepLink = (urls: string[] | null) => {
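Note: the `AUTO_UPDATER_DISABLED` constant used in this gate is a compile-time define rather than a runtime setting; the `global.d.ts` and `vite.config.ts` hunks further down declare and inject it. A minimal sketch of how the pieces fit together, using only names from this PR (the helper function itself is illustrative, not part of the change):

```ts
// AUTO_UPDATER_DISABLED is injected by Vite's `define` (see the vite config hunk
// below) and declared in global.d.ts, so it is a plain boolean at runtime.
declare const AUTO_UPDATER_DISABLED: boolean

// Mirrors the gate added above: builds distributed through package managers can
// set AUTO_UPDATER_DISABLED=true at build time and skip the update check.
export function maybeCheckForUpdate(checkForUpdate: () => void): void {
  if (!AUTO_UPDATER_DISABLED) {
    checkForUpdate()
  }
}
```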
@@ -79,7 +84,7 @@ export function DataProvider() {
     const resource = params.slice(1).join('/')
     // return { action, provider, resource }
     navigate({
-      to: route.hub.index,
+      to: route.hub.model,
       search: {
         repo: resource,
       },
@@ -1,5 +1,10 @@
 import HeaderPage from '@/containers/HeaderPage'
-import { createFileRoute, useParams, useNavigate } from '@tanstack/react-router'
+import {
+  createFileRoute,
+  useParams,
+  useNavigate,
+  useSearch,
+} from '@tanstack/react-router'
 import {
   IconArrowLeft,
   IconDownload,
@@ -13,23 +18,38 @@ import { RenderMarkdown } from '@/containers/RenderMarkdown'
 import { useEffect, useMemo, useCallback, useState } from 'react'
 import { useModelProvider } from '@/hooks/useModelProvider'
 import { useDownloadStore } from '@/hooks/useDownloadStore'
-import { pullModel } from '@/services/models'
+import {
+  CatalogModel,
+  convertHfRepoToCatalogModel,
+  fetchHuggingFaceRepo,
+  pullModel,
+} from '@/services/models'
 import { Progress } from '@/components/ui/progress'
 import { Button } from '@/components/ui/button'
 import { cn } from '@/lib/utils'

+type SearchParams = {
+  repo: string
+}
+
 export const Route = createFileRoute('/hub/$modelId')({
   component: HubModelDetail,
+  validateSearch: (search: Record<string, unknown>): SearchParams => ({
+    repo: search.repo as SearchParams['repo'],
+  }),
 })

 function HubModelDetail() {
   const { modelId } = useParams({ from: Route.id })
   const navigate = useNavigate()
   const { sources, fetchSources } = useModelSources()
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  const search = useSearch({ from: Route.id as any })
   const { getProviderByName } = useModelProvider()
   const llamaProvider = getProviderByName('llamacpp')
   const { downloads, localDownloadingModels, addLocalDownloadingModel } =
     useDownloadStore()
+  const [repoData, setRepoData] = useState<CatalogModel | undefined>()

   // State for README content
   const [readmeContent, setReadmeContent] = useState<string>('')
@@ -39,10 +59,21 @@ function HubModelDetail() {
     fetchSources()
   }, [fetchSources])

+  const fetchRepo = useCallback(async () => {
+    const repoInfo = await fetchHuggingFaceRepo(search.repo || modelId)
+    if (repoInfo) {
+      const repoDetail = convertHfRepoToCatalogModel(repoInfo)
+      setRepoData(repoDetail)
+    }
+  }, [modelId, search])
+
+  useEffect(() => {
+    fetchRepo()
+  }, [modelId, fetchRepo])
   // Find the model data from sources
   const modelData = useMemo(() => {
-    return sources.find((model) => model.model_name === modelId)
-  }, [sources, modelId])
+    return sources.find((model) => model.model_name === modelId) ?? repoData
+  }, [sources, modelId, repoData])

   // Download processes
   const downloadProcesses = useMemo(
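Note: this `fetchRepo` callback is the receiving end of the deep-link change in `DataProvider` above: the deep link navigates to `route.hub.model` with `search.repo`, and the detail route falls back to the `$modelId` path param when no search param is present. A condensed, illustrative sketch of that contract (the function name is hypothetical; the imported services are the ones this PR adds):

```ts
import {
  CatalogModel,
  convertHfRepoToCatalogModel,
  fetchHuggingFaceRepo,
} from '@/services/models'

// Illustrative only: what /hub/$modelId effectively does for a deep-linked repo.
export async function resolveDeepLinkedModel(
  modelId: string,
  search: { repo?: string }
): Promise<CatalogModel | undefined> {
  // Prefer the `repo` search param set by handleDeepLink, fall back to the path param.
  const repoInfo = await fetchHuggingFaceRepo(search.repo || modelId)
  return repoInfo ? convertHfRepoToCatalogModel(repoInfo) : undefined
}
```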
@@ -116,7 +147,6 @@ function HubModelDetail() {
     })
   }, [modelData])

-
   // Fetch README content when modelData.readme is available
   useEffect(() => {
     if (modelData?.readme) {
@@ -31,7 +31,7 @@ import {
   CatalogModel,
   pullModel,
   fetchHuggingFaceRepo,
-  HuggingFaceRepo,
+  convertHfRepoToCatalogModel,
 } from '@/services/models'
 import { useDownloadStore } from '@/hooks/useDownloadStore'
 import { Progress } from '@/components/ui/progress'
@@ -63,14 +63,16 @@ function Hub() {
     { value: 'newest', name: t('hub:sortNewest') },
     { value: 'most-downloaded', name: t('hub:sortMostDownloaded') },
   ]
-  const searchOptions = {
-    includeScore: true,
-    // Search in `author` and in `tags` array
-    keys: ['model_name', 'quants.model_id'],
-  }
+  const searchOptions = useMemo(() => {
+    return {
+      includeScore: true,
+      // Search in `author` and in `tags` array
+      keys: ['model_name', 'quants.model_id'],
+    }
+  }, [])

   const { sources, addSource, fetchSources, loading } = useModelSources()
-  const search = useSearch({ from: route.hub.index as any })
   const [searchValue, setSearchValue] = useState('')
   const [sortSelected, setSortSelected] = useState('newest')
   const [expandedModels, setExpandedModels] = useState<Record<string, boolean>>(
@@ -92,48 +94,6 @@ function Hub() {
   const { getProviderByName } = useModelProvider()
   const llamaProvider = getProviderByName('llamacpp')

-  // Convert HuggingFace repository to CatalogModel format
-  const convertHfRepoToCatalogModel = useCallback(
-    (repo: HuggingFaceRepo): CatalogModel => {
-      // Extract GGUF files from the repository siblings
-      const ggufFiles =
-        repo.siblings?.filter((file) =>
-          file.rfilename.toLowerCase().endsWith('.gguf')
-        ) || []
-
-      // Convert GGUF files to quants format
-      const quants = ggufFiles.map((file) => {
-        // Format file size
-        const formatFileSize = (size?: number) => {
-          if (!size) return 'Unknown size'
-          if (size < 1024 ** 3) return `${(size / 1024 ** 2).toFixed(1)} MB`
-          return `${(size / 1024 ** 3).toFixed(1)} GB`
-        }
-
-        // Generate model_id from filename (remove .gguf extension, case-insensitive)
-        const modelId = file.rfilename.replace(/\.gguf$/i, '')
-
-        return {
-          model_id: modelId,
-          path: `https://huggingface.co/${repo.modelId}/resolve/main/${file.rfilename}`,
-          file_size: formatFileSize(file.size),
-        }
-      })
-
-      return {
-        model_name: repo.modelId,
-        description: `**Metadata:** ${repo.pipeline_tag}\n\n **Tags**: ${repo.tags?.join(', ')}`,
-        developer: repo.author,
-        downloads: repo.downloads || 0,
-        num_quants: quants.length,
-        quants: quants,
-        created_at: repo.created_at,
-        readme: `https://huggingface.co/${repo.modelId}/resolve/main/README.md`,
-      }
-    },
-    []
-  )
-
   const toggleModelExpansion = (modelId: string) => {
     setExpandedModels((prev) => ({
       ...prev,
@@ -141,35 +101,6 @@ function Hub() {
     }))
   }

-  useEffect(() => {
-    if (search.repo) {
-      setSearchValue(search.repo || '')
-      setIsSearching(true)
-
-      addModelSourceTimeoutRef.current = setTimeout(async () => {
-        try {
-          // Fetch HuggingFace repository information
-          const repoInfo = await fetchHuggingFaceRepo(search.repo)
-          if (repoInfo) {
-            const catalogModel = convertHfRepoToCatalogModel(repoInfo)
-            if (
-              !sources.some((s) => s.model_name === catalogModel.model_name)
-            ) {
-              setHuggingFaceRepo(catalogModel)
-              addSource(catalogModel)
-            }
-          }
-
-          await fetchSources()
-        } catch (error) {
-          console.error('Error fetching repository info:', error)
-        } finally {
-          setIsSearching(false)
-        }
-      }, 500)
-    }
-  }, [convertHfRepoToCatalogModel, fetchSources, addSource, search, sources])
-
   // Sorting functionality
   const sortedModels = useMemo(() => {
     return [...sources].sort((a, b) => {
@@ -264,9 +195,6 @@ function Hub() {
           addSource(catalogModel)
         }
       }
-
-      // Original addSource logic (if needed)
-      await fetchSources()
     } catch (error) {
       console.error('Error fetching repository info:', error)
     } finally {
@@ -4,6 +4,7 @@ import {
   fetchModels,
   fetchModelCatalog,
   fetchHuggingFaceRepo,
+  convertHfRepoToCatalogModel,
   updateModel,
   pullModel,
   abortDownload,
@@ -12,6 +13,8 @@ import {
   stopModel,
   stopAllModels,
   startModel,
+  HuggingFaceRepo,
+  CatalogModel,
 } from '../models'
 import { EngineManager, Model } from '@janhq/core'

@@ -334,7 +337,9 @@ describe('models service', () => {
      })

      // Test with full URL
-     await fetchHuggingFaceRepo('https://huggingface.co/microsoft/DialoGPT-medium')
+     await fetchHuggingFaceRepo(
+       'https://huggingface.co/microsoft/DialoGPT-medium'
+     )
      expect(fetch).toHaveBeenCalledWith(
        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
      )
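Note: the test above only pins down the observable behaviour: both a bare `org/repo` id and a full `https://huggingface.co/org/repo` URL end up hitting the same API endpoint with `blobs=true` (which, as far as I know, makes the API include blob metadata such as file sizes for siblings). A sketch of one way that normalization could look; this is an assumption, not the actual implementation in `services/models`:

```ts
// Sketch only: normalize a repo id or full URL to the HF models API endpoint.
function toHfApiUrl(repoIdOrUrl: string): string {
  const repoId = repoIdOrUrl.replace(/^https?:\/\/huggingface\.co\//, '')
  return `https://huggingface.co/api/models/${repoId}?blobs=true`
}

// Matches the expectation in the test above:
// toHfApiUrl('https://huggingface.co/microsoft/DialoGPT-medium')
//   === 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
```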
@@ -524,7 +529,303 @@ describe('models service', () => {

       expect(result).toEqual(mockRepoData)
       // Verify the GGUF file is present in siblings
-      expect(result?.siblings?.some(s => s.rfilename.endsWith('.gguf'))).toBe(true)
+      expect(result?.siblings?.some((s) => s.rfilename.endsWith('.gguf'))).toBe(
+        true
+      )
+    })
+  })
+
+  describe('convertHfRepoToCatalogModel', () => {
+    const mockHuggingFaceRepo: HuggingFaceRepo = {
+      id: 'microsoft/DialoGPT-medium',
+      modelId: 'microsoft/DialoGPT-medium',
+      sha: 'abc123',
+      downloads: 1500,
+      likes: 75,
+      tags: ['pytorch', 'transformers', 'text-generation'],
+      pipeline_tag: 'text-generation',
+      created_at: '2021-01-01T00:00:00Z',
+      last_modified: '2021-12-01T00:00:00Z',
+      private: false,
+      disabled: false,
+      gated: false,
+      author: 'microsoft',
+      siblings: [
+        {
+          rfilename: 'model-q4_0.gguf',
+          size: 2 * 1024 * 1024 * 1024, // 2GB
+          blobId: 'blob123',
+        },
+        {
+          rfilename: 'model-q8_0.GGUF', // Test case-insensitive matching
+          size: 4 * 1024 * 1024 * 1024, // 4GB
+          blobId: 'blob456',
+        },
+        {
+          rfilename: 'tokenizer.json', // Non-GGUF file (should be filtered out)
+          size: 1024 * 1024, // 1MB
+          blobId: 'blob789',
+        },
+      ],
+    }
+
+    it('should convert HuggingFace repo to catalog model format', () => {
+      const result = convertHfRepoToCatalogModel(mockHuggingFaceRepo)
+
+      const expected: CatalogModel = {
+        model_name: 'microsoft/DialoGPT-medium',
+        description: '**Tags**: pytorch, transformers, text-generation',
+        developer: 'microsoft',
+        downloads: 1500,
+        num_quants: 2,
+        quants: [
+          {
+            model_id: 'model-q4_0',
+            path: 'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/model-q4_0.gguf',
+            file_size: '2.0 GB',
+          },
+          {
+            model_id: 'model-q8_0',
+            path: 'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/model-q8_0.GGUF',
+            file_size: '4.0 GB',
+          },
+        ],
+        created_at: '2021-01-01T00:00:00Z',
+        readme:
+          'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/README.md',
+      }
+
+      expect(result).toEqual(expected)
+    })
+
+    it('should handle repository with no GGUF files', () => {
+      const repoWithoutGGUF: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: [
+          {
+            rfilename: 'tokenizer.json',
+            size: 1024 * 1024,
+            blobId: 'blob789',
+          },
+          {
+            rfilename: 'config.json',
+            size: 2048,
+            blobId: 'blob101',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithoutGGUF)
+
+      expect(result.num_quants).toBe(0)
+      expect(result.quants).toEqual([])
+    })
+
+    it('should handle repository with no siblings', () => {
+      const repoWithoutSiblings: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: undefined,
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithoutSiblings)
+
+      expect(result.num_quants).toBe(0)
+      expect(result.quants).toEqual([])
+    })
+
+    it('should format file sizes correctly', () => {
+      const repoWithVariousFileSizes: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: [
+          {
+            rfilename: 'small-model.gguf',
+            size: 500 * 1024 * 1024, // 500MB
+            blobId: 'blob1',
+          },
+          {
+            rfilename: 'large-model.gguf',
+            size: 3.5 * 1024 * 1024 * 1024, // 3.5GB
+            blobId: 'blob2',
+          },
+          {
+            rfilename: 'unknown-size.gguf',
+            // No size property
+            blobId: 'blob3',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithVariousFileSizes)
+
+      expect(result.quants[0].file_size).toBe('500.0 MB')
+      expect(result.quants[1].file_size).toBe('3.5 GB')
+      expect(result.quants[2].file_size).toBe('Unknown size')
+    })
+
+    it('should handle empty or undefined tags', () => {
+      const repoWithEmptyTags: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        tags: [],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithEmptyTags)
+
+      expect(result.description).toBe('**Tags**: ')
+    })
+
+    it('should handle missing downloads count', () => {
+      const repoWithoutDownloads: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        downloads: undefined as any,
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithoutDownloads)
+
+      expect(result.downloads).toBe(0)
+    })
+
+    it('should correctly remove .gguf extension from model IDs', () => {
+      const repoWithVariousGGUF: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: [
+          {
+            rfilename: 'model.gguf',
+            size: 1024,
+            blobId: 'blob1',
+          },
+          {
+            rfilename: 'MODEL.GGUF',
+            size: 1024,
+            blobId: 'blob2',
+          },
+          {
+            rfilename: 'complex-model-name.gguf',
+            size: 1024,
+            blobId: 'blob3',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithVariousGGUF)
+
+      expect(result.quants[0].model_id).toBe('model')
+      expect(result.quants[1].model_id).toBe('MODEL')
+      expect(result.quants[2].model_id).toBe('complex-model-name')
+    })
+
+    it('should generate correct download paths', () => {
+      const result = convertHfRepoToCatalogModel(mockHuggingFaceRepo)
+
+      expect(result.quants[0].path).toBe(
+        'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/model-q4_0.gguf'
+      )
+      expect(result.quants[1].path).toBe(
+        'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/model-q8_0.GGUF'
+      )
+    })
+
+    it('should generate correct readme URL', () => {
+      const result = convertHfRepoToCatalogModel(mockHuggingFaceRepo)
+
+      expect(result.readme).toBe(
+        'https://huggingface.co/microsoft/DialoGPT-medium/resolve/main/README.md'
+      )
+    })
+
+    it('should handle GGUF files with case-insensitive extension matching', () => {
+      const repoWithMixedCase: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: [
+          {
+            rfilename: 'model-1.gguf',
+            size: 1024,
+            blobId: 'blob1',
+          },
+          {
+            rfilename: 'model-2.GGUF',
+            size: 1024,
+            blobId: 'blob2',
+          },
+          {
+            rfilename: 'model-3.GgUf',
+            size: 1024,
+            blobId: 'blob3',
+          },
+          {
+            rfilename: 'not-a-model.txt',
+            size: 1024,
+            blobId: 'blob4',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithMixedCase)
+
+      expect(result.num_quants).toBe(3)
+      expect(result.quants).toHaveLength(3)
+      expect(result.quants[0].model_id).toBe('model-1')
+      expect(result.quants[1].model_id).toBe('model-2')
+      expect(result.quants[2].model_id).toBe('model-3')
+    })
+
+    it('should handle edge cases with file size formatting', () => {
+      const repoWithEdgeCases: HuggingFaceRepo = {
+        ...mockHuggingFaceRepo,
+        siblings: [
+          {
+            rfilename: 'tiny.gguf',
+            size: 512, // < 1MB
+            blobId: 'blob1',
+          },
+          {
+            rfilename: 'exactly-1gb.gguf',
+            size: 1024 * 1024 * 1024, // Exactly 1GB
+            blobId: 'blob2',
+          },
+          {
+            rfilename: 'zero-size.gguf',
+            size: 0,
+            blobId: 'blob3',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(repoWithEdgeCases)
+
+      expect(result.quants[0].file_size).toBe('0.0 MB')
+      expect(result.quants[1].file_size).toBe('1.0 GB')
+      expect(result.quants[2].file_size).toBe('Unknown size') // 0 is falsy, so it returns 'Unknown size'
+    })
+
+    it('should handle missing optional fields gracefully', () => {
+      const minimalRepo: HuggingFaceRepo = {
+        id: 'minimal/repo',
+        modelId: 'minimal/repo',
+        sha: 'abc123',
+        downloads: 0,
+        likes: 0,
+        tags: [],
+        created_at: '2021-01-01T00:00:00Z',
+        last_modified: '2021-12-01T00:00:00Z',
+        private: false,
+        disabled: false,
+        gated: false,
+        author: 'minimal',
+        siblings: [
+          {
+            rfilename: 'model.gguf',
+            blobId: 'blob1',
+          },
+        ],
+      }
+
+      const result = convertHfRepoToCatalogModel(minimalRepo)
+
+      expect(result.model_name).toBe('minimal/repo')
+      expect(result.developer).toBe('minimal')
+      expect(result.downloads).toBe(0)
+      expect(result.description).toBe('**Tags**: ')
+      expect(result.quants[0].file_size).toBe('Unknown size')
     })
   })
 })
@@ -134,6 +134,47 @@ export const fetchHuggingFaceRepo = async (
   }
 }

+// Convert HuggingFace repository to CatalogModel format
+export const convertHfRepoToCatalogModel = (
+  repo: HuggingFaceRepo
+): CatalogModel => {
+  // Extract GGUF files from the repository siblings
+  const ggufFiles =
+    repo.siblings?.filter((file) =>
+      file.rfilename.toLowerCase().endsWith('.gguf')
+    ) || []
+
+  // Convert GGUF files to quants format
+  const quants = ggufFiles.map((file) => {
+    // Format file size
+    const formatFileSize = (size?: number) => {
+      if (!size) return 'Unknown size'
+      if (size < 1024 ** 3) return `${(size / 1024 ** 2).toFixed(1)} MB`
+      return `${(size / 1024 ** 3).toFixed(1)} GB`
+    }
+
+    // Generate model_id from filename (remove .gguf extension, case-insensitive)
+    const modelId = file.rfilename.replace(/\.gguf$/i, '')
+
+    return {
+      model_id: modelId,
+      path: `https://huggingface.co/${repo.modelId}/resolve/main/${file.rfilename}`,
+      file_size: formatFileSize(file.size),
+    }
+  })
+
+  return {
+    model_name: repo.modelId,
+    description: `**Tags**: ${repo.tags?.join(', ')}`,
+    developer: repo.author,
+    downloads: repo.downloads || 0,
+    num_quants: quants.length,
+    quants: quants,
+    created_at: repo.created_at,
+    readme: `https://huggingface.co/${repo.modelId}/resolve/main/README.md`,
+  }
+}
+
 /**
  * Updates a model.
  * @param model The model to update.
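Note: with `convertHfRepoToCatalogModel` exported from `@/services/models`, both the Hub page and the model detail route can share it instead of keeping a local copy. A short usage example (illustrative; the function name `loadHubEntry` is not part of the PR and error handling is omitted):

```ts
import {
  convertHfRepoToCatalogModel,
  fetchHuggingFaceRepo,
} from '@/services/models'

async function loadHubEntry(repoId: string) {
  // repoId can be e.g. 'microsoft/DialoGPT-medium' or a full huggingface.co URL.
  const repo = await fetchHuggingFaceRepo(repoId)
  if (!repo) return undefined
  const catalogModel = convertHfRepoToCatalogModel(repo)
  // catalogModel.quants lists each .gguf sibling with a resolve/main download
  // path and a human-readable file size, e.g. '2.0 GB'.
  return catalogModel
}
```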
web-app/src/types/global.d.ts (vendored)
@@ -19,6 +19,7 @@ declare global {
   declare const POSTHOG_KEY: string
   declare const POSTHOG_HOST: string
   declare const MODEL_CATALOG_URL: string
+  declare const AUTO_UPDATER_DISABLED: boolean
   interface Window {
     core: AppCore | undefined
   }
@@ -33,19 +33,19 @@ export default defineConfig(({ mode }) => {
     define: {
       IS_TAURI: JSON.stringify(process.env.IS_TAURI),
       IS_MACOS: JSON.stringify(
-        process.env.TAURI_ENV_PLATFORM?.includes('darwin') ?? 'false'
+        process.env.TAURI_ENV_PLATFORM?.includes('darwin') ?? false
       ),
       IS_WINDOWS: JSON.stringify(
-        process.env.TAURI_ENV_PLATFORM?.includes('windows') ?? 'false'
+        process.env.TAURI_ENV_PLATFORM?.includes('windows') ?? false
       ),
       IS_LINUX: JSON.stringify(
-        process.env.TAURI_ENV_PLATFORM?.includes('linux') ?? 'false'
+        process.env.TAURI_ENV_PLATFORM?.includes('linux') ?? false
      ),
       IS_IOS: JSON.stringify(
-        process.env.TAURI_ENV_PLATFORM?.includes('ios') ?? 'false'
+        process.env.TAURI_ENV_PLATFORM?.includes('ios') ?? false
       ),
       IS_ANDROID: JSON.stringify(
-        process.env.TAURI_ENV_PLATFORM?.includes('android') ?? 'false'
+        process.env.TAURI_ENV_PLATFORM?.includes('android') ?? false
       ),
       PLATFORM: JSON.stringify(process.env.TAURI_ENV_PLATFORM),

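Note: this hunk matters because Vite's `define` performs textual substitution, so the value must be the JSON of the literal you actually want in the bundle. A small illustration, independent of the config itself:

```ts
// Why `?? false` instead of `?? 'false'`:
console.log(JSON.stringify('false')) // '"false"' -> IS_MACOS expands to "false", a truthy string
console.log(JSON.stringify(false))   // 'false'   -> IS_MACOS expands to false, a real boolean

// With the old fallback, a build where TAURI_ENV_PLATFORM is unset would make
// checks like `if (IS_MACOS)` truthy on every platform; `?? false` keeps them false.
```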
@@ -56,6 +56,9 @@ export default defineConfig(({ mode }) => {
       MODEL_CATALOG_URL: JSON.stringify(
         'https://raw.githubusercontent.com/menloresearch/model-catalog/main/model_catalog.json'
       ),
+      AUTO_UPDATER_DISABLED: JSON.stringify(
+        env.AUTO_UPDATER_DISABLED === 'true'
+      ),
     },

     // Vite options tailored for Tauri development and only applied in `tauri dev` or `tauri build`
@@ -38,5 +38,6 @@ export default defineConfig({
     VERSION: JSON.stringify('test'),
     POSTHOG_KEY: JSON.stringify(''),
     POSTHOG_HOST: JSON.stringify(''),
+    AUTO_UPDATER_DISABLED: JSON.stringify('false'),
   },
 })