fix: HF token is not used while searching repositories (#6137)

* fix: HF token is not used while searching repositories

* chore: whitelist jan model with tool use support by default

* fix: tests

* fix: duplicate model while searching

* fix: deprecate addSource tests since the function was removed
Author: Louis, 2025-08-12 11:48:06 +07:00 (committed by GitHub)
Commit: 7c25d1dbfd
6 changed files with 38 additions and 169 deletions
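
In short, the Hub now reads the Hugging Face token from general settings and forwards it to the repository lookup, so gated and private repos resolve during search. A minimal sketch of the wiring, using the hook and function names that appear in the diffs below (the service import path is assumed, not shown in this commit):

import { useGeneralSetting } from '@/hooks/useGeneralSetting'
// Assumed import path; only the function name is confirmed by the diff.
import { fetchHuggingFaceRepo } from '@/services/models'

// Returns a lookup function bound to the user's token (which may be undefined).
function useRepoLookup() {
  const { huggingfaceToken } = useGeneralSetting()
  return (repoId: string) => fetchHuggingFaceRepo(repoId, huggingfaceToken)
}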

File 1 of 6: useModelSources hook tests

@@ -49,7 +49,6 @@ describe('useModelSources', () => {
     expect(result.current.error).toBe(null)
     expect(result.current.loading).toBe(false)
     expect(typeof result.current.fetchSources).toBe('function')
-    expect(typeof result.current.addSource).toBe('function')
   })

   describe('fetchSources', () => {
@@ -225,153 +224,6 @@ describe('useModelSources', () => {
     })
   })

-  describe('addSource', () => {
-    it('should add a new source to the store', () => {
-      const { result } = renderHook(() => useModelSources())
-
-      const testModel: CatalogModel = {
-        model_name: 'test-model',
-        description: 'Test model description',
-        developer: 'test-developer',
-        downloads: 100,
-        num_quants: 2,
-        quants: [
-          {
-            model_id: 'test-model-q4',
-            path: 'https://example.com/test-model-q4.gguf',
-            file_size: '2.0 GB',
-          },
-        ],
-        created_at: '2023-01-01T00:00:00Z',
-      }
-
-      act(() => {
-        result.current.addSource(testModel)
-      })
-
-      expect(result.current.sources).toHaveLength(1)
-      expect(result.current.sources[0]).toEqual(testModel)
-    })
-
-    it('should replace existing source with same model_name', () => {
-      const { result } = renderHook(() => useModelSources())
-
-      const originalModel: CatalogModel = {
-        model_name: 'duplicate-model',
-        description: 'Original description',
-        developer: 'original-developer',
-        downloads: 50,
-        num_quants: 1,
-        quants: [],
-        created_at: '2023-01-01T00:00:00Z',
-      }
-
-      const updatedModel: CatalogModel = {
-        model_name: 'duplicate-model',
-        description: 'Updated description',
-        developer: 'updated-developer',
-        downloads: 150,
-        num_quants: 2,
-        quants: [
-          {
-            model_id: 'duplicate-model-q4',
-            path: 'https://example.com/duplicate-model-q4.gguf',
-            file_size: '3.0 GB',
-          },
-        ],
-        created_at: '2023-02-01T00:00:00Z',
-      }
-
-      act(() => {
-        result.current.addSource(originalModel)
-      })
-
-      expect(result.current.sources).toHaveLength(1)
-
-      act(() => {
-        result.current.addSource(updatedModel)
-      })
-
-      expect(result.current.sources).toHaveLength(1)
-      expect(result.current.sources[0]).toEqual(updatedModel)
-    })
-
-    it('should handle multiple different sources', () => {
-      const { result } = renderHook(() => useModelSources())
-
-      const model1: CatalogModel = {
-        model_name: 'model-1',
-        description: 'First model',
-        developer: 'developer-1',
-        downloads: 100,
-        num_quants: 1,
-        quants: [],
-        created_at: '2023-01-01T00:00:00Z',
-      }
-
-      const model2: CatalogModel = {
-        model_name: 'model-2',
-        description: 'Second model',
-        developer: 'developer-2',
-        downloads: 200,
-        num_quants: 1,
-        quants: [],
-        created_at: '2023-01-02T00:00:00Z',
-      }
-
-      act(() => {
-        result.current.addSource(model1)
-      })
-
-      act(() => {
-        result.current.addSource(model2)
-      })
-
-      expect(result.current.sources).toHaveLength(2)
-      expect(result.current.sources).toContainEqual(model1)
-      expect(result.current.sources).toContainEqual(model2)
-    })
-
-    it('should handle CatalogModel with complete quants data', () => {
-      const { result } = renderHook(() => useModelSources())
-
-      const modelWithQuants: CatalogModel = {
-        model_name: 'model-with-quants',
-        description: 'Model with quantizations',
-        developer: 'quant-developer',
-        downloads: 500,
-        num_quants: 3,
-        quants: [
-          {
-            model_id: 'model-q4_k_m',
-            path: 'https://example.com/model-q4_k_m.gguf',
-            file_size: '2.0 GB',
-          },
-          {
-            model_id: 'model-q8_0',
-            path: 'https://example.com/model-q8_0.gguf',
-            file_size: '4.0 GB',
-          },
-          {
-            model_id: 'model-f16',
-            path: 'https://example.com/model-f16.gguf',
-            file_size: '8.0 GB',
-          },
-        ],
-        created_at: '2023-01-01T00:00:00Z',
-        readme: 'https://example.com/readme.md',
-      }
-
-      act(() => {
-        result.current.addSource(modelWithQuants)
-      })
-
-      expect(result.current.sources).toHaveLength(1)
-      expect(result.current.sources[0]).toEqual(modelWithQuants)
-      expect(result.current.sources[0].quants).toHaveLength(3)
-    })
-  })
-
   describe('state management', () => {
     it('should maintain state across multiple hook instances', () => {
       const { result: result1 } = renderHook(() => useModelSources())

File 2 of 6: useModelSources store hook

@@ -8,7 +8,6 @@ type ModelSourcesState = {
   sources: CatalogModel[]
   error: Error | null
   loading: boolean
-  addSource: (source: CatalogModel) => void
   fetchSources: () => Promise<void>
 }
@@ -18,15 +17,6 @@ export const useModelSources = create<ModelSourcesState>()(
     sources: [],
     error: null,
     loading: false,
-    addSource: (source: CatalogModel) => {
-      set((state) => ({
-        sources: [
-          ...state.sources.filter((e) => e.model_name !== source.model_name),
-          source,
-        ],
-      }))
-    },
     fetchSources: async () => {
       set({ loading: true, error: null })
       try {

File 3 of 6: Hub route component

@@ -39,6 +39,7 @@ import HeaderPage from '@/containers/HeaderPage'
 import { Loader } from 'lucide-react'
 import { useTranslation } from '@/i18n/react-i18next-compat'
 import Fuse from 'fuse.js'
+import { useGeneralSetting } from '@/hooks/useGeneralSetting'

 type ModelProps = {
   model: CatalogModel
@@ -57,6 +58,7 @@ export const Route = createFileRoute(route.hub.index as any)({
 function Hub() {
   const parentRef = useRef(null)
+  const { huggingfaceToken } = useGeneralSetting()
   const { t } = useTranslation()

   const sortOptions = [
@@ -71,7 +73,7 @@ function Hub() {
     }
   }, [])

-  const { sources, addSource, fetchSources, loading } = useModelSources()
+  const { sources, fetchSources, loading } = useModelSources()

   const [searchValue, setSearchValue] = useState('')
   const [sortSelected, setSortSelected] = useState('newest')
@@ -185,14 +187,16 @@ function Hub() {
     addModelSourceTimeoutRef.current = setTimeout(async () => {
       try {
         // Fetch HuggingFace repository information
-        const repoInfo = await fetchHuggingFaceRepo(e.target.value)
+        const repoInfo = await fetchHuggingFaceRepo(
+          e.target.value,
+          huggingfaceToken
+        )
         if (repoInfo) {
           const catalogModel = convertHfRepoToCatalogModel(repoInfo)
           if (
             !sources.some((s) => s.model_name === catalogModel.model_name)
           ) {
             setHuggingFaceRepo(catalogModel)
-            addSource(catalogModel)
           }
         }
       } catch (error) {
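
With addSource gone, the repo found via search is held only in local Hub state (setHuggingFaceRepo) instead of being written into the shared sources store, which is what removes the duplicate entry mentioned in the commit message. One plausible way the list can then be assembled for display, sketched here because that code is not part of this diff (huggingFaceRepo stands for the local state set above):

// Sketch only: merge the catalog with the locally held search hit, keyed by
// model_name so the same repository never appears twice.
const displayedModels: CatalogModel[] =
  huggingFaceRepo &&
  !sources.some((s) => s.model_name === huggingFaceRepo.model_name)
    ? [...sources, huggingFaceRepo]
    : sources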

File 4 of 6: models service tests

@@ -325,7 +325,10 @@ describe('models service', () => {
       expect(result).toEqual(mockRepoData)
       expect(fetch).toHaveBeenCalledWith(
-        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
+        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
+        {
+          headers: {},
+        }
       )
     })
@@ -341,19 +344,28 @@ describe('models service', () => {
         'https://huggingface.co/microsoft/DialoGPT-medium'
       )
       expect(fetch).toHaveBeenCalledWith(
-        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
+        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
+        {
+          headers: {},
+        }
       )

       // Test with domain prefix
       await fetchHuggingFaceRepo('huggingface.co/microsoft/DialoGPT-medium')
       expect(fetch).toHaveBeenCalledWith(
-        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
+        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
+        {
+          headers: {},
+        }
       )

       // Test with trailing slash
       await fetchHuggingFaceRepo('microsoft/DialoGPT-medium/')
       expect(fetch).toHaveBeenCalledWith(
-        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true'
+        'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
+        {
+          headers: {},
+        }
       )
     })
@@ -379,7 +391,10 @@ describe('models service', () => {
       expect(result).toBeNull()
       expect(fetch).toHaveBeenCalledWith(
-        'https://huggingface.co/api/models/nonexistent/model?blobs=true'
+        'https://huggingface.co/api/models/nonexistent/model?blobs=true',
+        {
+          headers: {},
+        }
       )
     })
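
The updated assertions above only pin down the tokenless path (headers: {}). A complementary test along these lines, not part of this diff, would cover the authenticated path as well; it assumes the same mocked global fetch used by the surrounding tests and a placeholder token value:

it('should send an Authorization header when a token is provided', async () => {
  // 'hf_test_token' is a placeholder; any non-empty string exercises the branch.
  await fetchHuggingFaceRepo('microsoft/DialoGPT-medium', 'hf_test_token')

  expect(fetch).toHaveBeenCalledWith(
    'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true',
    {
      headers: {
        Authorization: 'Bearer hf_test_token',
      },
    }
  )
})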

File 5 of 6: models service (fetchHuggingFaceRepo)

@@ -99,7 +99,8 @@ export const fetchModelCatalog = async (): Promise<ModelCatalog> => {
  * @returns A promise that resolves to the repository information.
  */
 export const fetchHuggingFaceRepo = async (
-  repoId: string
+  repoId: string,
+  hfToken?: string
 ): Promise<HuggingFaceRepo | null> => {
   try {
     // Clean the repo ID to handle various input formats
@@ -114,7 +115,14 @@ export const fetchHuggingFaceRepo = async (
     }

     const response = await fetch(
-      `https://huggingface.co/api/models/${cleanRepoId}?blobs=true`
+      `https://huggingface.co/api/models/${cleanRepoId}?blobs=true`,
+      {
+        headers: hfToken
+          ? {
+              Authorization: `Bearer ${hfToken}`,
+            }
+          : {},
+      }
     )

     if (!response.ok) {
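
For reference, the two call forms the new signature allows; the repo id in the second call and the wrapper function are illustrative only:

async function lookupExamples(huggingfaceToken?: string) {
  // Public repo: no token, the request goes out with empty headers.
  const publicRepo = await fetchHuggingFaceRepo('microsoft/DialoGPT-medium')

  // Gated or private repo: the token is sent as `Authorization: Bearer <token>`.
  const gatedRepo = await fetchHuggingFaceRepo('org/private-model', huggingfaceToken)

  return { publicRepo, gatedRepo }
}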

File 6 of 6: default model capability enums

@@ -17,7 +17,7 @@ export enum ModelCapabilities {

 // TODO: Remove this enum when we integrate llama.cpp extension
 export enum DefaultToolUseSupportedModels {
-  JanNano = 'jan-nano',
+  JanNano = 'jan-',
   Qwen3 = 'qwen3',
   Lucy = 'lucy',
 }
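
The value change from 'jan-nano' to 'jan-' reads as broadening the default tool-use whitelist from the single Jan-Nano model to anything in the Jan family, in line with the "whitelist jan model" commit bullet. The code that consumes this enum is not in the diff; a sketch of the presumed substring check:

// Sketch only: assumes the enum values are matched as substrings of a model id.
const supportsToolUseByDefault = (modelId: string): boolean =>
  Object.values(DefaultToolUseSupportedModels).some((pattern) =>
    modelId.toLowerCase().includes(pattern)
  )

// e.g. supportsToolUseByDefault('jan-v1-4b') would return true after this change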