chore: enhance onboarding screen's models (#4723)

* chore: enhance onboarding screen's models

* chore: lint fix

* chore: correct lint fix command

* chore: fix tests
Louis 2025-02-25 09:36:55 +07:00 committed by GitHub
parent 60257635ad
commit 81fea5665b
6 changed files with 122 additions and 98 deletions

View File

@@ -1 +1 @@
npx oxlint@latest --fix
yarn lint --fix --quiet

View File

@@ -35,6 +35,7 @@ import useDownloadModel from '@/hooks/useDownloadModel'
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
import { useGetEngines } from '@/hooks/useEngineManagement'
import { useGetModelSources } from '@/hooks/useModelSource'
import useRecommendedModel from '@/hooks/useRecommendedModel'
import useUpdateModelParameters from '@/hooks/useUpdateModelParameters'
@@ -44,6 +45,8 @@ import { formatDownloadPercentage, toGigabytes } from '@/utils/converter'
import { manualRecommendationModel } from '@/utils/model'
import { getLogoEngine, getTitleByEngine } from '@/utils/modelEngine'
import { extractModelName } from '@/utils/modelSource'
import { activeAssistantAtom } from '@/helpers/atoms/Assistant.atom'
import {
configuredModelsAtom,
@@ -84,6 +87,7 @@ const ModelDropdown = ({
const [toggle, setToggle] = useState<HTMLDivElement | null>(null)
const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom)
const { recommendedModel, downloadedModels } = useRecommendedModel()
const { sources } = useGetModelSources()
const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
null
)
@@ -97,11 +101,8 @@ const ModelDropdown = ({
const configuredModels = useAtomValue(configuredModelsAtom)
const { stopModel } = useActiveModel()
const featuredModels = configuredModels.filter(
(x) =>
manualRecommendationModel.includes(x.id) &&
x.metadata?.tags?.includes('Featured') &&
x.metadata?.size < 5000000000
const featuredModels = sources?.filter((x) =>
manualRecommendationModel.includes(x.id)
)
const { updateThreadMetadata } = useCreateNewThread()
@@ -464,9 +465,9 @@ const ModelDropdown = ({
showModel &&
!searchText.length && (
<ul className="pb-2">
{featuredModels.map((model) => {
{featuredModels?.map((model) => {
const isDownloading = downloadingModels.some(
(md) => md === model.id
(md) => md === (model.models[0]?.id ?? model.id)
)
return (
<li
@@ -475,34 +476,35 @@ const ModelDropdown = ({
>
<div className="flex items-center gap-2">
<p
className="max-w-[200px] overflow-hidden truncate whitespace-nowrap text-[hsla(var(--text-secondary))]"
title={model.name}
className="max-w-[200px] overflow-hidden truncate whitespace-nowrap capitalize text-[hsla(var(--text-secondary))]"
title={model.id}
>
{model.name}
{extractModelName(model.id)}
</p>
<ModelLabel
size={model.metadata?.size}
size={model.models[0]?.size}
compact
/>
</div>
<div className="flex items-center gap-2 text-[hsla(var(--text-tertiary))]">
<span className="font-medium">
{toGigabytes(model.metadata?.size)}
{toGigabytes(model.models[0]?.size)}
</span>
{!isDownloading ? (
<DownloadCloudIcon
size={18}
className="cursor-pointer text-[hsla(var(--app-link))]"
onClick={() =>
downloadModel(
model.sources[0].url,
model.id
)
downloadModel(model.models[0]?.id)
}
/>
) : (
Object.values(downloadStates)
.filter((x) => x.modelId === model.id)
.filter(
(x) =>
x.modelId ===
(model.models[0]?.id ?? model.id)
)
.map((item) => (
<ProgressCircle
key={item.modelId}
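For reference, the dropdown's featured list now comes from useGetModelSources() rather than configuredModelsAtom, so each entry is a model source with a nested models array instead of a flat Model. A minimal sketch of the shape this code depends on, limited to the fields the diff actually reads (anything else about the real type is an assumption and not shown here):

// Sketch only: mirrors the fields accessed above (model.id, model.models[0]?.id, model.models[0]?.size).
interface ModelSourceVariantSketch {
  id: string    // concrete downloadable model, passed to downloadModel()
  size?: number // bytes; rendered via toGigabytes() and <ModelLabel size={...} />
}

interface ModelSourceSketch {
  id: string                         // e.g. 'cortexso/deepseek-r1', matched against manualRecommendationModel
  models: ModelSourceVariantSketch[] // first variant drives download state and size display
}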

View File

@@ -43,7 +43,7 @@ const SettingLeftPanel = () => {
for (const extension of extensions) {
const settings = await extension.getSettings()
if (settings && settings.length > 0) {
if (settings && settings.length > 0 && settings.some((e) => e.title)) {
extensionsMenu.push({
name: extension.productName,
setting: extension.name,
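The added settings.some((e) => e.title) check keeps an extension out of the settings menu when none of its settings carries a title, so no empty entry is pushed for it. A minimal sketch of the guard on its own, with the setting shape reduced to the one field it inspects:

// Sketch: an extension earns a menu entry only if it returns at least one titled setting.
type SettingSketch = { title?: string }

const hasVisibleSettings = (settings?: SettingSketch[]): boolean =>
  !!settings && settings.length > 0 && settings.some((e) => Boolean(e.title))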

View File

@@ -51,27 +51,30 @@ jest.mock('@/hooks/useDownloadModel', () => ({
const mockAtomValue = jest.spyOn(jotai, 'useAtomValue')
const mockSetAtom = jest.spyOn(jotai, 'useSetAtom')
describe('OnDeviceStarterScreen', () => {
const mockExtensionHasSettings = [
{
name: 'Test Extension',
setting: 'test-setting',
apiKey: 'test-key',
provider: 'test-provider',
},
]
jest.mock('@/hooks/useModelSource')
import * as source from '@/hooks/useModelSource'
describe('OnDeviceStarterScreen', () => {
beforeEach(() => {
mockAtomValue.mockImplementation(() => [])
mockSetAtom.mockImplementation(() => jest.fn())
})
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: [],
error: null,
mutate: jest.fn(),
})
it('renders the component', () => {
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: [],
error: null,
mutate: jest.fn(),
})
render(
<Provider>
<OnDeviceStarterScreen
extensionHasSettings={mockExtensionHasSettings}
/>
<OnDeviceStarterScreen isShowStarterScreen={true} />
</Provider>
)
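Every test now stubs useGetModelSources before rendering. A small helper could remove the repetition while still letting individual tests override the sources; this is a sketch only, and the helper name is hypothetical rather than part of the commit:

// Hypothetical test helper (not in this commit); returns the spy so a test can re-stub it.
const mockModelSources = (sources: any[] = []) =>
  jest.spyOn(source, 'useGetModelSources').mockReturnValue({
    sources,
    error: null,
    mutate: jest.fn(),
  })

// Usage inside a test: mockModelSources() for an empty list,
// or mockModelSources(mockConfiguredModels) for the featured-models case.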
@@ -80,11 +83,14 @@ describe('OnDeviceStarterScreen', () => {
})
it('handles search input', () => {
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: [],
error: null,
mutate: jest.fn(),
})
render(
<Provider>
<OnDeviceStarterScreen
extensionHasSettings={mockExtensionHasSettings}
/>
<OnDeviceStarterScreen isShowStarterScreen={true} />
</Provider>
)
@@ -97,11 +103,14 @@ describe('OnDeviceStarterScreen', () => {
it('displays "No Result Found" when no models match the search', () => {
mockAtomValue.mockImplementation(() => [])
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: [],
error: null,
mutate: jest.fn(),
})
render(
<Provider>
<OnDeviceStarterScreen
extensionHasSettings={mockExtensionHasSettings}
/>
<OnDeviceStarterScreen isShowStarterScreen={true} />
</Provider>
)
@@ -114,38 +123,60 @@ describe('OnDeviceStarterScreen', () => {
it('renders featured models', () => {
const mockConfiguredModels = [
{
id: 'gemma-2-9b-it',
name: 'Gemma 2B',
id: 'cortexso/deepseek-r1',
name: 'DeepSeek R1',
metadata: {
tags: ['Featured'],
author: 'Test Author',
size: 3000000000,
},
models: [
{
id: 'cortexso/deepseek-r1',
name: 'DeepSeek R1',
metadata: {
tags: ['Featured'],
},
},
],
},
{
id: 'llama3.1-8b-instruct',
id: 'cortexso/llama3.2',
name: 'Llama 3.1',
metadata: { tags: [], author: 'Test Author', size: 2000000000 },
models: [
{
id: 'cortexso/deepseek-r1',
name: 'DeepSeek R1',
metadata: {
tags: ['Featured'],
},
},
],
},
]
mockAtomValue.mockImplementation((atom) => {
return mockConfiguredModels
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: mockConfiguredModels,
error: null,
mutate: jest.fn(),
})
render(
<Provider>
<OnDeviceStarterScreen
extensionHasSettings={mockExtensionHasSettings}
/>
<OnDeviceStarterScreen isShowStarterScreen={true} />
</Provider>
)
expect(screen.getByText('Gemma 2B')).toBeInTheDocument()
expect(screen.queryByText('Llama 3.1')).not.toBeInTheDocument()
expect(screen.getAllByText('deepseek-r1')[0]).toBeInTheDocument()
})
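The updated assertion expects 'deepseek-r1' because the starter screen now renders extractModelName(model.id) rather than model.name. That helper's implementation is not part of this diff; a plausible sketch, modeled on the normalizeModelId helper shown in the last file of this commit, would be:

// Assumed behaviour (not taken from this diff): drop the owner prefix from a source id.
// 'cortexso/deepseek-r1' -> 'deepseek-r1'; an id without a slash passes through unchanged.
const extractModelNameSketch = (sourceId: string): string =>
  sourceId.split('/').pop() ?? sourceId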
it('renders cloud models', () => {
jest.spyOn(source, 'useGetModelSources').mockReturnValue({
sources: [],
error: null,
mutate: jest.fn(),
})
const mockRemoteModels = [
{ id: 'remote-model-1', name: 'Remote Model 1', engine: 'openai' },
{ id: 'remote-model-2', name: 'Remote Model 2', engine: 'anthropic' },
@@ -160,9 +191,7 @@ describe('OnDeviceStarterScreen', () => {
render(
<Provider>
<OnDeviceStarterScreen
extensionHasSettings={mockExtensionHasSettings}
/>
<OnDeviceStarterScreen isShowStarterScreen={true} />
</Provider>
)
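The tests no longer pass extensionHasSettings; the only prop left is isShowStarterScreen. A sketch of the props shape these call sites imply (the component's actual Props declaration is not shown in this diff):

// Implied by the updated <OnDeviceStarterScreen isShowStarterScreen={true} /> call sites.
type OnDeviceStarterScreenPropsSketch = {
  isShowStarterScreen: boolean
}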

View File

@@ -26,6 +26,8 @@ import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
import { useGetEngines } from '@/hooks/useEngineManagement'
import { useGetModelSources } from '@/hooks/useModelSource'
import { formatDownloadPercentage, toGigabytes } from '@/utils/converter'
import { manualRecommendationModel } from '@/utils/model'
import {
@@ -34,6 +36,8 @@ import {
isLocalEngine,
} from '@/utils/modelEngine'
import { extractModelName } from '@/utils/modelSource'
import { mainViewStateAtom } from '@/helpers/atoms/App.atom'
import {
configuredModelsAtom,
@@ -55,36 +59,17 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
const { engines } = useGetEngines()
const configuredModels = useAtomValue(configuredModelsAtom)
const { sources } = useGetModelSources()
const setMainViewState = useSetAtom(mainViewStateAtom)
const featuredModel = configuredModels.filter((x) => {
const manualRecommendModel = configuredModels.filter((x) =>
const featuredModels = sources?.filter((x) =>
manualRecommendationModel.includes(x.id)
)
if (manualRecommendModel.length === 2) {
return (
x.id === manualRecommendationModel[0] ||
x.id === manualRecommendationModel[1]
)
} else {
return (
x.metadata?.tags?.includes('Featured') && x.metadata?.size < 5000000000
)
}
})
const remoteModel = configuredModels.filter(
(x) => !isLocalEngine(engines, x.engine)
)
const filteredModels = configuredModels.filter((model) => {
return (
isLocalEngine(engines, model.engine) &&
model.name.toLowerCase().includes(searchValue.toLowerCase())
)
})
const remoteModelEngine = remoteModel.map((x) => x.engine)
const groupByEngine = remoteModelEngine.filter(function (item, index) {
@@ -142,16 +127,16 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
!isOpen ? 'invisible' : 'visible'
)}
>
{!filteredModels.length ? (
{!featuredModels?.length ? (
<div className="p-3 text-center">
<p className="line-clamp-1 text-[hsla(var(--text-secondary))]">
No Result Found
</p>
</div>
) : (
filteredModels.map((model) => {
sources?.map((model) => {
const isDownloading = downloadingModels.some(
(md) => md === model.id
(md) => md === (model.models[0]?.id ?? model.id)
)
return (
<div
@@ -160,16 +145,19 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
>
<div className="flex items-center gap-2">
<p
className={twMerge('line-clamp-1')}
title={model.name}
className={'line-clamp-1 capitalize'}
title={extractModelName(model.id)}
>
{model.name}
{extractModelName(model.id)}
</p>
<ModelLabel size={model.metadata?.size} compact />
<ModelLabel
size={model.models[0]?.size}
compact
/>
</div>
<div className="flex items-center gap-2 text-[hsla(var(--text-tertiary))]">
<span className="font-medium">
{toGigabytes(model.metadata?.size)}
{toGigabytes(model.models[0]?.size)}
</span>
{!isDownloading ? (
<DownloadCloudIcon
@@ -177,15 +165,15 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
className="cursor-pointer text-[hsla(var(--app-link))]"
onClick={() =>
downloadModel(
model.sources[0].url,
model.id,
model.name
model.models[0]?.id ?? model.id
)
}
/>
) : (
Object.values(downloadStates)
.filter((x) => x.modelId === model.id)
.filter(
(x) => x.modelId === model.models[0]?.id
)
.map((item) => (
<ProgressCircle
key={item.modelId}
@@ -222,9 +210,9 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
</p>
</div>
{featuredModel.slice(0, 2).map((featModel) => {
{featuredModels?.map((featModel) => {
const isDownloading = downloadingModels.some(
(md) => md === featModel.id
(md) => md === (featModel.models[0]?.id ?? featModel.id)
)
return (
<div
@@ -232,13 +220,17 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
className="my-2 flex items-start justify-between gap-2 border-b border-[hsla(var(--app-border))] pb-4 pt-1 last:border-none"
>
<div className="w-full text-left">
<h6 className="mt-1.5 font-medium">{featModel.name}</h6>
<h6 className="mt-1.5 font-medium capitalize">
{extractModelName(featModel.id)}
</h6>
</div>
{isDownloading ? (
<div className="flex w-full flex-col items-end gap-2">
{Object.values(downloadStates)
.filter((x) => x.modelId === featModel.id)
.filter(
(x) => x.modelId === featModel.models[0]?.id
)
.map((item, i) => (
<div
className="mt-1.5 flex w-full items-center gap-2"
@@ -262,7 +254,7 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
</div>
))}
<span className="text-[hsla(var(--text-secondary))]">
{toGigabytes(featModel.metadata?.size)}
{toGigabytes(featModel.models[0]?.size)}
</span>
</div>
) : (
@@ -271,17 +263,13 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
theme="ghost"
className="!bg-[hsla(var(--secondary-bg))]"
onClick={() =>
downloadModel(
featModel.sources[0].url,
featModel.id,
featModel.name
)
downloadModel(featModel.models[0]?.id)
}
>
Download
</Button>
<span className="text-[hsla(var(--text-secondary))]">
{toGigabytes(featModel.metadata?.size)}
{toGigabytes(featModel.models[0]?.size)}
</span>
</div>
)}
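Both screens now hand downloadModel a single model identifier instead of the old (url, id, name) triple, leaving URL resolution to the download layer. The hook's new signature is inferred from these call sites, not shown in this diff; a minimal sketch:

// Sketch of the call-site shape after this change; `downloadModel` comes from useDownloadModel().
type StartDownloadSketch = (modelId?: string) => void

const handleFeaturedDownload = (
  downloadModel: StartDownloadSketch,
  model: { id: string; models: { id: string }[] }
) => downloadModel(model.models[0]?.id ?? model.id)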

View File

@@ -8,7 +8,12 @@ export const normalizeModelId = (downloadUrl: string): string => {
return downloadUrl.split('/').pop() ?? downloadUrl
}
/**
* Default models to recommend to users when they first open the app.
* TODO: These will be replaced when we have a proper recommendation system
* AND cortexso repositories are updated with tags.
*/
export const manualRecommendationModel = [
'llama3.2-1b-instruct',
'llama3.2-3b-instruct',
'cortexso/deepseek-r1',
'cortexso/llama3.2',
]
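These ids are now full source identifiers (owner/model), matching the id field returned by useGetModelSources, so both screens pick featured models with a plain membership filter. A usage sketch mirroring the diffs above (the helper name is hypothetical; the screens inline this filter):

// Mirrors the filter used in ModelDropdown and OnDeviceStarterScreen.
const pickFeatured = <T extends { id: string }>(sources?: T[]): T[] =>
  sources?.filter((x) => manualRecommendationModel.includes(x.id)) ?? []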