diff --git a/extensions/model-extension/package.json b/extensions/model-extension/package.json
index 94211a11d..46e92b0ed 100644
--- a/extensions/model-extension/package.json
+++ b/extensions/model-extension/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@janhq/model-extension",
-  "version": "1.0.20",
+  "version": "1.0.21",
   "description": "Model Management Extension provides model exploration and seamless downloads",
   "main": "dist/index.js",
   "module": "dist/module.js",
diff --git a/models/capybara-34b/model.json b/models/capybara-34b/model.json
index 521fb5301..ffca28c6d 100644
--- a/models/capybara-34b/model.json
+++ b/models/capybara-34b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/codeninja-1.0-7b/model.json b/models/codeninja-1.0-7b/model.json
index c543f1a40..98fbac5df 100644
--- a/models/codeninja-1.0-7b/model.json
+++ b/models/codeninja-1.0-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": ["<|end_of_turn|>"],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/deepseek-coder-1.3b/model.json b/models/deepseek-coder-1.3b/model.json
index 46f7a80ea..8c454802f 100644
--- a/models/deepseek-coder-1.3b/model.json
+++ b/models/deepseek-coder-1.3b/model.json
@@ -16,6 +16,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/deepseek-coder-34b/model.json b/models/deepseek-coder-34b/model.json
index 594470626..905a66033 100644
--- a/models/deepseek-coder-34b/model.json
+++ b/models/deepseek-coder-34b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/dolphin-2.7-mixtral-8x7b/model.json b/models/dolphin-2.7-mixtral-8x7b/model.json
index 4b16b5035..482f1a30e 100644
--- a/models/dolphin-2.7-mixtral-8x7b/model.json
+++ b/models/dolphin-2.7-mixtral-8x7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/llama2-chat-70b-q4/model.json b/models/llama2-chat-70b-q4/model.json
index 84e22d1f0..2595ab677 100644
--- a/models/llama2-chat-70b-q4/model.json
+++ b/models/llama2-chat-70b-q4/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/llama2-chat-7b-q4/model.json b/models/llama2-chat-7b-q4/model.json
index ec8bb2cd3..68eab3790 100644
--- a/models/llama2-chat-7b-q4/model.json
+++ b/models/llama2-chat-7b-q4/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/mistral-ins-7b-q4/model.json b/models/mistral-ins-7b-q4/model.json
index f06bddaad..6db1aa35b 100644
--- a/models/mistral-ins-7b-q4/model.json
+++ b/models/mistral-ins-7b-q4/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/mixtral-8x7b-instruct/model.json b/models/mixtral-8x7b-instruct/model.json
index 5167e8335..31ff2838a 100644
--- a/models/mixtral-8x7b-instruct/model.json
+++ b/models/mixtral-8x7b-instruct/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/noromaid-7b/model.json b/models/noromaid-7b/model.json
index dd919bb65..fbb7858e1 100644
--- a/models/noromaid-7b/model.json
+++ b/models/noromaid-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/openchat-3.5-7b/model.json b/models/openchat-3.5-7b/model.json
index 09b95eb64..e4b72f9c6 100644
--- a/models/openchat-3.5-7b/model.json
+++ b/models/openchat-3.5-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": ["<|end_of_turn|>"],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/openhermes-neural-7b/model.json b/models/openhermes-neural-7b/model.json
index 230ef65f2..ca3e88502 100644
--- a/models/openhermes-neural-7b/model.json
+++ b/models/openhermes-neural-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/phi-2-3b/model.json b/models/phi-2-3b/model.json
index 10e39c292..97ff369e7 100644
--- a/models/phi-2-3b/model.json
+++ b/models/phi-2-3b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/phind-34b/model.json b/models/phind-34b/model.json
index 29ef7572b..7fc77ed71 100644
--- a/models/phind-34b/model.json
+++ b/models/phind-34b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/solar-10.7b-slerp/model.json b/models/solar-10.7b-slerp/model.json
index 7963bd05f..9177fa013 100644
--- a/models/solar-10.7b-slerp/model.json
+++ b/models/solar-10.7b-slerp/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/starling-7b/model.json b/models/starling-7b/model.json
index d5f5b57c8..1a6d7e55c 100644
--- a/models/starling-7b/model.json
+++ b/models/starling-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": ["<|end_of_turn|>"],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/stealth-v1.2-7b/model.json b/models/stealth-v1.2-7b/model.json
index dee5d68f5..92bfe46e1 100644
--- a/models/stealth-v1.2-7b/model.json
+++ b/models/stealth-v1.2-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/tinyllama-1.1b/model.json b/models/tinyllama-1.1b/model.json
index 50a68f1a9..641511569 100644
--- a/models/tinyllama-1.1b/model.json
+++ b/models/tinyllama-1.1b/model.json
@@ -15,13 +15,14 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 2048,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
   "metadata": {
     "author": "TinyLlama",
     "tags": ["Tiny", "Foundation Model"],
-    "size": 1170000000
+    "size": 669000000
   },
   "engine": "nitro"
 }
\ No newline at end of file
diff --git a/models/trinity-v1.2-7b/model.json b/models/trinity-v1.2-7b/model.json
index 1532b1f09..ce5e7a4cf 100644
--- a/models/trinity-v1.2-7b/model.json
+++ b/models/trinity-v1.2-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/tulu-2-70b/model.json b/models/tulu-2-70b/model.json
index 4437dcbbd..ae95b870d 100644
--- a/models/tulu-2-70b/model.json
+++ b/models/tulu-2-70b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/wizardcoder-13b/model.json b/models/wizardcoder-13b/model.json
index f73c93e8e..1605ed362 100644
--- a/models/wizardcoder-13b/model.json
+++ b/models/wizardcoder-13b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/yarn-mistral-7b/model.json b/models/yarn-mistral-7b/model.json
index 8b048dd3d..2676fbb58 100644
--- a/models/yarn-mistral-7b/model.json
+++ b/models/yarn-mistral-7b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/models/yi-34b/model.json b/models/yi-34b/model.json
index d5b90febe..8ff23aaa0 100644
--- a/models/yi-34b/model.json
+++ b/models/yi-34b/model.json
@@ -15,6 +15,7 @@
     "top_p": 0.95,
     "stream": true,
     "max_tokens": 4096,
+    "stop": [],
     "frequency_penalty": 0,
     "presence_penalty": 0
   },
diff --git a/web/containers/CardSidebar/index.tsx b/web/containers/CardSidebar/index.tsx
index ac564c2f2..8222c4918 100644
--- a/web/containers/CardSidebar/index.tsx
+++ b/web/containers/CardSidebar/index.tsx
@@ -5,12 +5,10 @@ import {
   ChevronDownIcon,
   MoreVerticalIcon,
   FolderOpenIcon,
-  Code2Icon,
   PencilIcon,
 } from 'lucide-react'
 import { twMerge } from 'tailwind-merge'
 
-import { useActiveModel } from '@/hooks/useActiveModel'
 import { useClickOutside } from '@/hooks/useClickOutside'
 
 import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
@@ -33,16 +31,15 @@ export default function CardSidebar({
   const [more, setMore] = useState(false)
   const [menu, setMenu] = useState(null)
   const [toggle, setToggle] = useState(null)
-  const { activeModel } = useActiveModel()
   const activeThread = useAtomValue(activeThreadAtom)
 
   useClickOutside(() => setMore(false), null, [menu, toggle])
 
   let openFolderTitle: string = 'Open Containing Folder'
   if (isMac) {
-    openFolderTitle = 'Reveal in Finder'
+    openFolderTitle = 'Show in Finder'
   } else if (isWindows) {
-    openFolderTitle = 'Reveal in File Explorer'
+    openFolderTitle = 'Show in File Explorer'
   }
 
   return (
@@ -108,7 +105,7 @@
                 {title === 'Model' ? (
-                  Show in Finder
+                  {openFolderTitle}
                 Opens thread.json. Changes affect this thread only.
diff --git a/web/hooks/useUpdateModelParameters.ts b/web/hooks/useUpdateModelParameters.ts
index db7c759fc..632e9f631 100644
--- a/web/hooks/useUpdateModelParameters.ts
+++ b/web/hooks/useUpdateModelParameters.ts
@@ -42,7 +42,9 @@ export default function useUpdateModelParameters() {
     }
     const updatedModelParams: ModelParams = {
       ...activeModelParams,
-      [name]: value,
+      // Explicitly set the value to an array if the name is 'stop'
+      // This is because the inference engine would only accept an array for the 'stop' parameter
+      [name]: name === 'stop' ? (value === '' ? [] : [value]) : value,
     }
 
     // update the state
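The model.json updates and the hook change above both treat 'stop' as an array. For illustration only, a minimal standalone sketch of the normalization the updated hook applies, assuming the UI hands the 'stop' field over as a plain string; the `ModelParams` alias and `applyParam` helper here are made-up names for the sketch, not identifiers from the codebase:

```ts
// Sketch of the 'stop' normalization added in useUpdateModelParameters.ts.
// ModelParams is simplified to an open record for illustration.
type ModelParams = Record<string, unknown>

function applyParam(
  params: ModelParams,
  name: string,
  value: string
): ModelParams {
  return {
    ...params,
    // The inference engine only accepts an array for 'stop': an empty input
    // clears the list ([]), any other string becomes a one-element array.
    [name]: name === 'stop' ? (value === '' ? [] : [value]) : value,
  }
}

// Example:
// applyParam({ max_tokens: 4096 }, 'stop', '<|end_of_turn|>')
//   -> { max_tokens: 4096, stop: ['<|end_of_turn|>'] }
// applyParam({ max_tokens: 4096 }, 'stop', '')
//   -> { max_tokens: 4096, stop: [] }
```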