From c35ba68119eeb8dbbc9c6a6248a2f06241a6bbc9 Mon Sep 17 00:00:00 2001 From: 0xSage Date: Fri, 1 Dec 2023 15:03:37 +0800 Subject: [PATCH 01/19] chore: fix pr auto labeling --- .github/workflows/auto-label-conventional-commits.yaml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/auto-label-conventional-commits.yaml b/.github/workflows/auto-label-conventional-commits.yaml index 7e6e19a66..3a915dd83 100644 --- a/.github/workflows/auto-label-conventional-commits.yaml +++ b/.github/workflows/auto-label-conventional-commits.yaml @@ -1,23 +1,18 @@ name: "Auto Label Conventional Commits" on: - issues: - types: - - reopened - - opened pull_request: types: - reopened - opened jobs: - label_issues: + label_prs: runs-on: ubuntu-latest permissions: - issues: write pull-requests: write steps: - name: Checkout uses: actions/checkout@v4 - - name: Label issues + - name: Label PRs run: | ISSUE_TITLE=$(gh issue view ${{ github.event.number }} --json title -q ".title") case "$ISSUE_TITLE" in From e6812b1247534c223d9e277353bec49fbc1785e5 Mon Sep 17 00:00:00 2001 From: Hoang Ha <64120343+hahuyhoang411@users.noreply.github.com> Date: Fri, 1 Dec 2023 17:20:58 +0700 Subject: [PATCH 02/19] chore: pre-populate Jan's /models folder with model.jsons (#775) * draft model.json * islm3b update * capybara 34b update * deepseek coder update * dolphin yi update * fix the maxtokens of islm * lzlv 70b update * marx3b update * mythomax 13b update * update neural chat 7b * noromaid 20b update * update openchat 7b * openhermes7b update * openorca 7b * orca 13b update * phind 34b update * rocket 3b update * starling 7b update * storytelling 70b update * tiefighter 13B * update tiefighter tags * tinyllama update * wizard coder 13b * update wizard coder 13b description * wizard coder 34b update * wizard coder minor fix * xwin 70b update * yarn 70b * yi 34b * zephyr beta 7b * neuralhermes-7b update * change path + ctxlen * update id * fix startling --- .gitignore | 1 - models/capybara-34b/model.json | 24 ++++++++++++++++++++++++ models/deepseek-coder-1.3b/model.json | 23 +++++++++++++++++++++++ models/dolphin-yi-34b/model.json | 24 ++++++++++++++++++++++++ models/islm-3b/model.json | 24 ++++++++++++++++++++++++ models/lzlv-70b/model.json | 24 ++++++++++++++++++++++++ models/marx-3b/model.json | 23 +++++++++++++++++++++++ models/mythomax-13b/model.json | 24 ++++++++++++++++++++++++ models/neural-chat-7b/model.json | 24 ++++++++++++++++++++++++ models/neuralhermes-7b/model.json | 24 ++++++++++++++++++++++++ models/noromaid-20b/model.json | 24 ++++++++++++++++++++++++ models/openchat-7b/model.json | 24 ++++++++++++++++++++++++ models/openhermes-mistral-7b/model.json | 24 ++++++++++++++++++++++++ models/openorca-13b/model.json | 24 ++++++++++++++++++++++++ models/openorca-7b/model.json | 24 ++++++++++++++++++++++++ models/phind-34b/model.json | 24 ++++++++++++++++++++++++ models/rocket-3b/model.json | 23 +++++++++++++++++++++++ models/starling-7b/model.json | 24 ++++++++++++++++++++++++ models/storytelling-70b/model.json | 24 ++++++++++++++++++++++++ models/tiefighter-13b/model.json | 24 ++++++++++++++++++++++++ models/tinyllama-1.1b/model.json | 23 +++++++++++++++++++++++ models/wizardcoder-13b/model.json | 24 ++++++++++++++++++++++++ models/wizardcoder-34b/model.json | 24 ++++++++++++++++++++++++ models/xwin-70b/model.json | 24 ++++++++++++++++++++++++ models/yarn-70b/model.json | 21 +++++++++++++++++++++ models/yi-34b/model.json | 24 ++++++++++++++++++++++++ 
models/zephyr-beta-7b/model.json | 24 ++++++++++++++++++++++++ 27 files changed, 617 insertions(+), 1 deletion(-) create mode 100644 models/capybara-34b/model.json create mode 100644 models/deepseek-coder-1.3b/model.json create mode 100644 models/dolphin-yi-34b/model.json create mode 100644 models/islm-3b/model.json create mode 100644 models/lzlv-70b/model.json create mode 100644 models/marx-3b/model.json create mode 100644 models/mythomax-13b/model.json create mode 100644 models/neural-chat-7b/model.json create mode 100644 models/neuralhermes-7b/model.json create mode 100644 models/noromaid-20b/model.json create mode 100644 models/openchat-7b/model.json create mode 100644 models/openhermes-mistral-7b/model.json create mode 100644 models/openorca-13b/model.json create mode 100644 models/openorca-7b/model.json create mode 100644 models/phind-34b/model.json create mode 100644 models/rocket-3b/model.json create mode 100644 models/starling-7b/model.json create mode 100644 models/storytelling-70b/model.json create mode 100644 models/tiefighter-13b/model.json create mode 100644 models/tinyllama-1.1b/model.json create mode 100644 models/wizardcoder-13b/model.json create mode 100644 models/wizardcoder-34b/model.json create mode 100644 models/xwin-70b/model.json create mode 100644 models/yarn-70b/model.json create mode 100644 models/yi-34b/model.json create mode 100644 models/zephyr-beta-7b/model.json diff --git a/.gitignore b/.gitignore index 9dfd1887c..a273404c2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,6 @@ .env # Jan inference -models/** error.log node_modules *.tgz diff --git a/models/capybara-34b/model.json b/models/capybara-34b/model.json new file mode 100644 index 000000000..562bcbe93 --- /dev/null +++ b/models/capybara-34b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Nous-Capybara-34B-GGUF/resolve/main/nous-capybara-34b.Q5_K_M.gguf", + "id": "capybara-34b", + "object": "model", + "name": "Capybara 200k 34B", + "version": 1.0, + "description": "Nous Capybara 34B, a variant of the Yi-34B model, is the first Nous model with a 200K context length, trained for three epochs on the innovative Capybara dataset.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "USER: ", + "ai_prompt": "ASSISTANT: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "NousResearch, The Bloke", + "tags": ["General", "Big Context Length"], + "size": 24320000000 + } + } + \ No newline at end of file diff --git a/models/deepseek-coder-1.3b/model.json b/models/deepseek-coder-1.3b/model.json new file mode 100644 index 000000000..2ff6d6e7b --- /dev/null +++ b/models/deepseek-coder-1.3b/model.json @@ -0,0 +1,23 @@ +{ + "source_url": "https://huggingface.co/TheBloke/deepseek-coder-1.3b-base-GGUF/resolve/main/deepseek-coder-1.3b-base.Q4_K_M.gguf", + "id": "deepseek-coder-1.3b", + "object": "model", + "name": "Deepseek Coder 1.3B", + "version": "1.0", + "description": "", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "", + "ai_prompt": "" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "deepseek, The Bloke", + "tags": ["Code"], + "size": 870000000 + } + } diff --git a/models/dolphin-yi-34b/model.json b/models/dolphin-yi-34b/model.json new file mode 100644 index 000000000..3b1bf3619 --- /dev/null +++ b/models/dolphin-yi-34b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": 
"https://huggingface.co/TheBloke/dolphin-2_2-yi-34b-GGUF/resolve/main/dolphin-2_2-yi-34b.Q5_K_M.gguf", + "id": "dolphin-yi-34b", + "object": "model", + "name": "Dolphin Yi 34B", + "version": "1.0", + "description": "Dolphin, based on the Yi-34B model and enhanced with features like conversation and empathy, is trained on a unique dataset for advanced multi-turn conversations. Notably uncensored, it requires careful implementation of an alignment layer for ethical use.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "ehartford, The Bloke", + "tags": ["General Use", "Role-playing"], + "size": 24320000000 + } + } + \ No newline at end of file diff --git a/models/islm-3b/model.json b/models/islm-3b/model.json new file mode 100644 index 000000000..916d7c50e --- /dev/null +++ b/models/islm-3b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/UmbrellaCorp/IS-LM-3B_GGUF/resolve/main/IS-LM-Q4_K_M.gguf", + "id": "islm-3b", + "object": "model", + "name": "IS LM 3B", + "version": "1.0", + "description": "IS LM 3B, based on the StableLM 3B model is specifically finetuned for economic analysis using DataForge Economics and QLoRA over three epochs, enhancing its proficiency in economic forecasting and analysis.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "USER: ", + "ai_prompt": "ASSISTANT: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "UmbrellaCorp, The Bloke", + "tags": ["General Use", "Economics"], + "size": 1710000000 + } + } + \ No newline at end of file diff --git a/models/lzlv-70b/model.json b/models/lzlv-70b/model.json new file mode 100644 index 000000000..4aaee79b3 --- /dev/null +++ b/models/lzlv-70b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/lzlv_70B-GGUF/resolve/main/lzlv_70b_fp16_hf.Q5_K_M.gguf", + "id": "lzlv-70b", + "object": "model", + "name": "Lzlv 70B", + "version": "1.0", + "description": "lzlv_70B is a sophisticated AI model designed for roleplaying and creative tasks. 
This merge aims to combine intelligence with creativity, seemingly outperforming its individual components in complex scenarios and creative outputs.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "USER: ", + "ai_prompt": "ASSISTANT: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "lizpreciatior, The Bloke", + "tags": ["General Use", "Role-playing"], + "size": 48750000000 + } + } + \ No newline at end of file diff --git a/models/marx-3b/model.json b/models/marx-3b/model.json new file mode 100644 index 000000000..78617d5c3 --- /dev/null +++ b/models/marx-3b/model.json @@ -0,0 +1,23 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Marx-3B-v3-GGUF/resolve/main/marx-3b-v3.Q4_K_M.gguf", + "id": "marx-3b", + "object": "model", + "name": "Marx 3B", + "version": "1.0", + "description": "Marx 3B, based on the StableLM 3B model, is specifically finetuned for chatting using EverythingLM data and QLoRA over two epochs, enhancing its proficiency in understanding general knowledge.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### HUMAN: ", + "ai_prompt": "### RESPONSE: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Bohan Du, The Bloke", + "tags": ["General Use"], + "size": 1620000000 + } + } \ No newline at end of file diff --git a/models/mythomax-13b/model.json b/models/mythomax-13b/model.json new file mode 100644 index 000000000..455f73968 --- /dev/null +++ b/models/mythomax-13b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q5_K_M.gguf", + "id": "mythomax-13b", + "object": "model", + "name": "Mythomax L2 13B", + "version": "1.0", + "description": "Mythomax L2 13b, an advanced AI model derived from MythoMix, merges MythoLogic-L2's deep comprehension with Huginn's writing skills through a unique tensor merge technique, excelling in roleplaying and storytelling.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### Instruction: ", + "ai_prompt": "### Response: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Gryphe, The Bloke", + "tags": ["Role-playing"], + "size": 9230000000 + } + } + \ No newline at end of file diff --git a/models/neural-chat-7b/model.json b/models/neural-chat-7b/model.json new file mode 100644 index 000000000..f4f4f14d4 --- /dev/null +++ b/models/neural-chat-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/neural-chat-7B-v3-1-GGUF/resolve/main/neural-chat-7b-v3-1.Q4_K_M.gguf", + "id": "neural-chat-7b", + "object": "model", + "name": "Neural Chat 7B", + "version": "1.0", + "description": "The Neural Chat 7B model, developed on the foundation of mistralai/Mistral-7B-v0.1, has been fine-tuned using the Open-Orca/SlimOrca dataset and aligned with the Direct Preference Optimization (DPO) algorithm. 
It has demonstrated substantial improvements in various AI tasks and performs well on the open_llm_leaderboard.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "### System: ", + "user_prompt": "### User: ", + "ai_prompt": "### Assistant: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Intel, The Bloke", + "tags": ["General Use", "Role-playing", "Big Context Length"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/neuralhermes-7b/model.json b/models/neuralhermes-7b/model.json new file mode 100644 index 000000000..07cca58d4 --- /dev/null +++ b/models/neuralhermes-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/NeuralHermes-2.5-Mistral-7B-GGUF/resolve/main/neuralhermes-2.5-mistral-7b.Q4_K_M.gguf", + "id": "neuralhermes-7b", + "object": "model", + "name": "NeuralHermes 7B", + "version": "1.0", + "description": "NeuralHermes 2.5 has been enhanced using Direct Preference Optimization. This fine-tuning, inspired by the RLHF process of Neural-chat-7b and OpenHermes-2.5-Mistral-7B, has led to improved performance across several benchmarks.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Intel, The Bloke", + "tags": ["General Use", "Code", "Big Context Length"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/noromaid-20b/model.json b/models/noromaid-20b/model.json new file mode 100644 index 000000000..86291e4f5 --- /dev/null +++ b/models/noromaid-20b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Noromaid-20B-v0.1.1-GGUF/resolve/main/noromaid-20b-v0.1.1.Q4_K_M.gguf", + "id": "noromaid-20b", + "object": "model", + "name": "Noromaid 20B", + "version": "1.0", + "description": "The Noromaid 20b model is designed for role-playing and general use, featuring a unique touch with the no_robots dataset that enhances human-like behavior.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### Instruction: ", + "ai_prompt": "### Response: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "NeverSleep, The Bloke", + "tags": ["Role-playing"], + "size": 12040000000 + } + } + \ No newline at end of file diff --git a/models/openchat-7b/model.json b/models/openchat-7b/model.json new file mode 100644 index 000000000..1fd6bb259 --- /dev/null +++ b/models/openchat-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/openchat_3.5-GGUF/resolve/main/openchat_3.5.Q4_K_M.gguf", + "id": "openchat-7b", + "object": "model", + "name": "Open Chat 3.5 7B", + "version": "1.0", + "description": "OpenChat represents a breakthrough in the realm of open-source language models. 
By implementing the C-RLFT fine-tuning strategy, inspired by offline reinforcement learning, this 7B model achieves results on par with ChatGPT (March).", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "GPT4 User: ", + "ai_prompt": "<|end_of_turn|>\nGPT4 Assistant: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "OpenChat, The Bloke", + "tags": ["General", "Code"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/openhermes-mistral-7b/model.json b/models/openhermes-mistral-7b/model.json new file mode 100644 index 000000000..6b64363d5 --- /dev/null +++ b/models/openhermes-mistral-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/OpenHermes-2.5-Mistral-7B-GGUF/resolve/main/openhermes-2.5-mistral-7b.Q4_K_M.gguf", + "id": "openhermes-mistral-7b", + "object": "model", + "name": "Openhermes 2.5 Mistral 7B", + "version": "1.0", + "description": "The OpenHermes 2.5 Mistral 7B incorporates additional code datasets, more than a million GPT-4 generated data examples, and other high-quality open datasets. This enhancement led to significant improvement in benchmarks, highlighting its improved skill in handling code-centric tasks.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Teknium, The Bloke", + "tags": ["General", "Roleplay"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/openorca-13b/model.json b/models/openorca-13b/model.json new file mode 100644 index 000000000..02a555430 --- /dev/null +++ b/models/openorca-13b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Orca-2-13B-GGUF/resolve/main/orca-2-13b.Q5_K_M.gguf", + "id": "openorca-13b", + "object": "model", + "name": "Orca 2 13B", + "version": "1.0", + "description": "Orca 2 is a finetuned version of LLAMA-2, designed primarily for single-turn responses in reasoning, reading comprehension, math problem solving, and text summarization.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Microsoft, The Bloke", + "tags": ["General Use"], + "size": 9230000000 + } + } + \ No newline at end of file diff --git a/models/openorca-7b/model.json b/models/openorca-7b/model.json new file mode 100644 index 000000000..42c88212c --- /dev/null +++ b/models/openorca-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-GGUF/resolve/main/mistral-7b-openorca.Q4_K_M.gguf", + "id": "openorca-7b", + "object": "model", + "name": "OpenOrca 7B", + "version": "1.0", + "description": "OpenOrca 8k 7B is a model based on Mistral 7B, fine-tuned using the OpenOrca dataset. 
Notably ranked first on the HF Leaderboard for models under 30B, it excels in efficiency and accessibility.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "OpenOrca, The Bloke", + "tags": ["General", "Code"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/phind-34b/model.json b/models/phind-34b/model.json new file mode 100644 index 000000000..4391ae08d --- /dev/null +++ b/models/phind-34b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Phind-CodeLlama-34B-v2-GGUF/resolve/main/phind-codellama-34b-v2.Q5_K_M.gguf", + "id": "phind-34b", + "object": "model", + "name": "Phind 34B", + "version": "1.0", + "description": "Phind-CodeLlama-34B-v2 is an AI model fine-tuned on 1.5B tokens of high-quality programming data. It's a SOTA open-source model in coding. This multi-lingual model excels in various programming languages, including Python, C/C++, TypeScript, Java, and is designed to be steerable and user-friendly.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "### System Prompt\n", + "user_prompt": "### User Message\n", + "ai_prompt": "### Assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Phind, The Bloke", + "tags": ["Code", "Big Context Length"], + "size": 24320000000 + } + } + \ No newline at end of file diff --git a/models/rocket-3b/model.json b/models/rocket-3b/model.json new file mode 100644 index 000000000..b00eb1f44 --- /dev/null +++ b/models/rocket-3b/model.json @@ -0,0 +1,23 @@ +{ + "source_url": "https://huggingface.co/TheBloke/rocket-3B-GGUF/resolve/main/rocket-3b.Q4_K_M.gguf", + "id": "rocket-3b", + "object": "model", + "name": "Rocket 3B", + "version": "1.0", + "description": "Rocket-3B is a GPT-like model, primarily English, fine-tuned on diverse public datasets. It outperforms larger models in benchmarks, showcasing superior understanding and text generation, making it an effective chat model for its size.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "pansophic, The Bloke", + "tags": ["General Use"], + "size": 1710000000 + } + } \ No newline at end of file diff --git a/models/starling-7b/model.json b/models/starling-7b/model.json new file mode 100644 index 000000000..c029ea7d9 --- /dev/null +++ b/models/starling-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Starling-LM-7B-alpha-GGUF/resolve/main/starling-lm-7b-alpha.Q4_K_M.gguf", + "id": "starling-7b", + "object": "model", + "name": "Starling alpha 7B", + "version": "1.0", + "description": "Starling-LM-7B-alpha is a language model finetuned with Reinforcement Learning from AI Feedback from Openchat 3.5. 
It stands out for its impressive performance using GPT-4 as a judge, making it one of the top-performing models in its category.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "GPT4 User: ", + "ai_prompt": "<|end_of_turn|>\nGPT4 Assistant: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Berkeley-nest, The Bloke", + "tags": ["General", "Code"], + "size": 4370000000 + } + } + \ No newline at end of file diff --git a/models/storytelling-70b/model.json b/models/storytelling-70b/model.json new file mode 100644 index 000000000..76e6f7922 --- /dev/null +++ b/models/storytelling-70b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/GOAT-70B-Storytelling-GGUF/resolve/main/goat-70b-storytelling.Q5_K_M.gguf", + "id": "storytelling-70b", + "object": "model", + "name": "Storytelling 70B", + "version": "1.0", + "description": "The GOAT-70B-Storytelling model is designed for autonomous story-writing, including crafting books and movie scripts. Based on the LLaMA 2 70B architecture, this model excels in generating cohesive and engaging narratives using inputs like plot outlines and character profiles.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### USER: ", + "ai_prompt": "\n### ASSISTANT: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "GOAT-AI, The Bloke", + "tags": ["General Use", "Writing"], + "size": 48750000000 + } + } + \ No newline at end of file diff --git a/models/tiefighter-13b/model.json b/models/tiefighter-13b/model.json new file mode 100644 index 000000000..b1d354ce3 --- /dev/null +++ b/models/tiefighter-13b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/LLaMA2-13B-Tiefighter-GGUF/resolve/main/llama2-13b-tiefighter.Q5_K_M.gguf", + "id": "tiefighter-13b", + "object": "model", + "name": "Tiefighter 13B", + "version": "1.0", + "description": "Tiefighter-13B is a highly creative, merged AI model achieved by combining various 'LORAs' on top of an existing merge, particularly focusing on storytelling and improvisation. This model excels in story writing, chatbots, and adventuring, and is designed to perform better with less detailed inputs, leveraging its inherent creativity.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### Instruction: ", + "ai_prompt": "\n### Response: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "KoboldAI, The Bloke", + "tags": ["General Use", "Role-playing", "Writing"], + "size": 9230000000 + } + } + \ No newline at end of file diff --git a/models/tinyllama-1.1b/model.json b/models/tinyllama-1.1b/model.json new file mode 100644 index 000000000..f561eb25d --- /dev/null +++ b/models/tinyllama-1.1b/model.json @@ -0,0 +1,23 @@ +{ + "source_url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v0.6/resolve/main/ggml-model-q4_0.gguf", + "id": "tinyllama-1.1b", + "object": "model", + "name": "TinyLlama Chat 1.1B", + "version": "1.0", + "description": "The TinyLlama project, featuring a 1.1B parameter Llama model, is pretrained on an expansive 3 trillion token dataset. Its design ensures easy integration with various Llama-based open-source projects. 
Despite its smaller size, it efficiently utilizes lower computational and memory resources, drawing on GPT-4's analytical prowess to enhance its conversational abilities and versatility.", + "format": "gguf", + "settings": { + "ctx_len": 2048, + "system_prompt": "<|system|>\n", + "user_prompt": "<|user|>\n", + "ai_prompt": "<|assistant|>\n" + }, + "parameters": { + "max_tokens": 2048 + }, + "metadata": { + "author": "TinyLlama", + "tags": ["General Use"], + "size": 637000000 + } +} \ No newline at end of file diff --git a/models/wizardcoder-13b/model.json b/models/wizardcoder-13b/model.json new file mode 100644 index 000000000..944b5632b --- /dev/null +++ b/models/wizardcoder-13b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GGUF/resolve/main/wizardcoder-python-13b-v1.0.Q5_K_M.gguf", + "id": "wizardcoder-13b", + "object": "model", + "name": "Wizard Coder Python 13B", + "version": "1.0", + "description": "WizardCoder-Python-13B is a Python coding model comparable to major models like ChatGPT-3.5. This model, based on the Llama2 architecture, demonstrates high proficiency in specific domains like coding and mathematics.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### Instruction:\n", + "ai_prompt": "### Response:\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "WizardLM, The Bloke", + "tags": ["Code", "Big Context Length"], + "size": 9230000000 + } + } + \ No newline at end of file diff --git a/models/wizardcoder-34b/model.json b/models/wizardcoder-34b/model.json new file mode 100644 index 000000000..aa2618e1b --- /dev/null +++ b/models/wizardcoder-34b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/WizardCoder-Python-34B-V1.0-GGUF/resolve/main/wizardcoder-python-34b-v1.0.Q5_K_M.gguf", + "id": "wizardcoder-34b", + "object": "model", + "name": "Wizard Coder Python 34B", + "version": "1.0", + "description": "WizardCoder-Python-34B is a Python coding model comparable to major models like ChatGPT-3.5. 
This model, based on the Llama2 architecture, demonstrates high proficiency in specific domains like coding and mathematics.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "### Instruction:\n", + "ai_prompt": "### Response:\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "WizardLM, The Bloke", + "tags": ["Code", "Big Context Length"], + "size": 24320000000 + } + } + \ No newline at end of file diff --git a/models/xwin-70b/model.json b/models/xwin-70b/model.json new file mode 100644 index 000000000..a5c1647b0 --- /dev/null +++ b/models/xwin-70b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Xwin-LM-70B-V0.1-GGUF/resolve/main/xwin-lm-70b-v0.1.Q5_K_M.gguf", + "id": "xwin-70b", + "object": "model", + "name": "Xwin LM 70B", + "version": "1.0", + "description": "Xwin-LM, based on Llama2 models, emphasizes alignment and exhibits advanced language understanding, text generation, and role-playing abilities.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "", + "user_prompt": "USER: ", + "ai_prompt": "ASSISTANT: " + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "Xwin-LM, The Bloke", + "tags": ["General Use", "Role-playing"], + "size": 48750000000 + } + } + \ No newline at end of file diff --git a/models/yarn-70b/model.json b/models/yarn-70b/model.json new file mode 100644 index 000000000..67d8d3804 --- /dev/null +++ b/models/yarn-70b/model.json @@ -0,0 +1,21 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Yarn-Llama-2-70B-32k-GGUF/resolve/main/yarn-llama-2-70b-32k.Q5_K_M.gguf", + "id": "yarn-70b", + "object": "model", + "name": "Yarn 32k 70B", + "version": "1.0", + "description": "Yarn-Llama-2-70b-32k is designed specifically for handling long contexts. 
It represents an extension of the Llama-2-70b-hf model, now supporting a 32k token context window.", + "format": "gguf", + "settings": { + "ctx_len": 4096 + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "NousResearch, The Bloke", + "tags": ["General Use", "Big Context Length"], + "size": 48750000000 + } + } + \ No newline at end of file diff --git a/models/yi-34b/model.json b/models/yi-34b/model.json new file mode 100644 index 000000000..f899bc54b --- /dev/null +++ b/models/yi-34b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/Yi-34B-Chat-GGUF/resolve/main/yi-34b-chat.Q5_K_M.gguf", + "id": "yi-34b", + "object": "model", + "name": "Yi 34B", + "version": "1.0", + "description": "Yi-34B, a specialized chat model, is known for its diverse and creative responses and excels across various NLP tasks and benchmarks.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|im_start|>system\n", + "user_prompt": "<|im_end|>\n<|im_start|>user\n", + "ai_prompt": "<|im_end|>\n<|im_start|>assistant\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "01-ai, The Bloke", + "tags": ["General", "Role-playing", "Writing"], + "size": 24320000000 + } + } + \ No newline at end of file diff --git a/models/zephyr-beta-7b/model.json b/models/zephyr-beta-7b/model.json new file mode 100644 index 000000000..24529bc9a --- /dev/null +++ b/models/zephyr-beta-7b/model.json @@ -0,0 +1,24 @@ +{ + "source_url": "https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/resolve/main/zephyr-7b-beta.Q4_K_M.gguf", + "id": "zephyr-beta-7b", + "object": "model", + "name": "Zephyr Beta 7B", + "version": "1.0", + "description": "The Zephyr-7B-β model marks the second iteration in the Zephyr series, designed to function as an effective assistant. 
It has been fine-tuned from the mistralai/Mistral-7B-v0.1 base model, utilizing a combination of public and synthetic datasets with the application of Direct Preference Optimization.", + "format": "gguf", + "settings": { + "ctx_len": 4096, + "system_prompt": "<|system|>\n", + "user_prompt": "\n<|user|>\n", + "ai_prompt": "\n<|assistant|>\n" + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "HuggingFaceH4, The Bloke", + "tags": ["General Use", "Big Context Length"], + "size": 4370000000 + } + } + \ No newline at end of file From 1bf4c1b6210bdcc98ebf9ac0275fc870795041b7 Mon Sep 17 00:00:00 2001 From: NamH Date: Fri, 1 Dec 2023 18:03:51 +0700 Subject: [PATCH 03/19] feat: pre-populate Jan's /models folder (#796) Signed-off-by: James Co-authored-by: James --- .gitignore | 1 + core/src/core.ts | 4 + core/src/extensions/model.ts | 4 +- core/src/fs.ts | 4 + core/src/types/index.ts | 79 +++------ electron/handlers/app.ts | 8 +- electron/handlers/download.ts | 11 +- electron/handlers/extension.ts | 25 ++- electron/handlers/fs.ts | 11 +- electron/handlers/update.ts | 2 +- electron/invokers/fs.ts | 14 ++ electron/main.ts | 6 +- electron/managers/extension.ts | 4 +- electron/managers/module.ts | 2 +- electron/package.json | 7 +- electron/utils/path.ts | 15 ++ extensions/inference-extension/src/index.ts | 1 - .../model-extension/src/@types/global.d.ts | 5 +- .../model-extension/src/@types/schema.ts | 21 --- .../src/helpers/modelParser.ts | 46 ------ extensions/model-extension/src/index.ts | 151 +++++++++++++----- extensions/model-extension/webpack.config.js | 3 - package.json | 11 +- web/containers/ItemCardSidebar/index.tsx | 11 +- .../BottomBar/DownloadingState/index.tsx | 8 +- .../Layout/TopBar/CommandSearch/index.tsx | 25 +-- web/containers/ModalCancelDownload/index.tsx | 26 ++- web/containers/Providers/EventListener.tsx | 14 +- web/hooks/useActiveModel.ts | 3 +- web/hooks/useCreateNewThread.ts | 20 +++ web/hooks/useDeleteConversation.ts | 4 +- web/hooks/useDownloadModel.ts | 3 +- web/hooks/useDownloadState.ts | 24 +-- web/hooks/useGetConfiguredModels.ts | 20 +-- web/hooks/useGetMostSuitableModelVersion.ts | 27 ---- web/hooks/useGetPerformanceTag.ts | 50 ------ web/hooks/useSendChatMessage.ts | 47 +----- web/hooks/useSetActiveThread.ts | 5 - web/screens/Chat/MessageToolbar/index.tsx | 6 +- web/screens/Chat/Sidebar/index.tsx | 18 ++- .../ExploreModels/ExploreModelItem/index.tsx | 73 ++------- .../ExploreModelItemHeader/index.tsx | 54 ++----- .../ExploreModels/ExploreModelList/index.tsx | 8 +- .../ExploreModels/ModelVersionItem/index.tsx | 15 +- web/screens/MyModels/BlankState/index.tsx | 2 +- web/screens/MyModels/index.tsx | 5 +- web/screens/Settings/index.tsx | 1 - web/types/downloadState.d.ts | 1 - web/utils/dummy.ts | 119 -------------- 49 files changed, 362 insertions(+), 662 deletions(-) delete mode 100644 extensions/model-extension/src/@types/schema.ts delete mode 100644 extensions/model-extension/src/helpers/modelParser.ts delete mode 100644 web/hooks/useGetMostSuitableModelVersion.ts delete mode 100644 web/hooks/useGetPerformanceTag.ts delete mode 100644 web/utils/dummy.ts diff --git a/.gitignore b/.gitignore index a273404c2..4bfb0576f 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ dist build .DS_Store electron/renderer +electron/models package-lock.json *.log diff --git a/core/src/core.ts b/core/src/core.ts index b593277cb..0e032f4d9 100644 --- a/core/src/core.ts +++ b/core/src/core.ts @@ -54,6 +54,9 @@ const getUserSpace = (): Promise => 
window.core.api?.getUserSpace(); const openFileExplorer: (path: string) => Promise = (path) => window.core.api?.openFileExplorer(path); +const getResourcePath: () => Promise = () => + window.core.api?.getResourcePath(); + /** * Register extension point function type definition */ @@ -74,4 +77,5 @@ export { appDataPath, getUserSpace, openFileExplorer, + getResourcePath, }; diff --git a/core/src/extensions/model.ts b/core/src/extensions/model.ts index 3a5cc1ba3..276d15dcc 100644 --- a/core/src/extensions/model.ts +++ b/core/src/extensions/model.ts @@ -1,5 +1,5 @@ import { BaseExtension } from "../extension"; -import { Model, ModelCatalog } from "../types/index"; +import { Model } from "../types/index"; /** * Model extension for managing models. @@ -43,5 +43,5 @@ export abstract class ModelExtension extends BaseExtension { * Gets a list of configured models. * @returns A Promise that resolves with an array of configured models. */ - abstract getConfiguredModels(): Promise; + abstract getConfiguredModels(): Promise; } diff --git a/core/src/fs.ts b/core/src/fs.ts index e8eb38e04..d12b473bf 100644 --- a/core/src/fs.ts +++ b/core/src/fs.ts @@ -62,6 +62,9 @@ const deleteFile: (path: string) => Promise = (path) => const appendFile: (path: string, data: string) => Promise = (path, data) => window.core.api?.appendFile(path, data); +const copyFile: (src: string, dest: string) => Promise = (src, dest) => + window.core.api?.copyFile(src, dest); + /** * Reads a file line by line. * @param {string} path - The path of the file to read. @@ -80,4 +83,5 @@ export const fs = { deleteFile, appendFile, readLineByLine, + copyFile, }; diff --git a/core/src/types/index.ts b/core/src/types/index.ts index 15e83772f..bbd1e98de 100644 --- a/core/src/types/index.ts +++ b/core/src/types/index.ts @@ -180,7 +180,7 @@ export interface Model { /** * The version of the model. */ - version: string; + version: number; /** * The model download source. It can be an external url or a local filepath. @@ -197,12 +197,6 @@ export interface Model { */ name: string; - /** - * The organization that owns the model (you!) - * Default: "you" - */ - owned_by: string; - /** * The Unix timestamp (in seconds) for when the model was created */ @@ -236,11 +230,16 @@ export interface Model { metadata: ModelMetadata; } +export type ModelMetadata = { + author: string; + tags: string[]; + size: number; +}; + /** * The Model transition states. */ export enum ModelState { - ToDownload = "to_download", Downloading = "downloading", Ready = "ready", Running = "running", @@ -250,65 +249,27 @@ export enum ModelState { * The available model settings. */ export type ModelSettingParams = { - ctx_len: number; - ngl: number; - embedding: boolean; - n_parallel: number; + ctx_len?: number; + ngl?: number; + embedding?: boolean; + n_parallel?: number; + system_prompt?: string; + user_prompt?: string; + ai_prompt?: string; }; /** * The available model runtime parameters. */ export type ModelRuntimeParam = { - temperature: number; - token_limit: number; - top_k: number; - top_p: number; - stream: boolean; + temperature?: number; + token_limit?: number; + top_k?: number; + top_p?: number; + stream?: boolean; + max_tokens?: number; }; -/** - * The metadata of the model. 
- */ -export type ModelMetadata = { - engine: string; - quantization: string; - size: number; - binaries: string[]; - maxRamRequired: number; - author: string; - avatarUrl: string; -}; - -/** - * Model type of the presentation object which will be presented to the user - * @data_transfer_object - */ -export interface ModelCatalog { - /** The unique id of the model.*/ - id: string; - /** The name of the model.*/ - name: string; - /** The avatar url of the model.*/ - avatarUrl: string; - /** The short description of the model.*/ - shortDescription: string; - /** The long description of the model.*/ - longDescription: string; - /** The author name of the model.*/ - author: string; - /** The version of the model.*/ - version: string; - /** The origin url of the model repo.*/ - modelUrl: string; - /** The timestamp indicating when this model was released.*/ - releaseDate: number; - /** The tags attached to the model description **/ - tags: string[]; - /** The available versions of this model to download. */ - availableVersions: Model[]; -} - /** * Assistant type defines the shape of an assistant object. * @stored diff --git a/electron/handlers/app.ts b/electron/handlers/app.ts index 4a6d56b6a..adbc875b2 100644 --- a/electron/handlers/app.ts +++ b/electron/handlers/app.ts @@ -1,9 +1,9 @@ import { app, ipcMain, shell } from 'electron' -import { ModuleManager } from '../managers/module' +import { ModuleManager } from './../managers/module' import { join } from 'path' -import { ExtensionManager } from '../managers/extension' -import { WindowManager } from '../managers/window' -import { userSpacePath } from '../utils/path' +import { ExtensionManager } from './../managers/extension' +import { WindowManager } from './../managers/window' +import { userSpacePath } from './../utils/path' export function handleAppIPCs() { /** diff --git a/electron/handlers/download.ts b/electron/handlers/download.ts index 316576e89..1776fccd9 100644 --- a/electron/handlers/download.ts +++ b/electron/handlers/download.ts @@ -1,9 +1,10 @@ import { app, ipcMain } from 'electron' -import { DownloadManager } from '../managers/download' +import { DownloadManager } from './../managers/download' import { resolve, join } from 'path' -import { WindowManager } from '../managers/window' +import { WindowManager } from './../managers/window' import request from 'request' -import { createWriteStream, unlink } from 'fs' +import { createWriteStream } from 'fs' +import { getResourcePath } from './../utils/path' const progress = require('request-progress') export function handleDownloaderIPCs() { @@ -37,6 +38,10 @@ export function handleDownloaderIPCs() { rq?.abort() }) + ipcMain.handle('getResourcePath', async (_event) => { + return getResourcePath() + }) + /** * Downloads a file from a given URL. * @param _event - The IPC event object. 
diff --git a/electron/handlers/extension.ts b/electron/handlers/extension.ts index 1af1be36c..5c2c13ff4 100644 --- a/electron/handlers/extension.ts +++ b/electron/handlers/extension.ts @@ -1,19 +1,16 @@ -import { app, ipcMain, webContents } from 'electron' -import { readdirSync, rmdir, writeFileSync } from 'fs' -import { ModuleManager } from '../managers/module' +import { ipcMain, webContents } from 'electron' +import { readdirSync } from 'fs' +import { ModuleManager } from './../managers/module' import { join, extname } from 'path' -import { ExtensionManager } from '../managers/extension' -import { WindowManager } from '../managers/window' -import { manifest, tarball } from 'pacote' import { getActiveExtensions, getAllExtensions, installExtensions, -} from '../extension/store' -import { getExtension } from '../extension/store' -import { removeExtension } from '../extension/store' -import Extension from '../extension/extension' -import { userSpacePath } from '../utils/path' +} from './../extension/store' +import { getExtension } from './../extension/store' +import { removeExtension } from './../extension/store' +import Extension from './../extension/extension' +import { getResourcePath, userSpacePath } from './../utils/path' export function handleExtensionIPCs() { /**MARK: General handlers */ @@ -48,11 +45,7 @@ export function handleExtensionIPCs() { * @returns An array of paths to the base extensions. */ ipcMain.handle('extension:baseExtensions', async (_event) => { - const baseExtensionPath = join( - __dirname, - '../', - app.isPackaged ? '../../app.asar.unpacked/pre-install' : '../pre-install' - ) + const baseExtensionPath = join(getResourcePath(), 'pre-install') return readdirSync(baseExtensionPath) .filter((file) => extname(file) === '.tgz') .map((file) => join(baseExtensionPath, file)) diff --git a/electron/handlers/fs.ts b/electron/handlers/fs.ts index 9c39c1092..16cef6eb6 100644 --- a/electron/handlers/fs.ts +++ b/electron/handlers/fs.ts @@ -1,8 +1,9 @@ -import { app, ipcMain } from 'electron' +import { ipcMain } from 'electron' import * as fs from 'fs' +import fse from 'fs-extra' import { join } from 'path' import readline from 'readline' -import { userSpacePath } from '../utils/path' +import { userSpacePath } from './../utils/path' /** * Handles file system operations. @@ -145,6 +146,12 @@ export function handleFsIPCs() { } }) + ipcMain.handle('copyFile', async (_event, src: string, dest: string) => { + console.debug(`Copying file from ${src} to ${dest}`) + + return fse.copySync(src, dest, { overwrite: false }) + }) + /** * Reads a file line by line. * @param event - The event object. diff --git a/electron/handlers/update.ts b/electron/handlers/update.ts index 340db54b9..08d32fffe 100644 --- a/electron/handlers/update.ts +++ b/electron/handlers/update.ts @@ -1,5 +1,5 @@ import { app, dialog } from "electron"; -import { WindowManager } from "../managers/window"; +import { WindowManager } from "./../managers/window"; import { autoUpdater } from "electron-updater"; export function handleAppUpdates() { diff --git a/electron/invokers/fs.ts b/electron/invokers/fs.ts index d7d204d0a..309562ad6 100644 --- a/electron/invokers/fs.ts +++ b/electron/invokers/fs.ts @@ -67,6 +67,20 @@ export function fsInvokers() { * @param {string} path - The path of the directory to remove. */ rmdir: (path: string) => ipcRenderer.invoke('rmdir', path), + + /** + * Copies a file from the source path to the destination path. + * @param {string} src - The source path of the file to copy. 
+ * @param {string} dest - The destination path where the file should be copied. + */ + copyFile: (src: string, dest: string) => ipcRenderer.invoke('copyFile', src, dest), + + /** + * Retrieves the resource path. + * @returns {Promise} A promise that resolves to the resource path. + */ + getResourcePath: () => ipcRenderer.invoke('getResourcePath'), + } return interfaces diff --git a/electron/main.ts b/electron/main.ts index cfd6ca665..189836866 100644 --- a/electron/main.ts +++ b/electron/main.ts @@ -1,7 +1,7 @@ import { app, BrowserWindow } from 'electron' import { join } from 'path' import { setupMenu } from './utils/menu' -import { handleFsIPCs } from './handlers/fs' +import { createUserSpace, getResourcePath } from './utils/path' /** * Managers @@ -18,9 +18,11 @@ import { handleThemesIPCs } from './handlers/theme' import { handleExtensionIPCs } from './handlers/extension' import { handleAppIPCs } from './handlers/app' import { handleAppUpdates } from './handlers/update' +import { handleFsIPCs } from './handlers/fs' app .whenReady() + .then(createUserSpace) .then(ExtensionManager.instance.migrateExtensions) .then(ExtensionManager.instance.setupExtensions) .then(setupMenu) @@ -56,7 +58,7 @@ function createMainWindow() { }) const startURL = app.isPackaged - ? `file://${join(__dirname, '../renderer/index.html')}` + ? `file://${join(__dirname, '..', 'renderer', 'index.html')}` : 'http://localhost:3000' /* Load frontend app to the window */ diff --git a/electron/managers/extension.ts b/electron/managers/extension.ts index e23c75ddf..7eef24877 100644 --- a/electron/managers/extension.ts +++ b/electron/managers/extension.ts @@ -1,10 +1,10 @@ import { app } from 'electron' -import { init } from '../extension' +import { init } from './../extension' import { join, resolve } from 'path' import { rmdir } from 'fs' import Store from 'electron-store' import { existsSync, mkdirSync, writeFileSync } from 'fs' -import { userSpacePath } from '../utils/path' +import { userSpacePath } from './../utils/path' /** * Manages extension installation and migration. */ diff --git a/electron/managers/module.ts b/electron/managers/module.ts index 43dda0fb6..dc16d0d22 100644 --- a/electron/managers/module.ts +++ b/electron/managers/module.ts @@ -1,4 +1,4 @@ -import { dispose } from "../utils/disposable"; +import { dispose } from "./../utils/disposable"; /** * Manages imported modules. 
diff --git a/electron/package.json b/electron/package.json index 46e9b328c..627f5ad54 100644 --- a/electron/package.json +++ b/electron/package.json @@ -13,10 +13,12 @@ "renderer/**/*", "build/*.{js,map}", "build/**/*.{js,map}", - "pre-install" + "pre-install", + "models/**/*" ], "asarUnpack": [ - "pre-install" + "pre-install", + "models" ], "publish": [ { @@ -70,6 +72,7 @@ "@uiball/loaders": "^1.3.0", "electron-store": "^8.1.0", "electron-updater": "^6.1.4", + "fs-extra": "^11.2.0", "pacote": "^17.0.4", "request": "^2.88.2", "request-progress": "^3.0.0", diff --git a/electron/utils/path.ts b/electron/utils/path.ts index 30eb0ef2d..8f3092561 100644 --- a/electron/utils/path.ts +++ b/electron/utils/path.ts @@ -1,4 +1,19 @@ import { join } from 'path' import { app } from 'electron' +import { mkdir } from 'fs-extra' + +export async function createUserSpace(): Promise { + return mkdir(userSpacePath).catch(() => {}) +} export const userSpacePath = join(app.getPath('home'), 'jan') + +export function getResourcePath() { + let appPath = join(app.getAppPath(), '..', 'app.asar.unpacked') + + if (!app.isPackaged) { + // for development mode + appPath = join(__dirname, '..', '..') + } + return appPath +} diff --git a/extensions/inference-extension/src/index.ts b/extensions/inference-extension/src/index.ts index 1ba471ab1..e8e7758bb 100644 --- a/extensions/inference-extension/src/index.ts +++ b/extensions/inference-extension/src/index.ts @@ -146,7 +146,6 @@ export default class JanInferenceExtension implements InferenceExtension { object: "thread.message", }; events.emit(EventName.OnMessageResponse, message); - console.log(JSON.stringify(data, null, 2)); instance.isCancelled = false; instance.controller = new AbortController(); diff --git a/extensions/model-extension/src/@types/global.d.ts b/extensions/model-extension/src/@types/global.d.ts index 87056c342..bb030c762 100644 --- a/extensions/model-extension/src/@types/global.d.ts +++ b/extensions/model-extension/src/@types/global.d.ts @@ -1,3 +1,2 @@ -declare const PLUGIN_NAME: string; -declare const MODULE_PATH: string; -declare const MODEL_CATALOG_URL: string; +declare const PLUGIN_NAME: string +declare const MODULE_PATH: string diff --git a/extensions/model-extension/src/@types/schema.ts b/extensions/model-extension/src/@types/schema.ts deleted file mode 100644 index 1d3c3a7d1..000000000 --- a/extensions/model-extension/src/@types/schema.ts +++ /dev/null @@ -1,21 +0,0 @@ -interface Version { - name: string - quantMethod: string - bits: number - size: number - maxRamRequired: number - usecase: string - downloadLink: string -} -interface ModelSchema { - id: string - name: string - shortDescription: string - avatarUrl: string - longDescription: string - author: string - version: string - modelUrl: string - tags: string[] - versions: Version[] -} diff --git a/extensions/model-extension/src/helpers/modelParser.ts b/extensions/model-extension/src/helpers/modelParser.ts deleted file mode 100644 index 3a397fb7d..000000000 --- a/extensions/model-extension/src/helpers/modelParser.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { ModelCatalog } from '@janhq/core' - -export const parseToModel = (modelGroup): ModelCatalog => { - const modelVersions = [] - modelGroup.versions.forEach((v) => { - const model = { - object: 'model', - version: modelGroup.version, - source_url: v.downloadLink, - id: v.name, - name: v.name, - owned_by: 'you', - created: 0, - description: modelGroup.longDescription, - state: 'to_download', - settings: v.settings, - parameters: v.parameters, - 
metadata: { - engine: '', - quantization: v.quantMethod, - size: v.size, - binaries: [], - maxRamRequired: v.maxRamRequired, - author: modelGroup.author, - avatarUrl: modelGroup.avatarUrl, - }, - } - modelVersions.push(model) - }) - - const modelCatalog: ModelCatalog = { - id: modelGroup.id, - name: modelGroup.name, - avatarUrl: modelGroup.avatarUrl, - shortDescription: modelGroup.shortDescription, - longDescription: modelGroup.longDescription, - author: modelGroup.author, - version: modelGroup.version, - modelUrl: modelGroup.modelUrl, - releaseDate: modelGroup.createdAt, - tags: modelGroup.tags, - availableVersions: modelVersions, - } - - return modelCatalog -} diff --git a/extensions/model-extension/src/index.ts b/extensions/model-extension/src/index.ts index a2b0be304..d0267b84e 100644 --- a/extensions/model-extension/src/index.ts +++ b/extensions/model-extension/src/index.ts @@ -1,6 +1,12 @@ -import { ExtensionType, fs, downloadFile, abortDownload } from '@janhq/core' -import { ModelExtension, Model, ModelCatalog } from '@janhq/core' -import { parseToModel } from './helpers/modelParser' +import { + ExtensionType, + fs, + downloadFile, + abortDownload, + getResourcePath, + getUserSpace, +} from '@janhq/core' +import { ModelExtension, Model, ModelState } from '@janhq/core' import { join } from 'path' /** @@ -24,10 +30,7 @@ export default class JanModelExtension implements ModelExtension { * @override */ onLoad(): void { - /** Cloud Native - * TODO: Fetch all downloading progresses? - **/ - fs.mkdir(JanModelExtension._homeDir) + this.copyModelsToHomeDir() } /** @@ -36,6 +39,30 @@ export default class JanModelExtension implements ModelExtension { */ onUnload(): void {} + private async copyModelsToHomeDir() { + try { + // list all of the files under the home directory + const files = await fs.listFiles('') + + if (files.includes(JanModelExtension._homeDir)) { + // ignore if the model is already downloaded + console.debug('Model already downloaded') + return + } + + // copy models folder from resources to home directory + const resourePath = await getResourcePath() + const srcPath = join(resourePath, 'models') + + const userSpace = await getUserSpace() + const destPath = join(userSpace, JanModelExtension._homeDir) + + await fs.copyFile(srcPath, destPath) + } catch (err) { + console.error(err) + } + } + /** * Downloads a machine learning model. * @param model - The model to download. @@ -57,11 +84,11 @@ export default class JanModelExtension implements ModelExtension { * @returns {Promise} A promise that resolves when the download has been cancelled. 
*/ async cancelModelDownload(modelId: string): Promise { - return abortDownload(join(JanModelExtension._homeDir, modelId, modelId)).then( - () => { - fs.rmdir(join(JanModelExtension._homeDir, modelId)) - } - ) + return abortDownload( + join(JanModelExtension._homeDir, modelId, modelId) + ).then(() => { + fs.deleteFile(join(JanModelExtension._homeDir, modelId, modelId)) + }) } /** @@ -72,7 +99,26 @@ export default class JanModelExtension implements ModelExtension { async deleteModel(modelId: string): Promise { try { const dirPath = join(JanModelExtension._homeDir, modelId) - await fs.rmdir(dirPath) + + // remove all files under dirPath except model.json + const files = await fs.listFiles(dirPath) + const deletePromises = files.map((fileName: string) => { + if (fileName !== JanModelExtension._modelMetadataFileName) { + return fs.deleteFile(join(dirPath, fileName)) + } + }) + await Promise.allSettled(deletePromises) + + // update the state as default + const jsonFilePath = join( + dirPath, + JanModelExtension._modelMetadataFileName + ) + const json = await fs.readFile(jsonFilePath) + const model = JSON.parse(json) as Model + delete model.state + + await fs.writeFile(jsonFilePath, JSON.stringify(model, null, 2)) } catch (err) { console.error(err) } @@ -91,7 +137,17 @@ export default class JanModelExtension implements ModelExtension { ) try { - await fs.writeFile(jsonFilePath, JSON.stringify(model, null, 2)) + await fs.writeFile( + jsonFilePath, + JSON.stringify( + { + ...model, + state: ModelState.Ready, + }, + null, + 2 + ) + ) } catch (err) { console.error(err) } @@ -102,39 +158,62 @@ export default class JanModelExtension implements ModelExtension { * @returns A Promise that resolves with an array of all models. */ async getDownloadedModels(): Promise { - const results: Model[] = [] - const allDirs: string[] = await fs.listFiles(JanModelExtension._homeDir) - for (const dir of allDirs) { - const modelDirPath = join(JanModelExtension._homeDir, dir) - const isModelDir = await fs.isDirectory(modelDirPath) - if (!isModelDir) { - // if not a directory, ignore - continue + const models = await this.getModelsMetadata() + return models.filter((model) => model.state === ModelState.Ready) + } + + private async getModelsMetadata(): Promise { + try { + const filesUnderJanRoot = await fs.listFiles('') + if (!filesUnderJanRoot.includes(JanModelExtension._homeDir)) { + console.debug('model folder not found') + return [] } - const jsonFiles: string[] = (await fs.listFiles(modelDirPath)).filter( - (fileName: string) => fileName === JanModelExtension._modelMetadataFileName - ) + const files: string[] = await fs.listFiles(JanModelExtension._homeDir) - for (const json of jsonFiles) { - const model: Model = JSON.parse( - await fs.readFile(join(modelDirPath, json)) + const allDirectories: string[] = [] + for (const file of files) { + const isDirectory = await fs.isDirectory( + join(JanModelExtension._homeDir, file) ) - results.push(model) + if (isDirectory) { + allDirectories.push(file) + } } - } - return results + const readJsonPromises = allDirectories.map((dirName) => { + const jsonPath = join( + JanModelExtension._homeDir, + dirName, + JanModelExtension._modelMetadataFileName + ) + return this.readModelMetadata(jsonPath) + }) + const results = await Promise.allSettled(readJsonPromises) + const modelData = results.map((result) => { + if (result.status === 'fulfilled') { + return JSON.parse(result.value) as Model + } else { + console.error(result.reason) + } + }) + return modelData + } catch (err) { + 
console.error(err) + return [] + } + } + + private readModelMetadata(path: string) { + return fs.readFile(join(path)) } /** * Gets all available models. * @returns A Promise that resolves with an array of all models. */ - getConfiguredModels(): Promise { - // Add a timestamp to the URL to prevent caching - return import( - /* webpackIgnore: true */ MODEL_CATALOG_URL + `?t=${Date.now()}` - ).then((module) => module.default.map((e) => parseToModel(e))) + async getConfiguredModels(): Promise { + return this.getModelsMetadata() } } diff --git a/extensions/model-extension/webpack.config.js b/extensions/model-extension/webpack.config.js index 3475516ed..a9332da99 100644 --- a/extensions/model-extension/webpack.config.js +++ b/extensions/model-extension/webpack.config.js @@ -19,9 +19,6 @@ module.exports = { new webpack.DefinePlugin({ PLUGIN_NAME: JSON.stringify(packageJson.name), MODULE_PATH: JSON.stringify(`${packageJson.name}/${packageJson.module}`), - MODEL_CATALOG_URL: JSON.stringify( - 'https://cdn.jsdelivr.net/npm/@janhq/models@latest/dist/index.js' - ), }), ], output: { diff --git a/package.json b/package.json index a2476887f..9192a0238 100644 --- a/package.json +++ b/package.json @@ -6,8 +6,7 @@ "uikit", "core", "electron", - "web", - "server" + "web" ], "nohoist": [ "uikit", @@ -17,15 +16,13 @@ "electron", "electron/**", "web", - "web/**", - "server", - "server/**" + "web/**" ] }, "scripts": { "lint": "yarn workspace jan lint && yarn workspace jan-web lint", "test": "yarn workspace jan test:e2e", - "dev:electron": "yarn workspace jan dev", + "dev:electron": "cpx \"models/**\" \"electron/models/\" && yarn workspace jan dev", "dev:web": "yarn workspace jan-web dev", "dev": "concurrently --kill-others \"yarn dev:web\" \"wait-on http://localhost:3000 && yarn dev:electron\"", "test-local": "yarn lint && yarn build:test && yarn test", @@ -33,7 +30,7 @@ "build:uikit": "yarn workspace @janhq/uikit install && yarn workspace @janhq/uikit build", "build:core": "cd core && yarn install && yarn run build", "build:web": "yarn workspace jan-web build && cpx \"web/out/**\" \"electron/renderer/\"", - "build:electron": "yarn workspace jan build", + "build:electron": "yarn workspace jan build && cpx \"models/**\" \"electron/models/\"", "build:electron:test": "yarn workspace jan build:test", "build:extensions": "rimraf ./electron/pre-install/*.tgz && concurrently --kill-others-on-fail \"cd ./extensions/conversational-extension && npm install && npm run build:publish\" \"cd ./extensions/inference-extension && npm install && npm run build:publish\" \"cd ./extensions/model-extension && npm install && npm run build:publish\" \"cd ./extensions/monitoring-extension && npm install && npm run build:publish\" \"cd ./extensions/assistant-extension && npm install && npm run build:publish\"", "build:test": "yarn build:web && yarn workspace jan build:test", diff --git a/web/containers/ItemCardSidebar/index.tsx b/web/containers/ItemCardSidebar/index.tsx index b6a7bacbd..627d7f45d 100644 --- a/web/containers/ItemCardSidebar/index.tsx +++ b/web/containers/ItemCardSidebar/index.tsx @@ -1,9 +1,16 @@ type Props = { title: string description?: string + disabled?: boolean + onChange?: (text?: string) => void } -export default function ItemCardSidebar({ description, title }: Props) { +export default function ItemCardSidebar({ + description, + title, + disabled, + onChange, +}: Props) { return (
@@ -11,9 +18,11 @@ export default function ItemCardSidebar({ description, title }: Props) {
onChange?.(e.target.value)} />
) diff --git a/web/containers/Layout/BottomBar/DownloadingState/index.tsx b/web/containers/Layout/BottomBar/DownloadingState/index.tsx index bc456bf80..1aad0fb1c 100644 --- a/web/containers/Layout/BottomBar/DownloadingState/index.tsx +++ b/web/containers/Layout/BottomBar/DownloadingState/index.tsx @@ -69,18 +69,14 @@ export default function DownloadingState() { />
-

{item?.fileName}

+

{item?.modelId}

{formatDownloadPercentage(item?.percent)}
+ {activeThread && ( + + )} ) } diff --git a/web/containers/ModalCancelDownload/index.tsx b/web/containers/ModalCancelDownload/index.tsx index de54e1cf8..8619c543c 100644 --- a/web/containers/ModalCancelDownload/index.tsx +++ b/web/containers/ModalCancelDownload/index.tsx @@ -24,34 +24,30 @@ import { extensionManager } from '@/extension' import { downloadingModelsAtom } from '@/helpers/atoms/Model.atom' type Props = { - suitableModel: Model + model: Model isFromList?: boolean } -export default function ModalCancelDownload({ - suitableModel, - isFromList, -}: Props) { +export default function ModalCancelDownload({ model, isFromList }: Props) { const { modelDownloadStateAtom } = useDownloadState() const downloadAtom = useMemo( - () => atom((get) => get(modelDownloadStateAtom)[suitableModel.name]), + () => atom((get) => get(modelDownloadStateAtom)[model.id]), // eslint-disable-next-line react-hooks/exhaustive-deps - [suitableModel.name] + [model.id] ) const models = useAtomValue(downloadingModelsAtom) const downloadState = useAtomValue(downloadAtom) + const cancelText = `Cancel ${formatDownloadPercentage(downloadState.percent)}` return ( {isFromList ? ( ) : ( - + )} @@ -60,7 +56,7 @@ export default function ModalCancelDownload({

Are you sure you want to cancel the download of  - {downloadState?.fileName}? + {downloadState?.modelId}?

@@ -71,11 +67,7 @@ export default function ModalCancelDownload({ - - {show && ( - - )} -
- )}
) diff --git a/web/screens/ExploreModels/ExploreModelItemHeader/index.tsx b/web/screens/ExploreModels/ExploreModelItemHeader/index.tsx index c845c5a45..b51ec164c 100644 --- a/web/screens/ExploreModels/ExploreModelItemHeader/index.tsx +++ b/web/screens/ExploreModels/ExploreModelItemHeader/index.tsx @@ -1,7 +1,7 @@ /* eslint-disable react-hooks/exhaustive-deps */ -import { useCallback, useEffect, useMemo, useState } from 'react' +import { useCallback, useMemo, useState } from 'react' -import { Model, ModelCatalog } from '@janhq/core' +import { Model } from '@janhq/core' import { Badge, Button } from '@janhq/uikit' import { atom, useAtomValue } from 'jotai' @@ -15,67 +15,41 @@ import { ModelPerformance, TagType } from '@/constants/tagType' import useDownloadModel from '@/hooks/useDownloadModel' import { useDownloadState } from '@/hooks/useDownloadState' import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels' -import useGetPerformanceTag from '@/hooks/useGetPerformanceTag' import { useMainViewState } from '@/hooks/useMainViewState' import { toGigabytes } from '@/utils/converter' -import { totalRamAtom } from '@/helpers/atoms/SystemBar.atom' - type Props = { - suitableModel: Model - exploreModel: ModelCatalog + model: Model } -const ExploreModelItemHeader: React.FC = ({ - suitableModel, - exploreModel, -}) => { +const ExploreModelItemHeader: React.FC = ({ model }) => { const { downloadModel } = useDownloadModel() const { downloadedModels } = useGetDownloadedModels() const { modelDownloadStateAtom, downloadStates } = useDownloadState() - const { getPerformanceForModel } = useGetPerformanceTag() const [title, setTitle] = useState('Recommended') - const totalRam = useAtomValue(totalRamAtom) + const [performanceTag, setPerformanceTag] = useState( ModelPerformance.PerformancePositive ) const downloadAtom = useMemo( - () => atom((get) => get(modelDownloadStateAtom)[suitableModel.name]), - [suitableModel.name] + () => atom((get) => get(modelDownloadStateAtom)[model.id]), + [model.id] ) const downloadState = useAtomValue(downloadAtom) const { setMainViewState } = useMainViewState() - const calculatePerformance = useCallback( - (suitableModel: Model) => async () => { - const { title, performanceTag } = await getPerformanceForModel( - suitableModel, - totalRam - ) - setPerformanceTag(performanceTag) - setTitle(title) - }, - [totalRam] - ) - - useEffect(() => { - calculatePerformance(suitableModel) - }, [suitableModel]) - const onDownloadClick = useCallback(() => { - downloadModel(suitableModel) + downloadModel(model) // eslint-disable-next-line react-hooks/exhaustive-deps - }, [suitableModel]) + }, [model]) - // TODO: Comparing between Model Id and Version Name? - const isDownloaded = - downloadedModels.find((model) => model.id === suitableModel.name) != null + const isDownloaded = downloadedModels.find((md) => md.id === model.id) != null let downloadButton = ( ) @@ -93,7 +67,7 @@ const ExploreModelItemHeader: React.FC = ({ } if (downloadState != null && downloadStates.length > 0) { - downloadButton = + downloadButton = } const renderBadge = (performance: TagType) => { @@ -115,7 +89,7 @@ const ExploreModelItemHeader: React.FC = ({ return (
- {exploreModel.name} + {model.name} {performanceTag && renderBadge(performanceTag)}
{downloadButton} diff --git a/web/screens/ExploreModels/ExploreModelList/index.tsx b/web/screens/ExploreModels/ExploreModelList/index.tsx index 8c0c9bdb4..eea9f0238 100644 --- a/web/screens/ExploreModels/ExploreModelList/index.tsx +++ b/web/screens/ExploreModels/ExploreModelList/index.tsx @@ -1,16 +1,14 @@ -import { ModelCatalog } from '@janhq/core' +import { Model } from '@janhq/core' import ExploreModelItem from '@/screens/ExploreModels/ExploreModelItem' type Props = { - models: ModelCatalog[] + models: Model[] } const ExploreModelList: React.FC = ({ models }) => (
- {models?.map((item, i) => ( - - ))} + {models?.map((model) => )}
) diff --git a/web/screens/ExploreModels/ModelVersionItem/index.tsx b/web/screens/ExploreModels/ModelVersionItem/index.tsx index f7d09307b..e16c477f6 100644 --- a/web/screens/ExploreModels/ModelVersionItem/index.tsx +++ b/web/screens/ExploreModels/ModelVersionItem/index.tsx @@ -2,7 +2,7 @@ import React, { useMemo } from 'react' import { Model } from '@janhq/core' -import { Badge, Button } from '@janhq/uikit' +import { Button } from '@janhq/uikit' import { atom, useAtomValue } from 'jotai' import ModalCancelDownload from '@/containers/ModalCancelDownload' @@ -63,7 +63,7 @@ const ModelVersionItem: React.FC = ({ model }) => { } if (downloadState != null && downloadStates.length > 0) { - downloadButton = + downloadButton = } return ( @@ -74,16 +74,7 @@ const ModelVersionItem: React.FC = ({ model }) => {
-
- {`${toGigabytes( - model.metadata.maxRamRequired - )} RAM required`} - {toGigabytes(model.metadata.size)} -
+
{downloadButton}
diff --git a/web/screens/MyModels/BlankState/index.tsx b/web/screens/MyModels/BlankState/index.tsx index a820440d0..c0d7be6bb 100644 --- a/web/screens/MyModels/BlankState/index.tsx +++ b/web/screens/MyModels/BlankState/index.tsx @@ -55,7 +55,7 @@ export default function BlankStateMyModel() { } />
-

{item?.fileName}

+

{item?.modelId}

{formatDownloadPercentage(item?.percent)}
diff --git a/web/screens/MyModels/index.tsx b/web/screens/MyModels/index.tsx index d9c2d2880..c8176f010 100644 --- a/web/screens/MyModels/index.tsx +++ b/web/screens/MyModels/index.tsx @@ -63,10 +63,7 @@ const MyModelsScreen = () => {
- + {model.metadata.author.charAt(0)} diff --git a/web/screens/Settings/index.tsx b/web/screens/Settings/index.tsx index ced7589b5..63c343add 100644 --- a/web/screens/Settings/index.tsx +++ b/web/screens/Settings/index.tsx @@ -30,7 +30,6 @@ const SettingsScreen = () => { setMenus(menu) }, []) - const preferenceExtensions = preferenceItems .map((x) => x.extensionnName) .filter((x, i) => { diff --git a/web/types/downloadState.d.ts b/web/types/downloadState.d.ts index cb154522d..3c3389b4f 100644 --- a/web/types/downloadState.d.ts +++ b/web/types/downloadState.d.ts @@ -4,7 +4,6 @@ type DownloadState = { speed: number percent: number size: DownloadSize - fileName: string error?: string } diff --git a/web/utils/dummy.ts b/web/utils/dummy.ts deleted file mode 100644 index bde61e38f..000000000 --- a/web/utils/dummy.ts +++ /dev/null @@ -1,119 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { ModelCatalog, ModelState } from '@janhq/core' - -export const dummyModel: ModelCatalog = { - id: 'aladar/TinyLLama-v0-GGUF', - name: 'TinyLLama-v0-GGUF', - shortDescription: 'TinyLlama-1.1B-Chat-v0.3-GGUF', - longDescription: 'https://huggingface.co/aladar/TinyLLama-v0-GGUF/tree/main', - avatarUrl: '', - releaseDate: Date.now(), - author: 'aladar', - version: '1.0.0', - modelUrl: 'aladar/TinyLLama-v0-GGUF', - tags: ['freeform', 'tags'], - availableVersions: [ - { - object: 'model', - version: '1.0.0', - source_url: - 'https://huggingface.co/aladar/TinyLLama-v0-GGUF/resolve/main/TinyLLama-v0.Q8_0.gguf', - id: 'TinyLLama-v0.Q8_0.gguf', - name: 'TinyLLama-v0.Q8_0.gguf', - owned_by: 'you', - created: 0, - description: '', - state: ModelState.ToDownload, - settings: { - ctx_len: 2048, - ngl: 100, - embedding: true, - n_parallel: 4, - }, - parameters: { - temperature: 0.7, - token_limit: 2048, - top_k: 0, - top_p: 1, - stream: true, - }, - metadata: { - engine: '', - quantization: '', - size: 5816320, - binaries: [], - maxRamRequired: 256000000, - author: 'aladar', - avatarUrl: '', - }, - }, - { - object: 'model', - version: '1.0.0', - source_url: - 'https://huggingface.co/aladar/TinyLLama-v0-GGUF/resolve/main/TinyLLama-v0.f16.gguf', - id: 'TinyLLama-v0.f16.gguf', - name: 'TinyLLama-v0.f16.gguf', - owned_by: 'you', - created: 0, - description: '', - state: ModelState.ToDownload, - settings: { - ctx_len: 2048, - ngl: 100, - embedding: true, - n_parallel: 4, - }, - parameters: { - temperature: 0.7, - token_limit: 2048, - top_k: 0, - top_p: 1, - stream: true, - }, - metadata: { - engine: '', - quantization: '', - size: 5816320, - binaries: [], - maxRamRequired: 256000000, - author: 'aladar', - avatarUrl: '', - }, - }, - { - object: 'model', - version: '1.0.0', - source_url: - 'https://huggingface.co/aladar/TinyLLama-v0-GGUF/resolve/main/TinyLLama-v0.f32.gguf', - id: 'TinyLLama-v0.f32.gguf', - name: 'TinyLLama-v0.f32.gguf', - owned_by: 'you', - created: 0, - description: '', - state: ModelState.ToDownload, - settings: { - ctx_len: 2048, - ngl: 100, - embedding: true, - n_parallel: 4, - }, - parameters: { - temperature: 0.7, - token_limit: 2048, - top_k: 0, - top_p: 1, - stream: true, - }, - metadata: { - engine: '', - quantization: '', - size: 5816320, - binaries: [], - maxRamRequired: 256000000, - author: 'aladar', - avatarUrl: '', - }, - }, - ], -} From 56f0aa07335b1852af0d7457157f475213089be3 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:34:37 +0900 Subject: [PATCH 04/19] add custom meta for each page --- 
docs/docs/about/about.md | 2 ++ docs/docs/community/community.md | 2 ++ docs/docs/docs/assistants.md | 2 ++ docs/docs/docs/extensions.md | 2 ++ docs/docs/docs/models.md | 2 ++ docs/docs/docs/modules.md | 2 ++ docs/docs/docs/server.md | 2 ++ docs/docs/docs/themes.md | 2 ++ docs/docs/docs/tools.md | 2 ++ docs/docs/handbook/engineering/engineering.md | 2 ++ docs/docs/handbook/handbook.md | 2 ++ docs/docs/hardware/community.md | 2 ++ docs/docs/install/from-source.md | 2 ++ docs/docs/install/linux.md | 2 ++ docs/docs/install/mac.md | 2 ++ docs/docs/install/overview.md | 2 ++ docs/docs/install/windows.md | 2 ++ docs/docs/intro/how-jan-works.md | 2 ++ docs/docs/intro/introduction.md | 2 ++ docs/docs/intro/quickstart.md | 2 ++ docs/docs/specs/architecture.md | 2 ++ docs/docs/specs/engineering/assistants.md | 2 ++ docs/docs/specs/engineering/chats.md | 2 ++ docs/docs/specs/engineering/files.md | 2 ++ docs/docs/specs/engineering/fine-tuning.md | 2 ++ docs/docs/specs/engineering/messages.md | 2 ++ docs/docs/specs/engineering/models.md | 2 ++ docs/docs/specs/engineering/prompts.md | 2 ++ docs/docs/specs/engineering/threads.md | 2 ++ docs/docs/specs/file-based.md | 2 ++ docs/docs/specs/jan.md | 2 ++ docs/docs/specs/product/chat.md | 2 ++ docs/docs/specs/product/hub.md | 2 ++ docs/docs/specs/product/settings.md | 2 ++ docs/docs/specs/product/system-monitor.md | 2 ++ docs/docs/specs/user-interface.md | 2 ++ 36 files changed, 72 insertions(+) diff --git a/docs/docs/about/about.md b/docs/docs/about/about.md index 4a82d93d6..5fabb707e 100644 --- a/docs/docs/about/about.md +++ b/docs/docs/about/about.md @@ -1,5 +1,7 @@ --- title: About Jan +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- Jan believes in the need for an open source AI ecosystem, and are building the infra and tooling to allow open source AIs to compete on a level playing field with proprietary ones. diff --git a/docs/docs/community/community.md b/docs/docs/community/community.md index d6807f38a..623cea8e8 100644 --- a/docs/docs/community/community.md +++ b/docs/docs/community/community.md @@ -1,5 +1,7 @@ --- title: Community +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- - [ ] Social media links \ No newline at end of file diff --git a/docs/docs/docs/assistants.md b/docs/docs/docs/assistants.md index 0edc163ba..2f4b1f99f 100644 --- a/docs/docs/docs/assistants.md +++ b/docs/docs/docs/assistants.md @@ -1,3 +1,5 @@ --- title: Build an Assistant +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- \ No newline at end of file diff --git a/docs/docs/docs/extensions.md b/docs/docs/docs/extensions.md index 87edbf863..56cfdfe51 100644 --- a/docs/docs/docs/extensions.md +++ b/docs/docs/docs/extensions.md @@ -1,5 +1,7 @@ --- title: Extending Jan +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Overview diff --git a/docs/docs/docs/models.md b/docs/docs/docs/models.md index 9e929b76b..4e123e746 100644 --- a/docs/docs/docs/models.md +++ b/docs/docs/docs/models.md @@ -1,3 +1,5 @@ --- title: Model Management +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- \ No newline at end of file diff --git a/docs/docs/docs/modules.md b/docs/docs/docs/modules.md index 41a112417..cb7888f67 100644 --- a/docs/docs/docs/modules.md +++ b/docs/docs/docs/modules.md @@ -1,3 +1,5 @@ --- title: Build a Module +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- \ No newline at end of file diff --git a/docs/docs/docs/server.md b/docs/docs/docs/server.md index 05a715932..d309d8817 100644 --- a/docs/docs/docs/server.md +++ b/docs/docs/docs/server.md @@ -1,5 +1,7 @@ --- title: API Server +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::warning diff --git a/docs/docs/docs/themes.md b/docs/docs/docs/themes.md index 2d07b30e1..3edfaf490 100644 --- a/docs/docs/docs/themes.md +++ b/docs/docs/docs/themes.md @@ -1,3 +1,5 @@ --- title: Build a Theme +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- \ No newline at end of file diff --git a/docs/docs/docs/tools.md b/docs/docs/docs/tools.md index 3c6c721e4..d8dd132a8 100644 --- a/docs/docs/docs/tools.md +++ b/docs/docs/docs/tools.md @@ -1,3 +1,5 @@ --- title: Build a Tool +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- \ No newline at end of file diff --git a/docs/docs/handbook/engineering/engineering.md b/docs/docs/handbook/engineering/engineering.md index e320b25fe..3ca9952c4 100644 --- a/docs/docs/handbook/engineering/engineering.md +++ b/docs/docs/handbook/engineering/engineering.md @@ -1,5 +1,7 @@ --- title: Engineering +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Connecting to Rigs diff --git a/docs/docs/handbook/handbook.md b/docs/docs/handbook/handbook.md index 674b96c76..a0485da61 100644 --- a/docs/docs/handbook/handbook.md +++ b/docs/docs/handbook/handbook.md @@ -1,6 +1,8 @@ --- title: Onboarding Checklist slug: /handbook +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- # Welcome diff --git a/docs/docs/hardware/community.md b/docs/docs/hardware/community.md index 5d9bfcc16..a8c3ffee9 100644 --- a/docs/docs/hardware/community.md +++ b/docs/docs/hardware/community.md @@ -1,5 +1,7 @@ --- title: Hardware Examples +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Add your own example diff --git a/docs/docs/install/from-source.md b/docs/docs/install/from-source.md index 961e7fc85..5377e831c 100644 --- a/docs/docs/install/from-source.md +++ b/docs/docs/install/from-source.md @@ -1,5 +1,7 @@ --- title: From Source +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- # Install Jan from Source diff --git a/docs/docs/install/linux.md b/docs/docs/install/linux.md index a7af581c4..0b61f96d8 100644 --- a/docs/docs/install/linux.md +++ b/docs/docs/install/linux.md @@ -1,5 +1,7 @@ --- title: Linux +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- # Installing Jan on Linux diff --git a/docs/docs/install/mac.md b/docs/docs/install/mac.md index 21ecdb54c..a618d05e3 100644 --- a/docs/docs/install/mac.md +++ b/docs/docs/install/mac.md @@ -1,5 +1,7 @@ --- title: Mac +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- # Installing Jan on MacOS diff --git a/docs/docs/install/overview.md b/docs/docs/install/overview.md index 067eb55f2..b41db64d7 100644 --- a/docs/docs/install/overview.md +++ b/docs/docs/install/overview.md @@ -1,5 +1,7 @@ --- title: Overview +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- Getting up and running open-source AI models on your own computer with Jan is quick and easy. Jan is lightweight and can run on a variety of hardware and platform versions. Specific requirements tailored to your platform are outlined below. diff --git a/docs/docs/install/windows.md b/docs/docs/install/windows.md index ecf57f51f..f3de435ec 100644 --- a/docs/docs/install/windows.md +++ b/docs/docs/install/windows.md @@ -1,5 +1,7 @@ --- title: Windows +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- # Installing Jan on Windows diff --git a/docs/docs/intro/how-jan-works.md b/docs/docs/intro/how-jan-works.md index fdfd12a10..b8202224d 100644 --- a/docs/docs/intro/how-jan-works.md +++ b/docs/docs/intro/how-jan-works.md @@ -1,5 +1,7 @@ --- title: How Jan Works +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- - Local Filesystem diff --git a/docs/docs/intro/introduction.md b/docs/docs/intro/introduction.md index a483b10cc..1501cfc4b 100644 --- a/docs/docs/intro/introduction.md +++ b/docs/docs/intro/introduction.md @@ -1,6 +1,8 @@ --- title: Introduction slug: /docs +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- Jan is a ChatGPT-alternative that runs on your own computer, with a [local API server](/api). diff --git a/docs/docs/intro/quickstart.md b/docs/docs/intro/quickstart.md index e417838ea..606003be1 100644 --- a/docs/docs/intro/quickstart.md +++ b/docs/docs/intro/quickstart.md @@ -1,5 +1,7 @@ --- title: Quickstart +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- - Write in the style of comics, explanation diff --git a/docs/docs/specs/architecture.md b/docs/docs/specs/architecture.md index 39b7fa833..2557f6203 100644 --- a/docs/docs/specs/architecture.md +++ b/docs/docs/specs/architecture.md @@ -1,6 +1,8 @@ --- title: Architecture slug: /specs +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::warning diff --git a/docs/docs/specs/engineering/assistants.md b/docs/docs/specs/engineering/assistants.md index ea0ec0955..8a96f6408 100644 --- a/docs/docs/specs/engineering/assistants.md +++ b/docs/docs/specs/engineering/assistants.md @@ -1,6 +1,8 @@ --- title: "Assistants" slug: /specs/assistants +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::caution diff --git a/docs/docs/specs/engineering/chats.md b/docs/docs/specs/engineering/chats.md index 7bb96faf0..7daac57b0 100644 --- a/docs/docs/specs/engineering/chats.md +++ b/docs/docs/specs/engineering/chats.md @@ -1,6 +1,8 @@ --- title: Chats slug: /specs/chats +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::caution diff --git a/docs/docs/specs/engineering/files.md b/docs/docs/specs/engineering/files.md index 0becbf6d6..b93054ef1 100644 --- a/docs/docs/specs/engineering/files.md +++ b/docs/docs/specs/engineering/files.md @@ -1,6 +1,8 @@ --- title: "Files" slug: /specs/files +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::warning diff --git a/docs/docs/specs/engineering/fine-tuning.md b/docs/docs/specs/engineering/fine-tuning.md index f2d4153d2..97c45d85b 100644 --- a/docs/docs/specs/engineering/fine-tuning.md +++ b/docs/docs/specs/engineering/fine-tuning.md @@ -1,6 +1,8 @@ --- title: "Fine-tuning" slug: /specs/finetuning +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- Todo: @hiro diff --git a/docs/docs/specs/engineering/messages.md b/docs/docs/specs/engineering/messages.md index 62a721fa8..4032e61d4 100644 --- a/docs/docs/specs/engineering/messages.md +++ b/docs/docs/specs/engineering/messages.md @@ -1,6 +1,8 @@ --- title: Messages slug: /specs/messages +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::caution diff --git a/docs/docs/specs/engineering/models.md b/docs/docs/specs/engineering/models.md index c47a62bab..decf8f5e9 100644 --- a/docs/docs/specs/engineering/models.md +++ b/docs/docs/specs/engineering/models.md @@ -1,6 +1,8 @@ --- title: Models slug: /specs/models +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::caution diff --git a/docs/docs/specs/engineering/prompts.md b/docs/docs/specs/engineering/prompts.md index 37422b517..9d4fa4fd6 100644 --- a/docs/docs/specs/engineering/prompts.md +++ b/docs/docs/specs/engineering/prompts.md @@ -1,6 +1,8 @@ --- title: Prompts slug: /specs/prompts +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- - [ ] /prompts folder diff --git a/docs/docs/specs/engineering/threads.md b/docs/docs/specs/engineering/threads.md index 982c4f8cb..c1421e4ae 100644 --- a/docs/docs/specs/engineering/threads.md +++ b/docs/docs/specs/engineering/threads.md @@ -1,6 +1,8 @@ --- title: Threads slug: /specs/threads +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::caution diff --git a/docs/docs/specs/file-based.md b/docs/docs/specs/file-based.md index 3b38bb06b..26f3d8efb 100644 --- a/docs/docs/specs/file-based.md +++ b/docs/docs/specs/file-based.md @@ -1,5 +1,7 @@ --- title: File-based Approach +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::warning diff --git a/docs/docs/specs/jan.md b/docs/docs/specs/jan.md index 9a97c29c2..e92dddf7a 100644 --- a/docs/docs/specs/jan.md +++ b/docs/docs/specs/jan.md @@ -1,5 +1,7 @@ --- title: Jan (Assistant) +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Jan: a "global" assistant diff --git a/docs/docs/specs/product/chat.md b/docs/docs/specs/product/chat.md index 28969f348..acbf57487 100644 --- a/docs/docs/specs/product/chat.md +++ b/docs/docs/specs/product/chat.md @@ -1,6 +1,8 @@ --- title: Chat slug: /specs/chat +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Overview diff --git a/docs/docs/specs/product/hub.md b/docs/docs/specs/product/hub.md index c2523b0fb..1a9f6064a 100644 --- a/docs/docs/specs/product/hub.md +++ b/docs/docs/specs/product/hub.md @@ -1,6 +1,8 @@ --- title: Hub slug: /specs/hub +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Overview diff --git a/docs/docs/specs/product/settings.md b/docs/docs/specs/product/settings.md index a80c50034..d7e60e943 100644 --- a/docs/docs/specs/product/settings.md +++ b/docs/docs/specs/product/settings.md @@ -1,6 +1,8 @@ --- title: Settings slug: /specs/settings +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Overview diff --git a/docs/docs/specs/product/system-monitor.md b/docs/docs/specs/product/system-monitor.md index 52d11a272..f4c77c38c 100644 --- a/docs/docs/specs/product/system-monitor.md +++ b/docs/docs/specs/product/system-monitor.md @@ -1,6 +1,8 @@ --- title: System Monitor slug: /specs/system-monitor +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- ## Overview diff --git a/docs/docs/specs/user-interface.md b/docs/docs/specs/user-interface.md index c540a6973..156eac5a6 100644 --- a/docs/docs/specs/user-interface.md +++ b/docs/docs/specs/user-interface.md @@ -1,5 +1,7 @@ --- title: User Interface +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: [Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee] --- :::warning From 3cca60842f2cb8ac3b7095df01a2ae92afebbf19 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:40:29 +0900 Subject: [PATCH 05/19] change old description --- docs/src/pages/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/pages/index.js b/docs/src/pages/index.js index d87e00498..986fff9a2 100644 --- a/docs/src/pages/index.js +++ b/docs/src/pages/index.js @@ -19,7 +19,7 @@ export default function Home() {
From 5e819153862e7a8a168ef6f756d42f16d8dfd024 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:49:31 +0900 Subject: [PATCH 06/19] improve SEO --- docs/docusaurus.config.js | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 7b07016d2..fc4c89a9e 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -149,6 +149,33 @@ const config = { { property: 'twitter:description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' }, { property: 'twitter:image', content: 'https://jan.ai/img/jan-social-card.png' }, ], + headTags: [ + // Declare a preconnect tag + { + tagName: 'link', + attributes: { + rel: 'preconnect', + href: 'https://jan.ai/', + }, + }, + // Declare some json-ld structured data + { + tagName: 'script', + attributes: { + type: 'application/ld+json', + }, + innerHTML: JSON.stringify({ + '@context': 'https://schema.org/', + '@type': 'LLMInference', + name: 'Jan', + description: "Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.", + keywords: "Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee", + applicationCategory: "BusinessApplication", + operatingSystem: "Multiple", + url: 'https://jan.ai/', + }), + }, + ], navbar: { title: "Jan", logo: { From a5a71d792e5786a31cdc499988060bfee0b06c30 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:50:03 +0900 Subject: [PATCH 07/19] improve SEO --- docs/docusaurus.config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index fc4c89a9e..d51268bc4 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -166,7 +166,7 @@ const config = { }, innerHTML: JSON.stringify({ '@context': 'https://schema.org/', - '@type': 'LLMInference', + '@type': 'localAI', name: 'Jan', description: "Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.", keywords: "Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee", From f1782689d81ac08c58cd3069a32fad5f2cfc1a07 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:54:03 +0900 Subject: [PATCH 08/19] improve docs --- docs/static/robots.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 docs/static/robots.txt diff --git a/docs/static/robots.txt b/docs/static/robots.txt new file mode 100644 index 000000000..eb0536286 --- /dev/null +++ b/docs/static/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Disallow: From 164a62d0106858ccb45ea7941afeff8f91091358 Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:11:13 +0900 Subject: [PATCH 09/19] add noIndex --- docs/docusaurus.config.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index d51268bc4..34469a45a 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -38,6 +38,8 @@ const config = { mermaid: true, }, + noIndex: false, + // Plugins we added plugins: [ "docusaurus-plugin-sass", From 89783e88563362339d74288fc2ff3a750db5be1f Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 
23:18:38 +0900 Subject: [PATCH 10/19] add robots noindex --- docs/docusaurus.config.js | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 34469a45a..1563f45a7 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -142,6 +142,7 @@ const config = { metadata: [ { name: 'description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' }, { name: 'keywords', content: 'Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee' }, + { name: 'robots', content: 'noindex'}, { property: 'og:title', content: 'Run your own AI | Jan' }, { property: 'og:description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' }, { property: 'og:image', content: 'https://jan.ai/img/jan-social-card.png' }, From 81c006d922195099f3bdab7bbc74b0e3e1eb6e3d Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:25:02 +0900 Subject: [PATCH 11/19] approve index --- docs/docusaurus.config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 1563f45a7..7ae13282b 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -142,7 +142,7 @@ const config = { metadata: [ { name: 'description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' }, { name: 'keywords', content: 'Jan, ChatGPT alternative, on-premises AI, local API server, local AI, llm, conversational AI, no-subscription fee' }, - { name: 'robots', content: 'noindex'}, + { name: 'robots', content: 'index, follow' }, { property: 'og:title', content: 'Run your own AI | Jan' }, { property: 'og:description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' }, { property: 'og:image', content: 'https://jan.ai/img/jan-social-card.png' }, From 1d557f7e1f219fda707b96e329f2a2679cdecf8d Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:34:27 +0900 Subject: [PATCH 12/19] add og:type --- docs/docusaurus.config.js | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 7ae13282b..da62e3399 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -146,6 +146,7 @@ const config = { { property: 'og:title', content: 'Run your own AI | Jan' }, { property: 'og:description', content: 'Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.' 
}, { property: 'og:image', content: 'https://jan.ai/img/jan-social-card.png' }, + { property: 'og:type', content: 'website' }, { property: 'twitter:card', content: 'summary_large_image' }, { property: 'twitter:site', content: '@janhq_' }, { property: 'twitter:title', content: 'Run your own AI | Jan' }, From b777ff085d93d166196adbd04562555a1680f33f Mon Sep 17 00:00:00 2001 From: hieu-jan <150573299+hieu-jan@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:41:53 +0900 Subject: [PATCH 13/19] allow robots --- docs/static/robots.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/static/robots.txt b/docs/static/robots.txt index eb0536286..f6e6d1d41 100644 --- a/docs/static/robots.txt +++ b/docs/static/robots.txt @@ -1,2 +1,2 @@ -User-agent: * -Disallow: +User-Agent: * +Allow: / From 96358dc71c087219d148335d1b07890a8b68efb3 Mon Sep 17 00:00:00 2001 From: Louis Date: Sat, 2 Dec 2023 08:30:34 +0700 Subject: [PATCH 14/19] fix: CI script - reorder copy models action (#819) --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 9192a0238..c2754d3ad 100644 --- a/package.json +++ b/package.json @@ -30,11 +30,11 @@ "build:uikit": "yarn workspace @janhq/uikit install && yarn workspace @janhq/uikit build", "build:core": "cd core && yarn install && yarn run build", "build:web": "yarn workspace jan-web build && cpx \"web/out/**\" \"electron/renderer/\"", - "build:electron": "yarn workspace jan build && cpx \"models/**\" \"electron/models/\"", + "build:electron": "cpx \"models/**\" \"electron/models/\" && yarn workspace jan build", "build:electron:test": "yarn workspace jan build:test", "build:extensions": "rimraf ./electron/pre-install/*.tgz && concurrently --kill-others-on-fail \"cd ./extensions/conversational-extension && npm install && npm run build:publish\" \"cd ./extensions/inference-extension && npm install && npm run build:publish\" \"cd ./extensions/model-extension && npm install && npm run build:publish\" \"cd ./extensions/monitoring-extension && npm install && npm run build:publish\" \"cd ./extensions/assistant-extension && npm install && npm run build:publish\"", "build:test": "yarn build:web && yarn workspace jan build:test", - "build": "yarn build:web && yarn workspace jan build", + "build": "yarn build:web && yarn build:electron", "build:publish": "yarn build:web && yarn workspace jan build:publish" }, "devDependencies": { From e5a440fc8f26dca602b070590688aa8f835564e1 Mon Sep 17 00:00:00 2001 From: hiento09 <136591877+hiento09@users.noreply.github.com> Date: Mon, 4 Dec 2023 10:30:03 +0700 Subject: [PATCH 15/19] Refactor build:extension command (#822) * Refactor build:extenstions command * Enable parallel build extension * build extension on windows, set job name similar to folder name * windows build extension write logs to console for debugging --------- Co-authored-by: Hien To Co-authored-by: Hien To --- package.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index c2754d3ad..2a4a7fa85 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,10 @@ "build:web": "yarn workspace jan-web build && cpx \"web/out/**\" \"electron/renderer/\"", "build:electron": "cpx \"models/**\" \"electron/models/\" && yarn workspace jan build", "build:electron:test": "yarn workspace jan build:test", - "build:extensions": "rimraf ./electron/pre-install/*.tgz && concurrently --kill-others-on-fail \"cd ./extensions/conversational-extension && npm install && npm run 
build:publish\" \"cd ./extensions/inference-extension && npm install && npm run build:publish\" \"cd ./extensions/model-extension && npm install && npm run build:publish\" \"cd ./extensions/monitoring-extension && npm install && npm run build:publish\" \"cd ./extensions/assistant-extension && npm install && npm run build:publish\"", + "build:extensions:windows": "rimraf ./electron/pre-install/*.tgz && powershell -command \"$jobs = Get-ChildItem -Path './extensions' -Directory | ForEach-Object { Start-Job -Name ($_.Name) -ScriptBlock { param($_dir); try { Set-Location $_dir; npm install; npm run build:publish; Write-Output 'Build successful in ' + $_dir } catch { Write-Error 'Error in ' + $_dir; throw } } -ArgumentList $_.FullName }; $jobs | Wait-Job; $jobs | ForEach-Object { Receive-Job -Job $_ -Keep } | ForEach-Object { Write-Host $_ }; $failed = $jobs | Where-Object { $_.State -ne 'Completed' -or $_.ChildJobs[0].JobStateInfo.State -ne 'Completed' }; if ($failed) { Exit 1 }\"", + "build:extensions:linux": "rimraf ./electron/pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'", + "build:extensions:darwin": "rimraf ./electron/pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'", + "build:extensions": "run-script-os", "build:test": "yarn build:web && yarn workspace jan build:test", "build": "yarn build:web && yarn build:electron", "build:publish": "yarn build:web && yarn workspace jan build:publish" From 424b00338e59f3f00b996f7248db668584bbe756 Mon Sep 17 00:00:00 2001 From: Faisal Amir Date: Mon, 4 Dec 2023 10:55:47 +0700 Subject: [PATCH 16/19] feat: revamp thread screen (#802) * Make thread screen as default screen * Blank state when user have not any model * Cleanup topbar thread screen * Improve style right panel * Add instructions right panel * Styling thread list history * Resolve conflict * Default title new thread * Fix trigger panel sidebar * Make default right panel false when no activethread * Fix CI test * chore: assistant instruction with system prompt * Fix title and blank state explore the hub * Claenup style thread screen and add buble message for assitant * Remove unused import * Styling more menus on thread list and right panel, and make max height textarea 400 pixel * Finished revamp ui thread * Finished system monitor UI * Style box running models * Make animate right panel more smooth * Add status arround textarea for starting model info * Temporary disable hide left panel * chore: system resource monitoring update * copy nits * chore: typo * Reverse icon chevron accordion * Move my models into setting page --------- Co-authored-by: Louis Co-authored-by: 0xSage --- core/src/types/index.ts | 7 +- electron/managers/window.ts | 18 +- electron/tests/explore.e2e.spec.ts | 42 ++-- electron/tests/main.e2e.spec.ts | 64 +++---- electron/tests/my-models.e2e.spec.ts | 41 ---- electron/tests/navigation.e2e.spec.ts | 51 ++--- electron/tests/settings.e2e.spec.ts | 42 ++-- electron/tests/system-monitor.e2e.spec.ts | 41 ++++ extensions/assistant-extension/src/index.ts | 6 +- uikit/package.json | 2 + uikit/src/badge/styles.scss | 2 +- uikit/src/command/styles.scss | 2 +- uikit/src/index.ts | 1 + uikit/src/input/index.tsx | 2 +- uikit/src/main.scss | 5 +- uikit/src/select/index.tsx | 139 ++++++++++++++ uikit/src/select/styles.scss | 31 +++ web/app/layout.tsx | 2 +- 
web/app/page.tsx | 18 +- web/constants/screens.ts | 8 +- web/containers/CardSidebar/index.tsx | 122 ++++++------ web/containers/DropdownListSidebar/index.tsx | 168 ++++++++-------- .../BottomBar/DownloadingState/index.tsx | 4 - web/containers/Layout/BottomBar/index.tsx | 6 +- web/containers/Layout/Ribbon/index.tsx | 60 +++--- .../CommandListDownloadedModel/index.tsx | 4 +- .../Layout/TopBar/CommandSearch/index.tsx | 53 +----- web/containers/Layout/TopBar/index.tsx | 81 +++++++- web/containers/ModalCancelDownload/index.tsx | 1 - web/containers/Shortcut/index.tsx | 2 +- web/containers/Toast/index.tsx | 2 +- web/helpers/atoms/ChatMessage.atom.ts | 2 +- web/helpers/atoms/SystemBar.atom.ts | 3 + web/hooks/useClickOutside.ts | 42 ++++ web/hooks/useCreateNewThread.ts | 16 +- web/hooks/useDeleteConversation.ts | 10 +- web/hooks/useDownloadModel.ts | 2 +- web/hooks/useGetSystemResources.ts | 11 +- web/hooks/useMainViewState.ts | 2 +- web/hooks/useSendChatMessage.ts | 51 +++-- web/package.json | 2 + web/screens/Chat/ChatBody/index.tsx | 58 +++++- web/screens/Chat/ChatItem/index.tsx | 5 +- web/screens/Chat/MessageToolbar/index.tsx | 32 +--- web/screens/Chat/Sidebar/index.tsx | 115 ++++++++--- web/screens/Chat/SimpleTextMessage/index.tsx | 54 ++++-- web/screens/Chat/ThreadList/index.tsx | 113 +++++++---- web/screens/Chat/index.tsx | 146 ++++++-------- .../ExploreModelItemHeader/index.tsx | 2 +- .../ExploreModels/ModelVersionItem/index.tsx | 2 +- web/screens/ExploreModels/index.tsx | 2 +- web/screens/MyModels/BlankState/index.tsx | 77 -------- web/screens/MyModels/index.tsx | 180 ------------------ .../ExtensionsCatalog/index.tsx | 2 +- web/screens/Settings/Models/Row.tsx | 137 +++++++++++++ web/screens/Settings/Models/index.tsx | 65 +++++++ web/screens/Settings/index.tsx | 45 ++++- web/screens/SystemMonitor/index.tsx | 117 ++++++++++++ web/screens/Welcome/index.tsx | 74 ------- web/styles/components/message.scss | 6 +- 60 files changed, 1435 insertions(+), 965 deletions(-) delete mode 100644 electron/tests/my-models.e2e.spec.ts create mode 100644 electron/tests/system-monitor.e2e.spec.ts create mode 100644 uikit/src/select/index.tsx create mode 100644 uikit/src/select/styles.scss create mode 100644 web/hooks/useClickOutside.ts delete mode 100644 web/screens/MyModels/BlankState/index.tsx delete mode 100644 web/screens/MyModels/index.tsx create mode 100644 web/screens/Settings/Models/Row.tsx create mode 100644 web/screens/Settings/Models/index.tsx create mode 100644 web/screens/SystemMonitor/index.tsx delete mode 100644 web/screens/Welcome/index.tsx diff --git a/core/src/types/index.ts b/core/src/types/index.ts index bbd1e98de..7580c2432 100644 --- a/core/src/types/index.ts +++ b/core/src/types/index.ts @@ -143,6 +143,7 @@ export type ThreadAssistantInfo = { assistant_id: string; assistant_name: string; model: ModelInfo; + instructions?: string; }; /** @@ -288,13 +289,13 @@ export type Assistant = { /** Represents the name of the object. */ name: string; /** Represents the description of the object. */ - description: string; + description?: string; /** Represents the model of the object. */ model: string; /** Represents the instructions for the object. */ - instructions: string; + instructions?: string; /** Represents the tools associated with the object. */ - tools: any; + tools?: any; /** Represents the file identifiers associated with the object. */ file_ids: string[]; /** Represents the metadata of the object. 
*/ diff --git a/electron/managers/window.ts b/electron/managers/window.ts index c930dd5ec..0d5a0eaf4 100644 --- a/electron/managers/window.ts +++ b/electron/managers/window.ts @@ -1,15 +1,15 @@ -import { BrowserWindow } from "electron"; +import { BrowserWindow } from 'electron' /** * Manages the current window instance. */ export class WindowManager { - public static instance: WindowManager = new WindowManager(); - public currentWindow?: BrowserWindow; + public static instance: WindowManager = new WindowManager() + public currentWindow?: BrowserWindow constructor() { if (WindowManager.instance) { - return WindowManager.instance; + return WindowManager.instance } } @@ -21,17 +21,17 @@ export class WindowManager { createWindow(options?: Electron.BrowserWindowConstructorOptions | undefined) { this.currentWindow = new BrowserWindow({ width: 1200, - minWidth: 800, + minWidth: 1200, height: 800, show: false, trafficLightPosition: { x: 10, y: 15, }, - titleBarStyle: "hidden", - vibrancy: "sidebar", + titleBarStyle: 'hidden', + vibrancy: 'sidebar', ...options, - }); - return this.currentWindow; + }) + return this.currentWindow } } diff --git a/electron/tests/explore.e2e.spec.ts b/electron/tests/explore.e2e.spec.ts index 5a4412cb3..77eb3dbda 100644 --- a/electron/tests/explore.e2e.spec.ts +++ b/electron/tests/explore.e2e.spec.ts @@ -1,41 +1,41 @@ -import { _electron as electron } from "playwright"; -import { ElectronApplication, Page, expect, test } from "@playwright/test"; +import { _electron as electron } from 'playwright' +import { ElectronApplication, Page, expect, test } from '@playwright/test' import { findLatestBuild, parseElectronApp, stubDialog, -} from "electron-playwright-helpers"; +} from 'electron-playwright-helpers' -let electronApp: ElectronApplication; -let page: Page; +let electronApp: ElectronApplication +let page: Page test.beforeAll(async () => { - process.env.CI = "e2e"; + process.env.CI = 'e2e' - const latestBuild = findLatestBuild("dist"); - expect(latestBuild).toBeTruthy(); + const latestBuild = findLatestBuild('dist') + expect(latestBuild).toBeTruthy() // parse the packaged Electron app and find paths and other info - const appInfo = parseElectronApp(latestBuild); - expect(appInfo).toBeTruthy(); + const appInfo = parseElectronApp(latestBuild) + expect(appInfo).toBeTruthy() electronApp = await electron.launch({ args: [appInfo.main], // main file from package.json executablePath: appInfo.executable, // path to the Electron executable - }); - await stubDialog(electronApp, "showMessageBox", { response: 1 }); + }) + await stubDialog(electronApp, 'showMessageBox', { response: 1 }) - page = await electronApp.firstWindow(); -}); + page = await electronApp.firstWindow() +}) test.afterAll(async () => { - await electronApp.close(); - await page.close(); -}); + await electronApp.close() + await page.close() +}) -test("explores models", async () => { - await page.getByTestId("Explore Models").first().click(); - await page.getByTestId("testid-explore-models").isVisible(); +test('explores models', async () => { + await page.getByTestId('Hub').first().click() + await page.getByTestId('testid-explore-models').isVisible() // More test cases here... 
-}); +}) diff --git a/electron/tests/main.e2e.spec.ts b/electron/tests/main.e2e.spec.ts index d6df31ca4..1a5bfe696 100644 --- a/electron/tests/main.e2e.spec.ts +++ b/electron/tests/main.e2e.spec.ts @@ -1,55 +1,55 @@ -import { _electron as electron } from "playwright"; -import { ElectronApplication, Page, expect, test } from "@playwright/test"; +import { _electron as electron } from 'playwright' +import { ElectronApplication, Page, expect, test } from '@playwright/test' import { findLatestBuild, parseElectronApp, stubDialog, -} from "electron-playwright-helpers"; +} from 'electron-playwright-helpers' -let electronApp: ElectronApplication; -let page: Page; +let electronApp: ElectronApplication +let page: Page test.beforeAll(async () => { - process.env.CI = "e2e"; + process.env.CI = 'e2e' - const latestBuild = findLatestBuild("dist"); - expect(latestBuild).toBeTruthy(); + const latestBuild = findLatestBuild('dist') + expect(latestBuild).toBeTruthy() // parse the packaged Electron app and find paths and other info - const appInfo = parseElectronApp(latestBuild); - expect(appInfo).toBeTruthy(); - expect(appInfo.asar).toBe(true); - expect(appInfo.executable).toBeTruthy(); - expect(appInfo.main).toBeTruthy(); - expect(appInfo.name).toBe("jan"); - expect(appInfo.packageJson).toBeTruthy(); - expect(appInfo.packageJson.name).toBe("jan"); - expect(appInfo.platform).toBeTruthy(); - expect(appInfo.platform).toBe(process.platform); - expect(appInfo.resourcesDir).toBeTruthy(); + const appInfo = parseElectronApp(latestBuild) + expect(appInfo).toBeTruthy() + expect(appInfo.asar).toBe(true) + expect(appInfo.executable).toBeTruthy() + expect(appInfo.main).toBeTruthy() + expect(appInfo.name).toBe('jan') + expect(appInfo.packageJson).toBeTruthy() + expect(appInfo.packageJson.name).toBe('jan') + expect(appInfo.platform).toBeTruthy() + expect(appInfo.platform).toBe(process.platform) + expect(appInfo.resourcesDir).toBeTruthy() electronApp = await electron.launch({ args: [appInfo.main], // main file from package.json executablePath: appInfo.executable, // path to the Electron executable - }); - await stubDialog(electronApp, "showMessageBox", { response: 1 }); + }) + await stubDialog(electronApp, 'showMessageBox', { response: 1 }) - page = await electronApp.firstWindow(); -}); + page = await electronApp.firstWindow() +}) test.afterAll(async () => { - await electronApp.close(); - await page.close(); -}); + await electronApp.close() + await page.close() +}) -test("renders the home page", async () => { - expect(page).toBeDefined(); +test('renders the home page', async () => { + expect(page).toBeDefined() // Welcome text is available const welcomeText = await page - .getByTestId("testid-welcome-title") + .getByTestId('testid-welcome-title') .first() - .isVisible(); - expect(welcomeText).toBe(false); -}); + .isVisible() + expect(welcomeText).toBe(false) +}) diff --git a/electron/tests/my-models.e2e.spec.ts b/electron/tests/my-models.e2e.spec.ts deleted file mode 100644 index a3355fb33..000000000 --- a/electron/tests/my-models.e2e.spec.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { _electron as electron } from "playwright"; -import { ElectronApplication, Page, expect, test } from "@playwright/test"; - -import { - findLatestBuild, - parseElectronApp, - stubDialog, -} from "electron-playwright-helpers"; - -let electronApp: ElectronApplication; -let page: Page; - -test.beforeAll(async () => { - process.env.CI = "e2e"; - - const latestBuild = findLatestBuild("dist"); - expect(latestBuild).toBeTruthy(); - - // parse the packaged 
Electron app and find paths and other info - const appInfo = parseElectronApp(latestBuild); - expect(appInfo).toBeTruthy(); - - electronApp = await electron.launch({ - args: [appInfo.main], // main file from package.json - executablePath: appInfo.executable, // path to the Electron executable - }); - await stubDialog(electronApp, "showMessageBox", { response: 1 }); - - page = await electronApp.firstWindow(); -}); - -test.afterAll(async () => { - await electronApp.close(); - await page.close(); -}); - -test("shows my models", async () => { - await page.getByTestId("My Models").first().click(); - await page.getByTestId("testid-my-models").isVisible(); - // More test cases here... -}); diff --git a/electron/tests/navigation.e2e.spec.ts b/electron/tests/navigation.e2e.spec.ts index 104333650..2f4f7b767 100644 --- a/electron/tests/navigation.e2e.spec.ts +++ b/electron/tests/navigation.e2e.spec.ts @@ -1,43 +1,43 @@ -import { _electron as electron } from "playwright"; -import { ElectronApplication, Page, expect, test } from "@playwright/test"; +import { _electron as electron } from 'playwright' +import { ElectronApplication, Page, expect, test } from '@playwright/test' import { findLatestBuild, parseElectronApp, stubDialog, -} from "electron-playwright-helpers"; +} from 'electron-playwright-helpers' -let electronApp: ElectronApplication; -let page: Page; +let electronApp: ElectronApplication +let page: Page test.beforeAll(async () => { - process.env.CI = "e2e"; + process.env.CI = 'e2e' - const latestBuild = findLatestBuild("dist"); - expect(latestBuild).toBeTruthy(); + const latestBuild = findLatestBuild('dist') + expect(latestBuild).toBeTruthy() // parse the packaged Electron app and find paths and other info - const appInfo = parseElectronApp(latestBuild); - expect(appInfo).toBeTruthy(); + const appInfo = parseElectronApp(latestBuild) + expect(appInfo).toBeTruthy() electronApp = await electron.launch({ args: [appInfo.main], // main file from package.json executablePath: appInfo.executable, // path to the Electron executable - }); - await stubDialog(electronApp, "showMessageBox", { response: 1 }); + }) + await stubDialog(electronApp, 'showMessageBox', { response: 1 }) - page = await electronApp.firstWindow(); -}); + page = await electronApp.firstWindow() +}) test.afterAll(async () => { - await electronApp.close(); - await page.close(); -}); + await electronApp.close() + await page.close() +}) -test("renders left navigation panel", async () => { +test('renders left navigation panel', async () => { // Chat section should be there - const chatSection = await page.getByTestId("Chat").first().isVisible(); - expect(chatSection).toBe(false); + const chatSection = await page.getByTestId('Chat').first().isVisible() + expect(chatSection).toBe(false) // Home actions /* Disable unstable feature tests @@ -45,7 +45,10 @@ test("renders left navigation panel", async () => { ** Enable back when it is whitelisted */ - const myModelsBtn = await page.getByTestId("My Models").first().isEnabled(); - const settingsBtn = await page.getByTestId("Settings").first().isEnabled(); - expect([myModelsBtn, settingsBtn].filter((e) => !e).length).toBe(0); -}); + const systemMonitorBtn = await page + .getByTestId('System Monitor') + .first() + .isEnabled() + const settingsBtn = await page.getByTestId('Settings').first().isEnabled() + expect([systemMonitorBtn, settingsBtn].filter((e) => !e).length).toBe(0) +}) diff --git a/electron/tests/settings.e2e.spec.ts b/electron/tests/settings.e2e.spec.ts index 2f8d6465b..798504c70 100644 
--- a/electron/tests/settings.e2e.spec.ts +++ b/electron/tests/settings.e2e.spec.ts @@ -1,40 +1,40 @@ -import { _electron as electron } from "playwright"; -import { ElectronApplication, Page, expect, test } from "@playwright/test"; +import { _electron as electron } from 'playwright' +import { ElectronApplication, Page, expect, test } from '@playwright/test' import { findLatestBuild, parseElectronApp, stubDialog, -} from "electron-playwright-helpers"; +} from 'electron-playwright-helpers' -let electronApp: ElectronApplication; -let page: Page; +let electronApp: ElectronApplication +let page: Page test.beforeAll(async () => { - process.env.CI = "e2e"; + process.env.CI = 'e2e' - const latestBuild = findLatestBuild("dist"); - expect(latestBuild).toBeTruthy(); + const latestBuild = findLatestBuild('dist') + expect(latestBuild).toBeTruthy() // parse the packaged Electron app and find paths and other info - const appInfo = parseElectronApp(latestBuild); - expect(appInfo).toBeTruthy(); + const appInfo = parseElectronApp(latestBuild) + expect(appInfo).toBeTruthy() electronApp = await electron.launch({ args: [appInfo.main], // main file from package.json executablePath: appInfo.executable, // path to the Electron executable - }); - await stubDialog(electronApp, "showMessageBox", { response: 1 }); + }) + await stubDialog(electronApp, 'showMessageBox', { response: 1 }) - page = await electronApp.firstWindow(); -}); + page = await electronApp.firstWindow() +}) test.afterAll(async () => { - await electronApp.close(); - await page.close(); -}); + await electronApp.close() + await page.close() +}) -test("shows settings", async () => { - await page.getByTestId("Settings").first().click(); - await page.getByTestId("testid-setting-description").isVisible(); -}); +test('shows settings', async () => { + await page.getByTestId('Settings').first().click() + await page.getByTestId('testid-setting-description').isVisible() +}) diff --git a/electron/tests/system-monitor.e2e.spec.ts b/electron/tests/system-monitor.e2e.spec.ts new file mode 100644 index 000000000..747a8ae18 --- /dev/null +++ b/electron/tests/system-monitor.e2e.spec.ts @@ -0,0 +1,41 @@ +import { _electron as electron } from 'playwright' +import { ElectronApplication, Page, expect, test } from '@playwright/test' + +import { + findLatestBuild, + parseElectronApp, + stubDialog, +} from 'electron-playwright-helpers' + +let electronApp: ElectronApplication +let page: Page + +test.beforeAll(async () => { + process.env.CI = 'e2e' + + const latestBuild = findLatestBuild('dist') + expect(latestBuild).toBeTruthy() + + // parse the packaged Electron app and find paths and other info + const appInfo = parseElectronApp(latestBuild) + expect(appInfo).toBeTruthy() + + electronApp = await electron.launch({ + args: [appInfo.main], // main file from package.json + executablePath: appInfo.executable, // path to the Electron executable + }) + await stubDialog(electronApp, 'showMessageBox', { response: 1 }) + + page = await electronApp.firstWindow() +}) + +test.afterAll(async () => { + await electronApp.close() + await page.close() +}) + +test('shows system monitor', async () => { + await page.getByTestId('System Monitor').first().click() + await page.getByTestId('testid-system-monitor').isVisible() + // More test cases here... 
+}) diff --git a/extensions/assistant-extension/src/index.ts b/extensions/assistant-extension/src/index.ts index 7321a0660..8d01021b7 100644 --- a/extensions/assistant-extension/src/index.ts +++ b/extensions/assistant-extension/src/index.ts @@ -89,12 +89,12 @@ export default class JanAssistantExtension implements AssistantExtension { private async createJanAssistant(): Promise { const janAssistant: Assistant = { avatar: "", - thread_location: undefined, // TODO: make this property ? + thread_location: undefined, id: "jan", object: "assistant", // TODO: maybe we can set default value for this? created_at: Date.now(), - name: "Jan Assistant", - description: "Just Jan Assistant", + name: "Jan", + description: "A default assistant that can use all downloaded models", model: "*", instructions: "Your name is Jan.", tools: undefined, diff --git a/uikit/package.json b/uikit/package.json index dd67be599..a96b5d37e 100644 --- a/uikit/package.json +++ b/uikit/package.json @@ -20,9 +20,11 @@ "@radix-ui/react-avatar": "^1.0.4", "@radix-ui/react-context": "^1.0.1", "@radix-ui/react-dialog": "^1.0.5", + "@radix-ui/react-icons": "^1.3.0", "@radix-ui/react-label": "^2.0.2", "@radix-ui/react-progress": "^1.0.3", "@radix-ui/react-scroll-area": "^1.0.5", + "@radix-ui/react-select": "^2.0.0", "@radix-ui/react-slot": "^1.0.2", "@radix-ui/react-switch": "^1.0.3", "@radix-ui/react-toast": "^1.1.5", diff --git a/uikit/src/badge/styles.scss b/uikit/src/badge/styles.scss index e5a783d88..cf8e52c8b 100644 --- a/uikit/src/badge/styles.scss +++ b/uikit/src/badge/styles.scss @@ -6,7 +6,7 @@ } &-success { - @apply border-transparent bg-green-500 text-green-900 hover:bg-green-500/80; + @apply border-transparent bg-green-100 text-green-600; } &-secondary { diff --git a/uikit/src/command/styles.scss b/uikit/src/command/styles.scss index 80171ef50..a832792d6 100644 --- a/uikit/src/command/styles.scss +++ b/uikit/src/command/styles.scss @@ -25,7 +25,7 @@ } &-list-item { - @apply text-foreground aria-selected:bg-primary relative flex cursor-pointer select-none items-center rounded-md px-2 py-2 text-sm outline-none; + @apply text-foreground aria-selected:bg-secondary relative flex cursor-pointer select-none items-center rounded-md px-2 py-2 text-sm outline-none; } &-empty { diff --git a/uikit/src/index.ts b/uikit/src/index.ts index 67c3af93f..067752de0 100644 --- a/uikit/src/index.ts +++ b/uikit/src/index.ts @@ -10,3 +10,4 @@ export * from './tooltip' export * from './modal' export * from './command' export * from './textarea' +export * from './select' diff --git a/uikit/src/input/index.tsx b/uikit/src/input/index.tsx index 8d90ab232..9b7808055 100644 --- a/uikit/src/input/index.tsx +++ b/uikit/src/input/index.tsx @@ -9,7 +9,7 @@ const Input = forwardRef( return ( diff --git a/uikit/src/main.scss b/uikit/src/main.scss index 562e09532..1eca363b4 100644 --- a/uikit/src/main.scss +++ b/uikit/src/main.scss @@ -14,6 +14,7 @@ @import './modal/styles.scss'; @import './command/styles.scss'; @import './textarea/styles.scss'; +@import './select/styles.scss'; .animate-spin { animation: spin 1s linear infinite; @@ -104,7 +105,3 @@ --secondary-foreground: 210 20% 98%; } } - -:is(p) { - @apply text-muted-foreground; -} diff --git a/uikit/src/select/index.tsx b/uikit/src/select/index.tsx new file mode 100644 index 000000000..9bee7a153 --- /dev/null +++ b/uikit/src/select/index.tsx @@ -0,0 +1,139 @@ +'use client' + +import * as React from 'react' +import { + CaretSortIcon, + // CheckIcon, + ChevronDownIcon, + ChevronUpIcon, +} from 
'@radix-ui/react-icons' + +import * as SelectPrimitive from '@radix-ui/react-select' + +import { twMerge } from 'tailwind-merge' + +const Select = SelectPrimitive.Root + +const SelectGroup = SelectPrimitive.Group + +const SelectValue = SelectPrimitive.Value + +const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + {children} + + + + +)) +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName + +const SelectScrollUpButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)) +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName + +const SelectScrollDownButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)) +SelectScrollDownButton.displayName = + SelectPrimitive.ScrollDownButton.displayName + +const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = 'popper', ...props }, ref) => ( + + + + + {children} + + + + +)) +SelectContent.displayName = SelectPrimitive.Content.displayName + +const SelectLabel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +SelectLabel.displayName = SelectPrimitive.Label.displayName + +const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + {/* + + + + */} + {children} + +)) +SelectItem.displayName = SelectPrimitive.Item.displayName + +export { + Select, + SelectGroup, + SelectValue, + SelectTrigger, + SelectContent, + SelectLabel, + SelectItem, + SelectScrollUpButton, + SelectScrollDownButton, +} diff --git a/uikit/src/select/styles.scss b/uikit/src/select/styles.scss new file mode 100644 index 000000000..a0bf625f0 --- /dev/null +++ b/uikit/src/select/styles.scss @@ -0,0 +1,31 @@ +.select { + @apply ring-offset-background placeholder:text-muted-foreground border-border flex h-9 w-full items-center justify-between whitespace-nowrap rounded-md border bg-transparent px-3 py-2 text-sm shadow-sm disabled:cursor-not-allowed disabled:opacity-50 [&>span]:line-clamp-1; + + &-caret { + @apply h-4 w-4 opacity-50; + } + + &-scroll-up-button { + @apply flex cursor-default items-center justify-center py-1; + } + + &-scroll-down-button { + @apply flex cursor-default items-center justify-center py-1; + } + + &-label { + @apply px-2 py-1.5 text-sm font-semibold; + } + + &-item { + @apply hover:bg-secondary relative my-1 block w-full cursor-pointer select-none items-center rounded-sm px-4 py-2 text-sm outline-none data-[disabled]:pointer-events-none data-[disabled]:opacity-50; + } + + &-trigger-viewport { + @apply w-full py-1; + } + + &-content { + @apply bg-background border-border relative z-50 mt-1 block max-h-96 w-full min-w-[8rem] overflow-hidden rounded-md border shadow-md; + } +} diff --git a/web/app/layout.tsx b/web/app/layout.tsx index 38dee2056..c62390ba5 100644 --- a/web/app/layout.tsx +++ b/web/app/layout.tsx @@ -15,7 +15,7 @@ export const metadata: Metadata = { export default function RootLayout({ children }: PropsWithChildren) { return ( - +
{children} diff --git a/web/app/page.tsx b/web/app/page.tsx index 20abda6f9..cae3262a7 100644 --- a/web/app/page.tsx +++ b/web/app/page.tsx @@ -8,29 +8,25 @@ import { useMainViewState } from '@/hooks/useMainViewState' import ChatScreen from '@/screens/Chat' import ExploreModelsScreen from '@/screens/ExploreModels' -import MyModelsScreen from '@/screens/MyModels' + import SettingsScreen from '@/screens/Settings' -import WelcomeScreen from '@/screens/Welcome' +import SystemMonitorScreen from '@/screens/SystemMonitor' export default function Page() { const { mainViewState } = useMainViewState() let children = null switch (mainViewState) { - case MainViewState.Welcome: - children = - break - - case MainViewState.ExploreModels: + case MainViewState.Hub: children = break - case MainViewState.MyModels: - children = + case MainViewState.Settings: + children = break - case MainViewState.Setting: - children = + case MainViewState.SystemMonitor: + children = break default: diff --git a/web/constants/screens.ts b/web/constants/screens.ts index 76ad6fab5..19f82aaac 100644 --- a/web/constants/screens.ts +++ b/web/constants/screens.ts @@ -1,7 +1,7 @@ export enum MainViewState { - Welcome, - ExploreModels, + Hub, MyModels, - Setting, - Chat, + Settings, + Thread, + SystemMonitor, } diff --git a/web/containers/CardSidebar/index.tsx b/web/containers/CardSidebar/index.tsx index 42f975aaf..38264e457 100644 --- a/web/containers/CardSidebar/index.tsx +++ b/web/containers/CardSidebar/index.tsx @@ -1,13 +1,15 @@ -import { ReactNode, useState } from 'react' -import { Fragment } from 'react' +import { ReactNode, useState, useRef } from 'react' -import { Menu, Transition } from '@headlessui/react' import { ChevronDownIcon, - EllipsisVerticalIcon, -} from '@heroicons/react/20/solid' + MoreVerticalIcon, + FolderOpenIcon, + Code2Icon, +} from 'lucide-react' import { twMerge } from 'tailwind-merge' +import { useClickOutside } from '@/hooks/useClickOutside' + interface Props { children: ReactNode title: string @@ -21,65 +23,75 @@ export default function CardSidebar({ onViewJsonClick, }: Props) { const [show, setShow] = useState(true) + const [more, setMore] = useState(false) + const [menu, setMenu] = useState(null) + const [toggle, setToggle] = useState(null) + + useClickOutside(() => setMore(false), null, [menu, toggle]) return ( -
-
+
+
- - - Open options - - setMore(!more)} + > + +
+ {more && ( +
- - - {({ active }) => ( - onRevealInFinderClick(title)} - className={twMerge( - active ? 'bg-gray-50' : '', - 'block cursor-pointer px-3 py-1 text-xs leading-6 text-gray-900' - )} - > - Reveal in finder - - )} - - - {({ active }) => ( - onViewJsonClick(title)} - className={twMerge( - active ? 'bg-gray-50' : '', - 'block cursor-pointer px-3 py-1 text-xs leading-6 text-gray-900' - )} - > - View a JSON - - )} - - - - +
{ + onRevealInFinderClick(title) + setMore(false) + }} + > + + + Reveal in Finder + +
+
{ + onViewJsonClick(title) + setMore(false) + }} + > + + + View as JSON + +
+
+ )}
{show &&
{children}
}
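For orientation, the CardSidebar change above swaps the headlessui Menu for a hand-rolled dropdown driven by local state and the new useClickOutside hook added later in this patch. A minimal sketch of that pattern, assuming callback refs capture the toggle button and the menu panel (the DropdownSketch name, markup, and class names here are illustrative, not the actual component):

import { ReactNode, useState } from 'react'

import { useClickOutside } from '@/hooks/useClickOutside'

// Illustrative only: toggle a menu and close it when a click lands outside it.
export default function DropdownSketch({ children }: { children: ReactNode }) {
  const [more, setMore] = useState(false)
  const [menu, setMenu] = useState<HTMLDivElement | null>(null)
  const [toggle, setToggle] = useState<HTMLButtonElement | null>(null)

  // Close when a click lands outside both the toggle button and the menu panel
  useClickOutside(() => setMore(false), null, [menu, toggle])

  return (
    <div className="relative">
      <button ref={setToggle} onClick={() => setMore(!more)}>
        More
      </button>
      {more && (
        <div ref={setMenu} className="absolute right-0 z-20">
          {children}
        </div>
      )}
    </div>
  )
}

Passing the toggle and the menu as explicit nodes means clicks on either element are ignored by the hook, so only a genuine outside click closes the menu.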
diff --git a/web/containers/DropdownListSidebar/index.tsx b/web/containers/DropdownListSidebar/index.tsx index b159a131e..589847fdf 100644 --- a/web/containers/DropdownListSidebar/index.tsx +++ b/web/containers/DropdownListSidebar/index.tsx @@ -1,104 +1,114 @@ -import { Fragment, useEffect, useState } from 'react' - -import { Listbox, Transition } from '@headlessui/react' -import { CheckIcon, ChevronUpDownIcon } from '@heroicons/react/20/solid' +import { useEffect, useState } from 'react' import { Model } from '@janhq/core' -import { atom, useSetAtom } from 'jotai' +import { + Button, + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectTrigger, + SelectValue, +} from '@janhq/uikit' + +import { atom, useAtomValue, useSetAtom } from 'jotai' + +import { MonitorIcon } from 'lucide-react' + import { twMerge } from 'tailwind-merge' +import { MainViewState } from '@/constants/screens' + import { getDownloadedModels } from '@/hooks/useGetDownloadedModels' +import { useMainViewState } from '@/hooks/useMainViewState' + +import { toGigabytes } from '@/utils/converter' + +import { activeThreadAtom } from '@/helpers/atoms/Conversation.atom' + export const selectedModelAtom = atom(undefined) export default function DropdownListSidebar() { const [downloadedModels, setDownloadedModels] = useState([]) - const [selected, setSelected] = useState() const setSelectedModel = useSetAtom(selectedModelAtom) + const activeThread = useAtomValue(activeThreadAtom) + const [selected, setSelected] = useState() + const { setMainViewState } = useMainViewState() useEffect(() => { getDownloadedModels().then((downloadedModels) => { setDownloadedModels(downloadedModels) - if (downloadedModels.length > 0) { - setSelected(downloadedModels[0]) - setSelectedModel(downloadedModels[0]) + setSelected( + downloadedModels.filter( + (x) => x.id === activeThread?.assistants[0].model.id + )[0] || downloadedModels[0] + ) + setSelectedModel( + downloadedModels.filter( + (x) => x.id === activeThread?.assistants[0].model.id + )[0] || downloadedModels[0] + ) } }) - }, []) - - if (!selected) return null + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [activeThread]) return ( - { - setSelected(model) - setSelectedModel(model) + ) } diff --git a/web/containers/Layout/BottomBar/DownloadingState/index.tsx b/web/containers/Layout/BottomBar/DownloadingState/index.tsx index 1aad0fb1c..0648508d0 100644 --- a/web/containers/Layout/BottomBar/DownloadingState/index.tsx +++ b/web/containers/Layout/BottomBar/DownloadingState/index.tsx @@ -12,18 +12,14 @@ import { ModalTrigger, } from '@janhq/uikit' -import { useAtomValue } from 'jotai' - import { useDownloadState } from '@/hooks/useDownloadState' import { formatDownloadPercentage } from '@/utils/converter' import { extensionManager } from '@/extension' -import { downloadingModelsAtom } from '@/helpers/atoms/Model.atom' export default function DownloadingState() { const { downloadStates } = useDownloadState() - const models = useAtomValue(downloadingModelsAtom) const totalCurrentProgress = downloadStates .map((a) => a.size.transferred + a.size.transferred) diff --git a/web/containers/Layout/BottomBar/index.tsx b/web/containers/Layout/BottomBar/index.tsx index 1a264da02..fb0ef5ed6 100644 --- a/web/containers/Layout/BottomBar/index.tsx +++ b/web/containers/Layout/BottomBar/index.tsx @@ -30,7 +30,7 @@ const BottomBar = () => { const { downloadStates } = useDownloadState() return ( -
+
{progress && progress > 0 ? ( @@ -49,7 +49,7 @@ const BottomBar = () => { name="Active model:" value={ activeModel?.id || ( - +   to show your model @@ -63,7 +63,7 @@ const BottomBar = () => { diff --git a/web/containers/Layout/Ribbon/index.tsx b/web/containers/Layout/Ribbon/index.tsx index 6babadb9d..fa6d53193 100644 --- a/web/containers/Layout/Ribbon/index.tsx +++ b/web/containers/Layout/Ribbon/index.tsx @@ -1,5 +1,3 @@ -import { useContext } from 'react' - import { Tooltip, TooltipContent, @@ -11,9 +9,8 @@ import { motion as m } from 'framer-motion' import { MessageCircleIcon, SettingsIcon, - DatabaseIcon, - CpuIcon, - BookOpenIcon, + MonitorIcon, + LayoutGridIcon, } from 'lucide-react' import { twMerge } from 'tailwind-merge' @@ -34,36 +31,51 @@ export default function RibbonNav() { const primaryMenus = [ { - name: 'Getting Started', - icon: , - state: MainViewState.Welcome, + name: 'Thread', + icon: ( + + ), + state: MainViewState.Thread, }, { - name: 'Chat', - icon: , - state: MainViewState.Chat, + name: 'Hub', + icon: ( + + ), + state: MainViewState.Hub, }, ] const secondaryMenus = [ { - name: 'Explore Models', - icon: , - state: MainViewState.ExploreModels, - }, - { - name: 'My Models', - icon: , - state: MainViewState.MyModels, + name: 'System Monitor', + icon: ( + + ), + state: MainViewState.SystemMonitor, }, { name: 'Settings', - icon: , - state: MainViewState.Setting, + icon: ( + + ), + state: MainViewState.Settings, }, ] return ( -
+
@@ -90,7 +102,7 @@ export default function RibbonNav() {
{isActive && ( )} @@ -126,7 +138,7 @@ export default function RibbonNav() {
{isActive && ( )} diff --git a/web/containers/Layout/TopBar/CommandListDownloadedModel/index.tsx b/web/containers/Layout/TopBar/CommandListDownloadedModel/index.tsx index 0fb278080..d0ea6b26b 100644 --- a/web/containers/Layout/TopBar/CommandListDownloadedModel/index.tsx +++ b/web/containers/Layout/TopBar/CommandListDownloadedModel/index.tsx @@ -85,12 +85,12 @@ export default function CommandListDownloadedModel() { { - setMainViewState(MainViewState.ExploreModels) + setMainViewState(MainViewState.Hub) setOpen(false) }} > - Explore Models + Explore The Hub diff --git a/web/containers/Layout/TopBar/CommandSearch/index.tsx b/web/containers/Layout/TopBar/CommandSearch/index.tsx index 2e20ff583..d83feb22e 100644 --- a/web/containers/Layout/TopBar/CommandSearch/index.tsx +++ b/web/containers/Layout/TopBar/CommandSearch/index.tsx @@ -1,7 +1,6 @@ import { Fragment, useState, useEffect } from 'react' import { - Button, CommandModal, CommandEmpty, CommandGroup, @@ -11,14 +10,7 @@ import { CommandList, } from '@janhq/uikit' -import { useAtomValue, useSetAtom } from 'jotai' -import { - MessageCircleIcon, - SettingsIcon, - DatabaseIcon, - CpuIcon, - BookOpenIcon, -} from 'lucide-react' +import { MessageCircleIcon, SettingsIcon, LayoutGridIcon } from 'lucide-react' import ShortCut from '@/containers/Shortcut' @@ -26,43 +18,27 @@ import { MainViewState } from '@/constants/screens' import { useMainViewState } from '@/hooks/useMainViewState' -import { showRightSideBarAtom } from '@/screens/Chat/Sidebar' - -import { activeThreadAtom } from '@/helpers/atoms/Conversation.atom' - export default function CommandSearch() { const { setMainViewState } = useMainViewState() const [open, setOpen] = useState(false) - const setShowRightSideBar = useSetAtom(showRightSideBarAtom) - const activeThread = useAtomValue(activeThreadAtom) const menus = [ - { - name: 'Getting Started', - icon: , - state: MainViewState.Welcome, - }, { name: 'Chat', icon: ( ), - state: MainViewState.Chat, + state: MainViewState.Thread, }, { - name: 'Explore Models', - icon: , - state: MainViewState.ExploreModels, - }, - { - name: 'My Models', - icon: , - state: MainViewState.MyModels, + name: 'Hub', + icon: , + state: MainViewState.Hub, }, { name: 'Settings', icon: , - state: MainViewState.Setting, + state: MainViewState.Settings, shortcut: , }, ] @@ -75,7 +51,7 @@ export default function CommandSearch() { } if (e.key === ',' && (e.metaKey || e.ctrlKey)) { e.preventDefault() - setMainViewState(MainViewState.Setting) + setMainViewState(MainViewState.Settings) } } document.addEventListener('keydown', down) @@ -85,7 +61,8 @@ export default function CommandSearch() { return ( -
+ {/* Temporarily disable the view search input until we have proper UI placement, but keep the Cmd + K function for showing the list page */} + {/*
-
- +
*/} @@ -124,15 +100,6 @@ export default function CommandSearch() { - {activeThread && ( - - )} ) } diff --git a/web/containers/Layout/TopBar/index.tsx b/web/containers/Layout/TopBar/index.tsx index 5ab4ebc84..aa7912bd3 100644 --- a/web/containers/Layout/TopBar/index.tsx +++ b/web/containers/Layout/TopBar/index.tsx @@ -1,21 +1,86 @@ +import { useAtomValue, useSetAtom } from 'jotai' +import { PanelLeftIcon, PenSquareIcon, PanelRightIcon } from 'lucide-react' + import CommandListDownloadedModel from '@/containers/Layout/TopBar/CommandListDownloadedModel' import CommandSearch from '@/containers/Layout/TopBar/CommandSearch' +import { MainViewState } from '@/constants/screens' + +import { useCreateNewThread } from '@/hooks/useCreateNewThread' +import useGetAssistants from '@/hooks/useGetAssistants' import { useMainViewState } from '@/hooks/useMainViewState' +import { showRightSideBarAtom } from '@/screens/Chat/Sidebar' + +import { activeThreadAtom } from '@/helpers/atoms/Conversation.atom' + const TopBar = () => { - const { viewStateName } = useMainViewState() + const activeThread = useAtomValue(activeThreadAtom) + const { mainViewState } = useMainViewState() + const { requestCreateNewThread } = useCreateNewThread() + const { assistants } = useGetAssistants() + const setShowRightSideBar = useSetAtom(showRightSideBarAtom) + + const titleScreen = (viewStateName: MainViewState) => { + switch (viewStateName) { + case MainViewState.Thread: + return activeThread ? activeThread?.title : 'New Thread' + + default: + return MainViewState[viewStateName]?.replace(/([A-Z])/g, ' $1').trim() + } + } + + const onCreateConversationClick = async () => { + if (assistants.length === 0) { + alert('No assistant available') + return + } + requestCreateNewThread(assistants[0]) + } return ( -
+
+ {mainViewState === MainViewState.Thread && ( +
+ )}
-
- - {viewStateName.replace(/([A-Z])/g, ' $1').trim()} - -
+ {mainViewState === MainViewState.Thread ? ( +
+
+
+ +
+
+ +
+
+ + {titleScreen(mainViewState)} + + {activeThread && ( +
setShowRightSideBar((show) => !show)} + > + +
+ )} +
+ ) : ( +
+ + {titleScreen(mainViewState)} + +
+ )} - {/* Command without trigger interface */}
diff --git a/web/containers/ModalCancelDownload/index.tsx b/web/containers/ModalCancelDownload/index.tsx index 8619c543c..4153b89ee 100644 --- a/web/containers/ModalCancelDownload/index.tsx +++ b/web/containers/ModalCancelDownload/index.tsx @@ -35,7 +35,6 @@ export default function ModalCancelDownload({ model, isFromList }: Props) { // eslint-disable-next-line react-hooks/exhaustive-deps [model.id] ) - const models = useAtomValue(downloadingModelsAtom) const downloadState = useAtomValue(downloadAtom) const cancelText = `Cancel ${formatDownloadPercentage(downloadState.percent)}` diff --git a/web/containers/Shortcut/index.tsx b/web/containers/Shortcut/index.tsx index 67a5f8d0c..ae93a827e 100644 --- a/web/containers/Shortcut/index.tsx +++ b/web/containers/Shortcut/index.tsx @@ -14,7 +14,7 @@ export default function ShortCut(props: { menu: string }) { } return ( -
+

{getSymbol(os) + ' + ' + menu}

) diff --git a/web/containers/Toast/index.tsx b/web/containers/Toast/index.tsx index 50f1f0f29..c5e5f03da 100644 --- a/web/containers/Toast/index.tsx +++ b/web/containers/Toast/index.tsx @@ -16,7 +16,7 @@ export function toaster(props: Props) { return (
{ const newData: Record = { ...get(chatMessages), } - newData[id] = newData[id].filter((e) => e.role === ChatCompletionRole.System) + newData[id] = newData[id]?.filter((e) => e.role === ChatCompletionRole.System) set(chatMessages, newData) }) diff --git a/web/helpers/atoms/SystemBar.atom.ts b/web/helpers/atoms/SystemBar.atom.ts index 9b44c2e92..aa5e77d58 100644 --- a/web/helpers/atoms/SystemBar.atom.ts +++ b/web/helpers/atoms/SystemBar.atom.ts @@ -1,3 +1,6 @@ import { atom } from 'jotai' export const totalRamAtom = atom(0) +export const usedRamAtom = atom(0) + +export const cpuUsageAtom = atom(0) diff --git a/web/hooks/useClickOutside.ts b/web/hooks/useClickOutside.ts new file mode 100644 index 000000000..4e8e5d2c3 --- /dev/null +++ b/web/hooks/useClickOutside.ts @@ -0,0 +1,42 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { useEffect, useRef } from 'react' + +const DEFAULT_EVENTS = ['mousedown', 'touchstart'] + +export function useClickOutside( + handler: () => void, + events?: string[] | null, + nodes?: (HTMLElement | null)[] +) { + const ref = useRef() + + useEffect(() => { + const listener = (event: any) => { + const { target } = event ?? {} + if (Array.isArray(nodes)) { + const shouldIgnore = + target?.hasAttribute('data-ignore-outside-clicks') || + (!document.body.contains(target) && target.tagName !== 'HTML') + const shouldTrigger = nodes.every( + (node) => !!node && !event.composedPath().includes(node) + ) + shouldTrigger && !shouldIgnore && handler() + } else if (ref.current && !ref.current.contains(target)) { + handler() + } + } + + ;(events || DEFAULT_EVENTS).forEach((fn) => + document.addEventListener(fn, listener) + ) + + return () => { + ;(events || DEFAULT_EVENTS).forEach((fn) => + document.removeEventListener(fn, listener) + ) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ref, handler, nodes]) + + return ref +} diff --git a/web/hooks/useCreateNewThread.ts b/web/hooks/useCreateNewThread.ts index 9ccecee7a..7526feb49 100644 --- a/web/hooks/useCreateNewThread.ts +++ b/web/hooks/useCreateNewThread.ts @@ -40,7 +40,6 @@ export const useCreateNewThread = () => { const setActiveThreadId = useSetAtom(setActiveThreadIdAtom) const [threadStates, setThreadStates] = useAtom(threadStatesAtom) const threads = useAtomValue(threadsAtom) - const activeThread = useAtomValue(activeThreadAtom) const updateThread = useSetAtom(updateThreadAtom) const requestCreateNewThread = async (assistant: Assistant) => { @@ -69,6 +68,7 @@ export const useCreateNewThread = () => { stream: false, }, }, + instructions: assistant.instructions, } const threadId = generateThreadId(assistant.id) const thread: Thread = { @@ -93,20 +93,18 @@ export const useCreateNewThread = () => { setActiveThreadId(thread.id) } - function updateThreadTitle(title: string) { - if (!activeThread) return - const updatedConv: Thread = { - ...activeThread, - title, + function updateThreadMetadata(thread: Thread) { + const updatedThread: Thread = { + ...thread, } - updateThread(updatedConv) + updateThread(updatedThread) extensionManager .get(ExtensionType.Conversational) - ?.saveThread(updatedConv) + ?.saveThread(updatedThread) } return { requestCreateNewThread, - updateThreadTitle, + updateThreadMetadata, } } diff --git a/web/hooks/useDeleteConversation.ts b/web/hooks/useDeleteConversation.ts index 1cfceebcf..b02796b10 100644 --- a/web/hooks/useDeleteConversation.ts +++ b/web/hooks/useDeleteConversation.ts @@ -17,7 +17,6 @@ import { } from '@/helpers/atoms/ChatMessage.atom' import { threadsAtom, 
- getActiveThreadIdAtom, setActiveThreadIdAtom, } from '@/helpers/atoms/Conversation.atom' @@ -25,14 +24,13 @@ export default function useDeleteThread() { const { activeModel } = useActiveModel() const [threads, setThreads] = useAtom(threadsAtom) const setCurrentPrompt = useSetAtom(currentPromptAtom) - const activeThreadId = useAtomValue(getActiveThreadIdAtom) const messages = useAtomValue(getCurrentChatMessagesAtom) const setActiveConvoId = useSetAtom(setActiveThreadIdAtom) const deleteMessages = useSetAtom(deleteConversationMessage) const cleanMessages = useSetAtom(cleanConversationMessages) - const cleanThread = async () => { + const cleanThread = async (activeThreadId: string) => { if (activeThreadId) { const thread = threads.filter((c) => c.id === activeThreadId)[0] cleanMessages(activeThreadId) @@ -46,7 +44,7 @@ export default function useDeleteThread() { } } - const deleteThread = async () => { + const deleteThread = async (activeThreadId: string) => { if (!activeThreadId) { alert('No active thread') return @@ -60,8 +58,8 @@ export default function useDeleteThread() { deleteMessages(activeThreadId) setCurrentPrompt('') toaster({ - title: 'Chat successfully deleted.', - description: `Chat with ${activeModel?.name} has been successfully deleted.`, + title: 'Thread successfully deleted.', + description: `Thread with ${activeModel?.name} has been successfully deleted.`, }) if (availableThreads.length > 0) { setActiveConvoId(availableThreads[0].id) diff --git a/web/hooks/useDownloadModel.ts b/web/hooks/useDownloadModel.ts index 6bcffdaed..b91ac2a57 100644 --- a/web/hooks/useDownloadModel.ts +++ b/web/hooks/useDownloadModel.ts @@ -1,6 +1,6 @@ import { Model, ExtensionType, ModelExtension } from '@janhq/core' -import { useAtom, useAtomValue } from 'jotai' +import { useAtom } from 'jotai' import { useDownloadState } from './useDownloadState' diff --git a/web/hooks/useGetSystemResources.ts b/web/hooks/useGetSystemResources.ts index ef4b2ef08..e2de61519 100644 --- a/web/hooks/useGetSystemResources.ts +++ b/web/hooks/useGetSystemResources.ts @@ -6,12 +6,18 @@ import { MonitoringExtension } from '@janhq/core' import { useSetAtom } from 'jotai' import { extensionManager } from '@/extension/ExtensionManager' -import { totalRamAtom } from '@/helpers/atoms/SystemBar.atom' +import { + cpuUsageAtom, + totalRamAtom, + usedRamAtom, +} from '@/helpers/atoms/SystemBar.atom' export default function useGetSystemResources() { const [ram, setRam] = useState(0) const [cpu, setCPU] = useState(0) const setTotalRam = useSetAtom(totalRamAtom) + const setUsedRam = useSetAtom(usedRamAtom) + const setCpuUsage = useSetAtom(cpuUsageAtom) const getSystemResources = async () => { if ( @@ -27,10 +33,12 @@ export default function useGetSystemResources() { const ram = (resourceInfor?.mem?.active ?? 0) / (resourceInfor?.mem?.total ?? 1) + if (resourceInfor?.mem?.active) setUsedRam(resourceInfor.mem.active) if (resourceInfor?.mem?.total) setTotalRam(resourceInfor.mem.total) setRam(Math.round(ram * 100)) setCPU(Math.round(currentLoadInfor?.currentLoad ?? 0)) + setCpuUsage(Math.round(currentLoadInfor?.currentLoad ?? 
0)) } useEffect(() => { @@ -45,6 +53,7 @@ export default function useGetSystemResources() { // clean up interval return () => clearInterval(intervalId) + // eslint-disable-next-line react-hooks/exhaustive-deps }, []) return { diff --git a/web/hooks/useMainViewState.ts b/web/hooks/useMainViewState.ts index 3dccbb704..91c1a1c4d 100644 --- a/web/hooks/useMainViewState.ts +++ b/web/hooks/useMainViewState.ts @@ -2,7 +2,7 @@ import { atom, useAtom } from 'jotai' import { MainViewState } from '@/constants/screens' -const currentMainViewState = atom(MainViewState.Welcome) +const currentMainViewState = atom(MainViewState.Thread) export function useMainViewState() { const [mainViewState, setMainViewState] = useAtom(currentMainViewState) diff --git a/web/hooks/useSendChatMessage.ts b/web/hooks/useSendChatMessage.ts index 6b60a0e04..9cf61969d 100644 --- a/web/hooks/useSendChatMessage.ts +++ b/web/hooks/useSendChatMessage.ts @@ -1,3 +1,5 @@ +import { useState } from 'react' + import { ChatCompletionMessage, ChatCompletionRole, @@ -10,7 +12,7 @@ import { ThreadMessage, events, } from '@janhq/core' -import { ConversationalExtension, InferenceExtension } from '@janhq/core' +import { ConversationalExtension } from '@janhq/core' import { useAtom, useAtomValue, useSetAtom } from 'jotai' import { ulid } from 'ulid' @@ -44,6 +46,7 @@ export default function useSendChatMessage() { const { activeModel } = useActiveModel() const selectedModel = useAtomValue(selectedModelAtom) const { startModel } = useActiveModel() + const [queuedMessage, setQueuedMessage] = useState(false) const sendChatMessage = async () => { if (!currentPrompt || currentPrompt.trim().length === 0) { @@ -61,14 +64,15 @@ export default function useSendChatMessage() { } const assistantId = activeThread.assistants[0].assistant_id ?? '' const assistantName = activeThread.assistants[0].assistant_name ?? '' + const instructions = activeThread.assistants[0].instructions ?? '' const updatedThread: Thread = { ...activeThread, isFinishInit: true, - title: `${activeThread.assistants[0].assistant_name} with ${selectedModel.name}`, assistants: [ { assistant_id: assistantId, assistant_name: assistantName, + instructions: instructions, model: { id: selectedModel.id, settings: selectedModel.settings, @@ -90,18 +94,29 @@ export default function useSendChatMessage() { const prompt = currentPrompt.trim() setCurrentPrompt('') - const messages: ChatCompletionMessage[] = currentMessages - .map((msg) => ({ - role: msg.role, - content: msg.content[0]?.text.value ?? '', - })) - .concat([ - { - role: ChatCompletionRole.User, - content: prompt, - } as ChatCompletionMessage, - ]) - console.debug(`Sending messages: ${JSON.stringify(messages, null, 2)}`) + const messages: ChatCompletionMessage[] = [ + activeThread.assistants[0]?.instructions, + ] + .map((instructions) => { + const systemMessage: ChatCompletionMessage = { + role: ChatCompletionRole.System, + content: instructions, + } + return systemMessage + }) + .concat( + currentMessages + .map((msg) => ({ + role: msg.role, + content: msg.content[0]?.text.value ?? '', + })) + .concat([ + { + role: ChatCompletionRole.User, + content: prompt, + } as ChatCompletionMessage, + ]) + ) const msgId = ulid() const messageRequest: MessageRequest = { id: msgId, @@ -136,17 +151,17 @@ export default function useSendChatMessage() { ?.addNewMessage(threadMessage) const modelId = selectedModel?.id ?? 
activeThread.assistants[0].model.id + if (activeModel?.id !== modelId) { - toaster({ - title: 'Message queued.', - description: 'It will be sent once the model is done loading', - }) + setQueuedMessage(true) await startModel(modelId) + setQueuedMessage(false) } events.emit(EventName.OnMessageSent, messageRequest) } return { sendChatMessage, + queuedMessage, } } diff --git a/web/package.json b/web/package.json index 16522cace..922bc556a 100644 --- a/web/package.json +++ b/web/package.json @@ -33,6 +33,7 @@ "react-dom": "18.2.0", "react-hook-form": "^7.47.0", "react-hot-toast": "^2.4.1", + "react-scroll-to-bottom": "^4.2.0", "react-toastify": "^9.1.3", "sass": "^1.69.4", "tailwind-merge": "^2.0.0", @@ -48,6 +49,7 @@ "@types/node": "20.8.10", "@types/react": "18.2.34", "@types/react-dom": "18.2.14", + "@types/react-scroll-to-bottom": "^4.2.4", "@types/uuid": "^9.0.6", "@typescript-eslint/eslint-plugin": "^6.8.0", "@typescript-eslint/parser": "^6.8.0", diff --git a/web/screens/Chat/ChatBody/index.tsx b/web/screens/Chat/ChatBody/index.tsx index 10d008661..0a92b7a6c 100644 --- a/web/screens/Chat/ChatBody/index.tsx +++ b/web/screens/Chat/ChatBody/index.tsx @@ -1,17 +1,65 @@ +import { Fragment } from 'react' + +import ScrollToBottom from 'react-scroll-to-bottom' + +import { Button } from '@janhq/uikit' import { useAtomValue } from 'jotai' +import LogoMark from '@/containers/Brand/Logo/Mark' + +import { MainViewState } from '@/constants/screens' + +import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels' + +import { useMainViewState } from '@/hooks/useMainViewState' + import ChatItem from '../ChatItem' import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom' const ChatBody: React.FC = () => { const messages = useAtomValue(getCurrentChatMessagesAtom) + const { downloadedModels } = useGetDownloadedModels() + const { setMainViewState } = useMainViewState() + + if (downloadedModels.length === 0) + return ( +
+ +

Welcome!

+

You need to download your first model

+ +
+ ) + return ( -
- {messages.map((message) => ( - - ))} -
+ + {messages.length === 0 ? ( +
+ +

How can I help you?

+
+ ) : ( + + {messages.map((message) => ( + + ))} + + )} +
) } diff --git a/web/screens/Chat/ChatItem/index.tsx b/web/screens/Chat/ChatItem/index.tsx index 5f192d436..fcc6cbab5 100644 --- a/web/screens/Chat/ChatItem/index.tsx +++ b/web/screens/Chat/ChatItem/index.tsx @@ -7,10 +7,7 @@ import SimpleTextMessage from '../SimpleTextMessage' type Ref = HTMLDivElement const ChatItem = forwardRef((message, ref) => ( -
+
)) diff --git a/web/screens/Chat/MessageToolbar/index.tsx b/web/screens/Chat/MessageToolbar/index.tsx index 5fe432e62..5380c7e29 100644 --- a/web/screens/Chat/MessageToolbar/index.tsx +++ b/web/screens/Chat/MessageToolbar/index.tsx @@ -1,7 +1,4 @@ -import { useMemo } from 'react' - import { - ChatCompletionRole, ChatCompletionMessage, EventName, MessageRequest, @@ -11,8 +8,8 @@ import { events, } from '@janhq/core' import { ConversationalExtension, InferenceExtension } from '@janhq/core' -import { atom, useAtomValue, useSetAtom } from 'jotai' -import { RefreshCcw, ClipboardCopy, Trash2Icon, StopCircle } from 'lucide-react' +import { useAtomValue, useSetAtom } from 'jotai' +import { RefreshCcw, Copy, Trash2Icon, StopCircle } from 'lucide-react' import { twMerge } from 'tailwind-merge' @@ -23,21 +20,17 @@ import { deleteMessageAtom, getCurrentChatMessagesAtom, } from '@/helpers/atoms/ChatMessage.atom' -import { - activeThreadAtom, - threadStatesAtom, -} from '@/helpers/atoms/Conversation.atom' +import { activeThreadAtom } from '@/helpers/atoms/Conversation.atom' const MessageToolbar = ({ message }: { message: ThreadMessage }) => { const deleteMessage = useSetAtom(deleteMessageAtom) const thread = useAtomValue(activeThreadAtom) const messages = useAtomValue(getCurrentChatMessagesAtom) - const threadStateAtom = useMemo( - () => atom((get) => get(threadStatesAtom)[thread?.id ?? '']), - [thread?.id] - ) - const threadState = useAtomValue(threadStateAtom) - + // const threadStateAtom = useMemo( + // () => atom((get) => get(threadStatesAtom)[thread?.id ?? '']), + // [thread?.id] + // ) + // const threadState = useAtomValue(threadStateAtom) const stopInference = async () => { await extensionManager .get(ExtensionType.Inference) @@ -51,12 +44,7 @@ const MessageToolbar = ({ message }: { message: ThreadMessage }) => { } return ( -
+
{message.status === MessageStatus.Pending && (
{ }) }} > - +
(false) +export const showRightSideBarAtom = atom(true) export default function Sidebar() { const showing = useAtomValue(showRightSideBarAtom) const activeThread = useAtomValue(activeThreadAtom) const selectedModel = useAtomValue(selectedModelAtom) - const { updateThreadTitle } = useCreateNewThread() + const { updateThreadMetadata } = useCreateNewThread() const onReviewInFinderClick = async (type: string) => { if (!activeThread) return if (!activeThread.isFinishInit) { - alert('Thread is not ready') + alert('Thread is not started yet') return } @@ -56,7 +61,7 @@ export default function Sidebar() { const onViewJsonClick = async (type: string) => { if (!activeThread) return if (!activeThread.isFinishInit) { - alert('Thread is not ready') + alert('Thread is not started yet') return } @@ -87,44 +92,104 @@ export default function Sidebar() { return (
-
+
- - updateThreadTitle(title ?? '')} - /> +
+
+ + { + if (activeThread) + updateThreadMetadata({ + ...activeThread, + title: e.target.value || '', + }) + }} + /> +
+
+ + + {activeThread?.id || '-'} + +
+
- +
+
+ + + {activeThread?.assistants[0].assistant_name ?? '-'} + +
+
+ +