chore: janhq to menloresearch

parent 39e5649454
commit 33055b8db6

.github/ISSUE_TEMPLATE/config.yml (vendored): 2 lines changed
@@ -3,5 +3,5 @@ blank_issues_enabled: true
 contact_links:
   - name: "\1F4AC Jan Discussions"
-    url: "https://github.com/orgs/janhq/discussions/categories/q-a"
+    url: "https://github.com/orgs/menloresearch/discussions/categories/q-a"
     about: "Get help, discuss features & roadmap, and share your projects"
.github/workflows/nightly-integrate-cortex-cpp.yml (vendored): 158 lines changed
@@ -18,67 +18,67 @@ jobs:
       pr_created: ${{ steps.check-update.outputs.pr_created }}

     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
         with:
           submodules: recursive
           ref: dev
           fetch-depth: 0
           token: ${{ secrets.PAT_SERVICE_ACCOUNT }}

       - name: Configure Git
         run: |
           git config --global user.name 'github-actions[bot]'
           git config --global user.email 'github-actions[bot]@users.noreply.github.com'

       - name: Update submodule to latest release
         id: check-update
         env:
           GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
         run: |
-          curl -s https://api.github.com/repos/janhq/cortex/releases > /tmp/github_api_releases.json
+          curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json
           latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)

           get_asset_count() {
             local version_name=$1
             cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
           }

           cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
           current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)

           current_version_asset_count=$(get_asset_count "$current_version_name")
           latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")

           if [ "$current_version_name" = "$latest_prerelease_name" ]; then
             echo "cortex cpp remote repo doesn't have update today, skip update cortex.cpp for today nightly build"
             echo "::set-output name=pr_created::false"
             exit 0
           fi

           if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
             echo "Latest prerelease version has different number of assets, somethink went wrong, skip update cortex.cpp for today nightly build"
             echo "::set-output name=pr_created::false"
             exit 1
           fi

           echo $latest_prerelease_name > $cortex_cpp_version_file_path
           echo "Updated version from $current_version_name to $latest_prerelease_name."
           echo "::set-output name=pr_created::true"

           git add -f $cortex_cpp_version_file_path
           git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
           branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
           git checkout -b $branch_name
           git push origin $branch_name

           pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
           pr_body="This PR updates the Update cortex cpp nightly to version $latest_prerelease_name"

           gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA

           pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
           echo "::set-output name=pr_number::$pr_number"

   check-and-merge-pr:
     needs: update-submodule
@@ -89,39 +89,39 @@ jobs:
       pull-requests: write

     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
         with:
           submodules: recursive
           fetch-depth: 0
           token: ${{ secrets.PAT_SERVICE_ACCOUNT }}

       - name: Wait for CI to pass
         env:
           GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
         run: |
           pr_number=${{ needs.update-submodule.outputs.pr_number }}
           while true; do
             ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
             if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
               echo "CI is still running, waiting..."
               sleep 60
             else
               echo "CI has completed, checking states..."
               ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
               if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
                 echo "CI failed, exiting..."
                 exit 1
               else
                 echo "CI passed, merging PR..."
                 break
               fi
             fi
           done

       - name: Merge the PR
         env:
           GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
         run: |
           pr_number=${{ needs.update-submodule.outputs.pr_number }}
           gh pr merge $pr_number --merge --admin
@@ -13,46 +13,46 @@ jobs:
     outputs:
       new_version: ${{ steps.version_update.outputs.new_version }}
     steps:
       - name: Install jq
         uses: dcarbone/install-jq-action@v2.0.1

       - name: Get tag
         if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
         id: tag
         uses: dawidd6/action-get-tag@v1

       - name: Update app version based on latest release tag with build number
         id: version_update
         run: |
           # Function to get the latest release tag
           get_latest_tag() {
             local retries=0
             local max_retries=3
             local tag
             while [ $retries -lt $max_retries ]; do
-              tag=$(curl -s https://api.github.com/repos/janhq/jan/releases/latest | jq -r .tag_name)
+              tag=$(curl -s https://api.github.com/repos/menloresearch/jan/releases/latest | jq -r .tag_name)
               if [ -n "$tag" ] && [ "$tag" != "null" ]; then
                 echo $tag
                 return
               else
                 let retries++
                 echo "Retrying... ($retries/$max_retries)"
                 sleep 2
               fi
             done
             echo "Failed to fetch latest tag after $max_retries attempts."
             exit 1
           }

           if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
             echo "Tag detected, set output follow tag"
             echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
           else
             # Get the latest release tag from GitHub API
             LATEST_TAG=$(get_latest_tag)

             # Remove the 'v' and append the build number to the version
             new_version="${LATEST_TAG#v}-${GITHUB_RUN_NUMBER}"
             echo "New version: $new_version"
             echo "::set-output name=new_version::$new_version"
           fi
@@ -34,7 +34,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v3
         with:
-          fetch-depth: "0"
+          fetch-depth: '0'
           token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
           ref: ${{ inputs.ref }}
@@ -51,6 +51,6 @@ jobs:
             - macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg
             - Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb
             - Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage
-            - Github action run: https://github.com/janhq/jan/actions/runs/{{ GITHUB_RUN_ID }}
+            - Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
         env:
           DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
@@ -6,8 +6,8 @@ First off, thank you for considering contributing to jan. It's people like you t

 ### Reporting Bugs

-- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/janhq/jan/issues).
-- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/janhq/jan/issues/new).
+- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/menloresearch/jan/issues).
+- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/menloresearch/jan/issues/new).

 ### Suggesting Enhancements
README.md: 64 lines changed
@@ -4,18 +4,18 @@

 <p align="center">
   <!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
-  <img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/janhq/jan"/>
-  <img alt="Github Last Commit" src="https://img.shields.io/github/last-commit/janhq/jan"/>
-  <img alt="Github Contributors" src="https://img.shields.io/github/contributors/janhq/jan"/>
-  <img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/janhq/jan"/>
+  <img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/menloresearch/jan"/>
+  <img alt="Github Last Commit" src="https://img.shields.io/github/last-commit/menloresearch/jan"/>
+  <img alt="Github Contributors" src="https://img.shields.io/github/contributors/menloresearch/jan"/>
+  <img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/menloresearch/jan"/>
   <img alt="Discord" src="https://img.shields.io/discord/1107178041848909847?label=discord"/>
 </p>

 <p align="center">
   <a href="https://jan.ai/docs/quickstart">Getting Started</a>
   - <a href="https://jan.ai/docs">Docs</a>
-  - <a href="https://github.com/janhq/jan/releases">Changelog</a>
-  - <a href="https://github.com/janhq/jan/issues">Bug reports</a>
+  - <a href="https://github.com/menloresearch/jan/releases">Changelog</a>
+  - <a href="https://github.com/menloresearch/jan/issues">Bug reports</a>
   - <a href="https://discord.gg/AsJ8krTT3N">Discord</a>
 </p>
@@ -23,10 +23,9 @@
   ⚠️ <b> Jan is currently in Development</b>: Expect breaking changes and bugs!
 </p>

 Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for a layperson to download and run LLMs and use AI with **full control** and **privacy**.

-Jan is powered by [Cortex](https://github.com/janhq/cortex.cpp), our embeddable local AI engine that runs on any hardware.
+Jan is powered by [Cortex](https://github.com/menloresearch/cortex.cpp), our embeddable local AI engine that runs on any hardware.
 From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:

 - [x] NVIDIA GPUs (fast)
@@ -36,6 +35,7 @@ From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:
 - [x] Windows x64

 #### Features:

 - [Model Library](https://jan.ai/docs/models/manage-models#add-models) with popular LLMs like Llama, Gemma, Mistral, or Qwen
 - Connect to [Remote AI APIs](https://jan.ai/docs/remote-models/openai) like Groq and OpenRouter
 - Local API Server with OpenAI-equivalent API
@@ -54,25 +54,25 @@ From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:
     <td style="text-align:center"><b>Stable (Recommended)</b></td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/latest/win-x64'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
         <b>jan.exe</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/latest/mac-universal'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
         <b>jan.dmg</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/latest/linux-amd64-deb'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.deb</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.AppImage</b>
       </a>
     </td>
@@ -81,25 +81,25 @@ From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:
     <td style="text-align:center"><b>Beta (Preview)</b></td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/beta/win-x64'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
         <b>jan.exe</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/beta/mac-universal'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
         <b>jan.dmg</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/beta/linux-amd64-deb'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.deb</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/beta/linux-amd64-appimage'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.AppImage</b>
       </a>
     </td>
@@ -108,58 +108,58 @@ From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:
     <td style="text-align:center"><b>Nightly Build (Experimental)</b></td>
     <td style="text-align:center">
      <a href='https://app.jan.ai/download/nightly/win-x64'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
         <b>jan.exe</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/nightly/mac-universal'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
         <b>jan.dmg</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/nightly/linux-amd64-deb'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.deb</b>
       </a>
     </td>
     <td style="text-align:center">
       <a href='https://app.jan.ai/download/nightly/linux-amd64-appimage'>
-        <img src='https://github.com/janhq/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
+        <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.AppImage</b>
       </a>
     </td>
   </tr>
 </table>

-Download the latest version of Jan at https://jan.ai/ or visit the [GitHub Releases](https://github.com/janhq/jan/releases) to download any previous release.
+Download the latest version of Jan at https://jan.ai/ or visit the [GitHub Releases](https://github.com/menloresearch/jan/releases) to download any previous release.

 ## Demo

 https://github.com/user-attachments/assets/c3592fa2-c504-4d9d-a885-7e00122a50f3

-*Real-time Video: Jan v0.5.7 on a Mac M2, 16GB Sonoma 14.2*
+_Real-time Video: Jan v0.5.7 on a Mac M2, 16GB Sonoma 14.2_

 ## Quicklinks

 ### Jan

 - [Jan Website](https://jan.ai/)
-- [Jan GitHub](https://github.com/janhq/jan)
+- [Jan GitHub](https://github.com/menloresearch/jan)
 - [Documentation](https://jan.ai/docs)
 - [Jan Changelog](https://jan.ai/changelog)
 - [Jan Blog](https://jan.ai/blog)

 ### Cortex.cpp

 Jan is powered by **Cortex.cpp**. It is a C++ command-line interface (CLI) designed as an alternative to [Ollama](https://ollama.com/). By default, it runs on the llama.cpp engine but also supports other engines, including ONNX and TensorRT-LLM, making it a multi-engine platform.

 - [Cortex Website](https://cortex.so/)
-- [Cortex GitHub](https://github.com/janhq/cortex.cpp)
+- [Cortex GitHub](https://github.com/menloresearch/cortex.cpp)
 - [Documentation](https://cortex.so/docs/)
 - [Models Library](https://cortex.so/models)
-- API Reference: *Under development*
+- API Reference: _Under development_

 ## Requirements for running Jan
@@ -179,17 +179,17 @@ Jan is powered by **Cortex.cpp**. It is a C++ command-line interface (CLI) desig
 ## Troubleshooting

 As Jan is in development mode, you might get stuck on a some common issues:

 - [Troubleshooting a broken build](https://jan.ai/docs/troubleshooting#broken-build)
 - [Troubleshooting NVIDIA GPU](https://jan.ai/docs/troubleshooting#troubleshooting-nvidia-gpu)
 - [Troubleshooting Something's Amiss](https://jan.ai/docs/troubleshooting#somethings-amiss)

 If you can't find what you need in our troubleshooting guide, feel free reach out to us for extra help:

 1. Copy your [error logs & device specifications](https://jan.ai/docs/troubleshooting#how-to-get-error-logs).
 2. Go to our [Discord](https://discord.com/invite/FTk2MvZwJH) & send it to **#🆘|get-help** channel for further support.

-*Check the logs to ensure the information is what you intend to send. Note that we retain your logs for only 24 hours, so report any issues promptly.*
+_Check the logs to ensure the information is what you intend to send. Note that we retain your logs for only 24 hours, so report any issues promptly._

 ## Contributing
@@ -206,7 +206,7 @@ Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) fi
 1. **Clone the repository and prepare:**

    ```bash
-   git clone https://github.com/janhq/jan
+   git clone https://github.com/menloresearch/jan
    cd jan
    git checkout -b DESIRED_BRANCH
    ```
@@ -219,8 +219,6 @@ Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) fi

    This will start the development server and open the desktop app.

 ### For production build

 ```bash
@@ -254,7 +252,7 @@ Beware of scams!
 - We will never request your personal information.
 - Our product is completely free; no paid version exists.
 - We do not have a token or ICO.
-- We are a [bootstrapped company](https://en.wikipedia.org/wiki/Bootstrapping), and don't have any external investors (*yet*). We're open to exploring opportunities with strategic partners want to tackle [our mission](https://jan.ai/about#mission) together.
+- We are a [bootstrapped company](https://en.wikipedia.org/wiki/Bootstrapping), and don't have any external investors (_yet_). We're open to exploring opportunities with strategic partners want to tackle [our mission](https://jan.ai/about#mission) together.

 ## License
@@ -28,7 +28,7 @@
   </screenshots>

   <url type="homepage">https://jan.ai/</url>
-  <url type="bugtracker">https://github.com/janhq/jan/issues</url>
+  <url type="bugtracker">https://github.com/menloresearch/jan/issues</url>

   <content_rating type="oars-1.1" />
@@ -8,37 +8,38 @@

 ```js
 // Web / extension runtime
-import * as core from "@janhq/core";
+import * as core from '@janhq/core'

 // Node runtime
-import * as node from "@janhq/core/node";
+import * as node from '@janhq/core/node'
 ```

 ## Build an Extension

-1. Download an extension template, for example, [https://github.com/janhq/extension-template](https://github.com/janhq/extension-template).
+1. Download an extension template, for example, [https://github.com/menloresearch/extension-template](https://github.com/menloresearch/extension-template).

 2. Update the source code:

    1. Open `index.ts` in your code editor.
    2. Rename the extension class from `SampleExtension` to your preferred extension name.
    3. Import modules from the core package.

       ```ts
-      import * as core from "@janhq/core";
+      import * as core from '@janhq/core'
       ```

    4. In the `onLoad()` method, add your code:

       ```ts
       // Example of listening to app events and providing customized inference logic:
-      import * as core from "@janhq/core";
+      import * as core from '@janhq/core'

       export default class MyExtension extends BaseExtension {
         // On extension load
         onLoad() {
-          core.events.on(MessageEvent.OnMessageSent, (data) => MyExtension.inference(data, this));
+          core.events.on(MessageEvent.OnMessageSent, (data) => MyExtension.inference(data, this))
         }

         // Customized inference logic
         private static inference(incomingMessage: MessageRequestData) {

           // Prepare customized message content
           const content: ThreadContent = {
             type: ContentType.Text,
@@ -46,16 +47,17 @@ import * as node from "@janhq/core/node";
             value: "I'm Jan Assistant!",
             annotations: [],
           },
-        };
+        }

         // Modify message and send out
         const outGoingMessage: ThreadMessage = {
           ...incomingMessage,
-          content
-        };
+          content,
+        }
       }
     }
     ```

 3. Build the extension:
    1. Navigate to the extension directory.
    2. Install dependencies.
@@ -18,7 +18,7 @@ We try to **keep routes consistent** to maintain SEO.

 ## How to Contribute

-Refer to the [Contributing Guide](https://github.com/janhq/jan/blob/main/CONTRIBUTING.md) for more comprehensive information on how to contribute to the Jan project.
+Refer to the [Contributing Guide](https://github.com/menloresearch/jan/blob/main/CONTRIBUTING.md) for more comprehensive information on how to contribute to the Jan project.

 ### Pre-requisites and Installation
File diff suppressed because it is too large
@@ -27,7 +27,7 @@ export const APIReference = () => {
     <ApiReferenceReact
       configuration={{
         spec: {
-          url: 'https://raw.githubusercontent.com/janhq/docs/main/public/openapi/jan.json',
+          url: 'https://raw.githubusercontent.com/menloresearch/docs/main/public/openapi/jan.json',
         },
         theme: 'alternate',
         hideModels: true,
@@ -57,7 +57,7 @@ const Changelog = () => {
       <p className="text-base mt-2 leading-relaxed">
         Latest release updates from the Jan team. Check out our
         <a
-          href="https://github.com/orgs/janhq/projects/5/views/52"
+          href="https://github.com/orgs/menloresearch/projects/5/views/52"
           className="text-blue-600 dark:text-blue-400 cursor-pointer"
         >
           Roadmap
@@ -150,7 +150,7 @@ const Changelog = () => {

       <div className="text-center">
         <Link
-          href="https://github.com/janhq/jan/releases"
+          href="https://github.com/menloresearch/jan/releases"
           target="_blank"
           className="dark:nx-bg-neutral-900 dark:text-white bg-black text-white hover:text-white justify-center dark:border dark:border-neutral-800 flex-shrink-0 px-4 py-3 rounded-xl inline-flex items-center"
         >
@@ -86,7 +86,7 @@ export default function CardDownload({ lastRelease }: Props) {
       .replace('{tag}', tag)
     return {
       ...system,
-      href: `https://github.com/janhq/jan/releases/download/${lastRelease.tag_name}/${downloadUrl}`,
+      href: `https://github.com/menloresearch/jan/releases/download/${lastRelease.tag_name}/${downloadUrl}`,
     }
   })
@@ -149,7 +149,7 @@ const DropdownDownload = ({ lastRelease }: Props) => {
       .replace('{tag}', tag)
     return {
       ...system,
-      href: `https://github.com/janhq/jan/releases/download/${lastRelease.tag_name}/${downloadUrl}`,
+      href: `https://github.com/menloresearch/jan/releases/download/${lastRelease.tag_name}/${downloadUrl}`,
     }
   })
   setSystems(updatedSystems)
@@ -26,7 +26,7 @@ const socials = [
     icon: (
       <AiOutlineGithub className="text-lg text-black/60 dark:text-white/60" />
     ),
-    href: 'https://github.com/janhq/jan',
+    href: 'https://github.com/menloresearch/jan',
   },
   {
     icon: <FaLinkedin className="text-lg text-black/60 dark:text-white/60" />,
@@ -62,7 +62,7 @@ const menus = [
     child: [
       {
         menu: 'Github',
-        path: 'https://github.com/janhq/jan',
+        path: 'https://github.com/menloresearch/jan',
         external: true,
       },
       {
@@ -23,7 +23,7 @@ const BuiltWithLove = () => {
       </div>
       <div className="flex flex-col lg:flex-row gap-8 mt-8 items-center justify-center">
         <a
-          href="https://github.com/janhq/jan"
+          href="https://github.com/menloresearch/jan"
           target="_blank"
           className="dark:bg-white bg-black inline-flex w-56 px-4 py-3 rounded-xl cursor-pointer justify-center items-start space-x-4 "
         >
@@ -44,7 +44,7 @@ const Hero = () => {
       <div className="mt-10 text-center">
         <div>
           <Link
-            href="https://github.com/janhq/jan/releases"
+            href="https://github.com/menloresearch/jan/releases"
             target="_blank"
             className="hidden lg:inline-block"
           >
@@ -26,19 +26,19 @@ Jan operates on open-source principles, giving everyone the freedom to adjust, p

 We embrace open development, showcasing our progress and upcoming features on GitHub, and we encourage your input and contributions:

-- [Jan Framework](https://github.com/janhq/jan) (AGPLv3)
+- [Jan Framework](https://github.com/menloresearch/jan) (AGPLv3)
 - [Jan Desktop Client & Local server](https://jan.ai) (AGPLv3, built on Jan Framework)
-- [Nitro: run Local AI](https://github.com/janhq/nitro) (AGPLv3)
+- [Nitro: run Local AI](https://github.com/menloresearch/nitro) (AGPLv3)

 ## Build in Public

 We use GitHub to build in public and welcome anyone to join in.

-- [Jan's Kanban](https://github.com/orgs/janhq/projects/5)
-- [Jan's Roadmap](https://github.com/orgs/janhq/projects/5/views/29)
+- [Jan's Kanban](https://github.com/orgs/menloresearch/projects/5)
+- [Jan's Roadmap](https://github.com/orgs/menloresearch/projects/5/views/29)

 ## Collaboration

 Our team spans the globe, working remotely to bring Jan to life. We coordinate through Discord and GitHub, valuing asynchronous communication and minimal, purposeful meetings. For collaboration and brainstorming, we utilize tools like [Excalidraw](https://excalidraw.com/) and [Miro](https://miro.com/), ensuring alignment and shared vision through visual storytelling and detailed documentation on [HackMD](https://hackmd.io/).

-Check out the [Jan Framework](https://github.com/janhq/jan) and our desktop client & local server at [jan.ai](https://jan.ai), both licensed under AGPLv3 for maximum openness and user freedom.
+Check out the [Jan Framework](https://github.com/menloresearch/jan) and our desktop client & local server at [jan.ai](https://jan.ai), both licensed under AGPLv3 for maximum openness and user freedom.
@@ -19,5 +19,5 @@ keywords:

 ## Prerequisites

-- [Requirements](https://github.com/janhq/jan?tab=readme-ov-file#requirements-for-running-jan)
-- [Setting up local env](https://github.com/janhq/jan?tab=readme-ov-file#contributing)
+- [Requirements](https://github.com/menloresearch/jan?tab=readme-ov-file#requirements-for-running-jan)
+- [Setting up local env](https://github.com/menloresearch/jan?tab=readme-ov-file#contributing)
@@ -20,7 +20,7 @@ import { Callout } from 'nextra/components'

 # Project Management

-We use the [Jan Monorepo Project](https://github.com/orgs/janhq/projects/5) in Github to manage our roadmap and sprint Kanbans.
+We use the [Jan Monorepo Project](https://github.com/orgs/menloresearch/projects/5) in Github to manage our roadmap and sprint Kanbans.

 As much as possible, everyone owns their respective `epics` and `tasks`.
@@ -30,37 +30,37 @@ As much as possible, everyone owns their respective `epics` and `tasks`.

 ## Quicklinks

-- [High-level roadmap](https://github.com/orgs/janhq/projects/5/views/16): view used at at strategic level, for team wide alignment. Start & end dates reflect engineering implementation cycles. Typically product & design work preceeds these timelines.
-- [Standup Kanban](https://github.com/orgs/janhq/projects/5/views/25): view used during daily standup. Sprints should be up to date.
+- [High-level roadmap](https://github.com/orgs/menloresearch/projects/5/views/16): view used at at strategic level, for team wide alignment. Start & end dates reflect engineering implementation cycles. Typically product & design work preceeds these timelines.
+- [Standup Kanban](https://github.com/orgs/menloresearch/projects/5/views/25): view used during daily standup. Sprints should be up to date.

 ## Organization

-[`Roadmap Labels`](https://github.com/janhq/jan/labels?q=roadmap)
+[`Roadmap Labels`](https://github.com/menloresearch/jan/labels?q=roadmap)

 - `Roadmap Labels` tag large, long-term, & strategic projects that can span multiple teams and multiple sprints
 - Example label: `roadmap: Jan has Mobile`
 - `Roadmaps` contain `epics`

-[`Epics`](https://github.com/janhq/jan/issues?q=is%3Aissue+is%3Aopen+label%3A%22type%3A+epic%22)
+[`Epics`](https://github.com/menloresearch/jan/issues?q=is%3Aissue+is%3Aopen+label%3A%22type%3A+epic%22)

 - `Epics` track large stories that span 1-2 weeks, and it outlines specs, architecture decisions, designs
 - `Epics` contain `tasks`
 - `Epics` should always have 1 owner

-[`Milestones`](https://github.com/janhq/jan/milestones)
+[`Milestones`](https://github.com/menloresearch/jan/milestones)

 - `Milestones` track release versions. We use [semantic versioning](https://semver.org/)
 - `Milestones` span ~2 weeks and have deadlines
 - `Milestones` usually fit within 2-week sprint cycles

-[`Tasks`](https://github.com/janhq/jan/issues)
+[`Tasks`](https://github.com/menloresearch/jan/issues)

 - Tasks are individual issues (feats, bugs, chores) that can be completed within a few days
 - Tasks, except for critical bugs, should always belong to an `epic` (and thus fit into our roadmap)
 - Tasks are usually named per [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/#summary)
 - Tasks should always have 1 owner

-We aim to always sprint on `tasks` that are a part of the [current roadmap](https://github.com/orgs/janhq/projects/5/views/16).
+We aim to always sprint on `tasks` that are a part of the [current roadmap](https://github.com/orgs/menloresearch/projects/5/views/16).

 ## Kanban
@@ -80,4 +80,4 @@ We aim to always sprint on `tasks` that are a part of the [current roadmap](http

 As a result, our feature prioritization can feel a bit black box at times.

-We'd appreciate high quality insights and volunteers for user interviews through [Discord](https://discord.gg/af6SaTdzpx) and [Github](https://github.com/janhq).
+We'd appreciate high quality insights and volunteers for user interviews through [Discord](https://discord.gg/af6SaTdzpx) and [Github](https://github.com/menloresearch).
@@ -37,7 +37,7 @@ We try to **keep routes consistent** to maintain SEO.

 ## How to Contribute

-Refer to the [Contributing Guide](https://github.com/janhq/jan/blob/dev/CONTRIBUTING.md) for more comprehensive information on how to contribute to the Jan project.
+Refer to the [Contributing Guide](https://github.com/menloresearch/jan/blob/dev/CONTRIBUTING.md) for more comprehensive information on how to contribute to the Jan project.

 ## Pre-requisites and Installation
@@ -81,7 +81,7 @@ Our products are designed with [Extension APIs](/docs/extensions), and we do our

 We are part of a larger open-source community and are committed to being a good jigsaw puzzle piece. We credit and actively contribute to upstream projects.

-We adopt a public-by-default approach to [Project Management](https://github.com/orgs/janhq/projects/5), [Roadmaps](https://github.com/orgs/janhq/projects/5/views/31), and Helpdesk for our products.
+We adopt a public-by-default approach to [Project Management](https://github.com/orgs/menloresearch/projects/5), [Roadmaps](https://github.com/orgs/menloresearch/projects/5/views/31), and Helpdesk for our products.

 ## Inspirations
@@ -24,4 +24,4 @@ Fixes 💫

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.5).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.5).

@@ -24,4 +24,4 @@ Jan now supports Mistral's new model Codestral. Thanks [Bartowski](https://huggi

 More GGUF models can run in Jan - we rebased to llama.cpp b3012.Big thanks to [ggerganov](https://github.com/ggerganov)

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.0).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.0).

@@ -28,4 +28,4 @@ Jan now understands LaTeX, allowing users to process and understand complex math

 

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.4.12).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.4.12).

@@ -28,4 +28,4 @@ Users can now connect to OpenAI's new model GPT-4o.

 

-For more details, see the [GitHub release notes.](https://github.com/janhq/jan/releases/tag/v0.4.13)
+For more details, see the [GitHub release notes.](https://github.com/menloresearch/jan/releases/tag/v0.4.13)

@@ -16,4 +16,4 @@ More GGUF models can run in Jan - we rebased to llama.cpp b2961.

 Huge shoutouts to [ggerganov](https://github.com/ggerganov) and contributors for llama.cpp, and [Bartowski](https://huggingface.co/bartowski) for GGUF models.

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.4.14).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.4.14).

@@ -26,4 +26,4 @@ We've updated to llama.cpp b3088 for better performance - thanks to [GG](https:/
 - Reduced chat font weight (back to normal!)
 - Restored the maximize button

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.1).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.1).

@@ -32,4 +32,4 @@ We've restored the tooltip hover functionality, which makes it easier to access

 The right-click options for thread settings are now fully operational again. You can now manage your threads with this fix.

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.2).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.2).

@@ -23,4 +23,4 @@ We've been working on stability issues over the last few weeks. Jan is now more
 - Fixed the GPU memory utilization bar
 - Some UX and copy improvements

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.3).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.3).

@@ -32,4 +32,4 @@ Switching between threads used to reset your instruction settings. That’s fixe
 ### Minor UI Tweaks & Bug Fixes
 We’ve also resolved issues with the input slider on the right panel and tackled several smaller bugs to keep everything running smoothly.

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.4).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.4).

@@ -23,4 +23,4 @@ Fixes 💫

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.7).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.7).

@@ -22,4 +22,4 @@ Jan v0.5.9 is here: fixing what needed fixing

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.9).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.9).

@@ -22,4 +22,4 @@ and various UI/UX enhancements 💫

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.8).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.8).

@@ -19,4 +19,4 @@ Jan v0.5.10 is live: Jan is faster, smoother, and more reliable.

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.10).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.10).

@@ -23,4 +23,4 @@ Jan v0.5.11 is here - critical issues fixed, Mac installation updated.

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.11).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.11).

@@ -25,4 +25,4 @@ Jan v0.5.11 is here - critical issues fixed, Mac installation updated.

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.12).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.12).

@@ -20,4 +20,4 @@ import ChangelogHeader from "@/components/Changelog/ChangelogHeader"

 Update your product or download the latest: https://jan.ai

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.13).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.13).

@@ -33,4 +33,4 @@ Llama

 Update your Jan or [download the latest](https://jan.ai/).

-For more details, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.5.14).
+For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.14).
@@ -40,7 +40,7 @@ If you already use [Jan](/docs) or [Cortex](/cortex), cortex.llamacpp is bundled

 ## Usage

-To include cortex.llamacpp in your own server implementation, follow this [server example](https://github.com/janhq/cortex.llamacpp/tree/main/examples/server).
+To include cortex.llamacpp in your own server implementation, follow this [server example](https://github.com/menloresearch/cortex.llamacpp/tree/main/examples/server).

 ## Interface
@@ -140,4 +140,4 @@ The future plans for Cortex.llamacpp are focused on enhancing performance and ex
 - Performance Enhancements: Optimizing speed and reducing memory usage to ensure efficient processing of tasks.
 - Multimodal Model Compatibility: Expanding support to include a variety of multimodal models, enabling a broader range of applications and use cases.

-To follow the latest developments, see the [cortex.llamacpp GitHub](https://github.com/janhq/cortex.llamacpp)
+To follow the latest developments, see the [cortex.llamacpp GitHub](https://github.com/menloresearch/cortex.llamacpp)
@@ -152,7 +152,7 @@ cortex -h

 To install Cortex from the source, follow the steps below:

-1. Clone the Cortex repository [here](https://github.com/janhq/cortex/tree/dev).
+1. Clone the Cortex repository [here](https://github.com/menloresearch/cortex/tree/dev).
 2. Navigate to the `cortex-js` folder.
 3. Open the terminal and run the following command to build the Cortex project:

@@ -118,7 +118,7 @@ cortex -h

 To install Cortex from the source, follow the steps below:

-1. Clone the Cortex repository [here](https://github.com/janhq/cortex/tree/dev).
+1. Clone the Cortex repository [here](https://github.com/menloresearch/cortex/tree/dev).
 2. Navigate to the `cortex-js` folder.
 3. Open the terminal and run the following command to build the Cortex project:

@@ -168,7 +168,7 @@ cortex -h

 To install Cortex from the source, follow the steps below:

-1. Clone the Cortex repository [here](https://github.com/janhq/cortex/tree/dev).
+1. Clone the Cortex repository [here](https://github.com/menloresearch/cortex/tree/dev).
 2. Navigate to the `cortex-js` folder.
 3. Open the terminal and run the following command to build the Cortex project:
@@ -46,7 +46,7 @@ You'll be able to use it with [Continue.dev](https://jan.ai/integrations/coding/
 ### Philosophy

 Jan is built to be [user-owned](about#-user-owned):
-- Open source via the [AGPLv3 license](https://github.com/janhq/jan/blob/dev/LICENSE)
+- Open source via the [AGPLv3 license](https://github.com/menloresearch/jan/blob/dev/LICENSE)
 - [Local-first](https://www.inkandswitch.com/local-first/), with all data stored locally
 - Runs 100% offline, with privacy by default
 - Free choice of AI models, both local and cloud-based
@@ -134,7 +134,7 @@ Jan has an extensible architecture like VSCode and Obsidian - you can build cust
 </FAQBox>

 <FAQBox title="How can I contribute to Jan's development or suggest features?">
-  Contributions can be made through [GitHub](https://github.com/janhq/jan) and [Discord](https://discord.gg/Exe46xPMbK), where you can also suggest features and make pull requests. No need to ask for permission. We're fully open-source!
+  Contributions can be made through [GitHub](https://github.com/menloresearch/jan) and [Discord](https://discord.gg/Exe46xPMbK), where you can also suggest features and make pull requests. No need to ask for permission. We're fully open-source!
 </FAQBox>

 <FAQBox title="How can I get involved with the Jan community?">
@@ -150,7 +150,7 @@ Jan has an extensible architecture like VSCode and Obsidian - you can build cust
 <FAQBox title="Can I self-host?">
   Yes! We love the self-hosted movement. You can:
   - [Download Jan](./download.mdx) and run it directly.
-  - Fork and build from our [GitHub](https://github.com/janhq/jan) repository.
+  - Fork and build from our [GitHub](https://github.com/menloresearch/jan) repository.
 </FAQBox>

 <FAQBox title="What does Jan stand for?">
@@ -96,7 +96,7 @@ my-extension/

 ### Example Extension Template

-You can find a template for creating Jan extensions in our [example repository](https://github.com/janhq/extension-template).
+You can find a template for creating Jan extensions in our [example repository](https://github.com/menloresearch/extension-template).

 ## Install Extensions

 To install a custom extension in Jan:
@@ -19,7 +19,7 @@ keywords:
 import Download from "@/components/Download"

 export const getStaticProps = async() => {
-  const resRelease = await fetch('https://api.github.com/repos/janhq/jan/releases/latest')
+  const resRelease = await fetch('https://api.github.com/repos/menloresearch/jan/releases/latest')
   const release = await resRelease.json()

   return {
@@ -19,9 +19,9 @@ keywords:
 import Home from "@/components/Home"

 export const getStaticProps = async() => {
-  const resReleaseLatest = await fetch('https://api.github.com/repos/janhq/jan/releases/latest')
-  const resRelease = await fetch('https://api.github.com/repos/janhq/jan/releases?per_page=500')
-  const resRepo = await fetch('https://api.github.com/repos/janhq/jan')
+  const resReleaseLatest = await fetch('https://api.github.com/repos/menloresearch/jan/releases/latest')
+  const resRelease = await fetch('https://api.github.com/repos/menloresearch/jan/releases?per_page=500')
+  const resRepo = await fetch('https://api.github.com/repos/menloresearch/jan')
   const repo = await resRepo.json()
   const latestRelease = await resReleaseLatest.json()
   const release = await resRelease.json()
@@ -14,12 +14,12 @@ import CTABlog from '@/components/Blog/CTA'

 Jan now supports [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) in addition to [llama.cpp](https://github.com/ggerganov/llama.cpp), making Jan multi-engine and ultra-fast for users with Nvidia GPUs.

-We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/janhq/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. [4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996).
+We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/menloresearch/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. [4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996).

 <Callout type="info" >
 **Give it a try!** Jan's [TensorRT-LLM extension](/docs/built-in/tensorrt-llm) is available in Jan v0.4.9 and up ([see more](/docs/built-in/tensorrt-llm)). We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂

-Bugs or feedback? Let us know on [GitHub](https://github.com/janhq/jan) or via [Discord](https://discord.com/channels/1107178041848909847/1201832734704795688).
+Bugs or feedback? Let us know on [GitHub](https://github.com/menloresearch/jan) or via [Discord](https://discord.com/channels/1107178041848909847/1201832734704795688).
 </Callout>

 <Callout type="info" >
@@ -13,7 +13,7 @@ import CTABlog from '@/components/Blog/CTA'

 ## Abstract

-We present a straightforward approach to customizing small, open-source models using fine-tuning and RAG that outperforms GPT-3.5 for specialized use cases. With it, we achieved superior Q&A results of [technical documentation](https://nitro.jan.ai/docs) for a small codebase [codebase](https://github.com/janhq/nitro).
+We present a straightforward approach to customizing small, open-source models using fine-tuning and RAG that outperforms GPT-3.5 for specialized use cases. With it, we achieved superior Q&A results of [technical documentation](https://nitro.jan.ai/docs) for a small codebase [codebase](https://github.com/menloresearch/nitro).

 In short, (1) extending a general foundation model like [Mistral](https://huggingface.co/mistralai/Mistral-7B-v0.1) with strong math and coding, and (2) training it over a high-quality, synthetic dataset generated from the intended corpus, and (3) adding RAG capabilities, can lead to significant accuracy improvements.
@ -93,11 +93,11 @@ This final model can be found [here on Huggingface](https://huggingface.co/jan-h
|
||||
|
||||
As an additional step, we also added [Retrieval Augmented Generation (RAG)](https://blogs.nvidia.com/blog/what-is-retrieval-augmented-generation/) as an experiment parameter.

A simple RAG setup was done using **[Llamaindex](https://www.llamaindex.ai/)** and the **[bge-base-en-v1.5 embedding](https://huggingface.co/BAAI/bge-base-en-v1.5)** model for efficient documentation retrieval and question-answering. You can find the RAG implementation [here](https://github.com/janhq/open-foundry/blob/main/rag-is-not-enough/rag/nitro_rag.ipynb).
A simple RAG setup was done using **[Llamaindex](https://www.llamaindex.ai/)** and the **[bge-base-en-v1.5 embedding](https://huggingface.co/BAAI/bge-base-en-v1.5)** model for efficient documentation retrieval and question-answering. You can find the RAG implementation [here](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/rag/nitro_rag.ipynb).
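
To make that setup concrete, here is a minimal sketch of this kind of pipeline. It is illustrative only and is not the code from the linked notebook: it assumes the current `llama-index` package layout, a local `./nitro-docs` folder containing the documentation, and an LLM already configured in `Settings` (LlamaIndex falls back to OpenAI via `OPENAI_API_KEY`; the experiment used the fine-tuned model instead).

```python
# Minimal LlamaIndex RAG sketch (illustrative; see the linked notebook for the
# actual implementation used in the experiment).
from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# Embed documentation chunks with the same embedding family used above.
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5")

# Index the docs corpus (the path is an assumption; point it at your own docs).
documents = SimpleDirectoryReader("./nitro-docs").load_data()
index = VectorStoreIndex.from_documents(documents)

# Retrieved chunks are injected into the prompt before the model answers.
query_engine = index.as_query_engine(similarity_top_k=3)
print(query_engine.query("How do I enable multithreading in Nitro?"))
```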
## Benchmarking the Results

We curated a new set of [50 multiple-choice questions](https://github.com/janhq/open-foundry/blob/main/rag-is-not-enough/rag/mcq_nitro.csv) (MCQ) based on the Nitro docs. The questions had varying levels of difficulty and included trick components that challenged the model's ability to discern misleading information.
We curated a new set of [50 multiple-choice questions](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/rag/mcq_nitro.csv) (MCQ) based on the Nitro docs. The questions had varying levels of difficulty and included trick components that challenged the model's ability to discern misleading information.



@ -121,7 +121,7 @@ We conclude that this combination of model merging + finetuning + RAG yields pro

Anecdotally, we’ve had some success using this model in practice to onboard new team members to the Nitro codebase.

A full research report with more statistics can be found [here](https://github.com/janhq/open-foundry/blob/main/rag-is-not-enough/README.md).
A full research report with more statistics can be found [here](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/README.md).

# References

@ -4,7 +4,7 @@ title: Support - Jan

# Support

- Bugs & requests: file a GitHub ticket [here](https://github.com/janhq/jan/issues)
- Bugs & requests: file a GitHub ticket [here](https://github.com/menloresearch/jan/issues)
- For discussion: join our Discord [here](https://discord.gg/FTk2MvZwJH)
- For business inquiries: email hello@jan.ai
- For jobs: please email hr@jan.ai

@ -30,7 +30,7 @@ const config: DocsThemeConfig = {
</div>
</span>
),
docsRepositoryBase: 'https://github.com/janhq/jan/tree/dev/docs',
docsRepositoryBase: 'https://github.com/menloresearch/jan/tree/dev/docs',
feedback: {
content: 'Question? Give us feedback →',
labels: 'feedback',

@ -59,7 +59,7 @@ const config: DocsThemeConfig = {
<a href="https://twitter.com/jandotai" target="_blank">
<RiTwitterXFill className="text-lg text-black/60 dark:text-white/60" />
</a>
<a href="https://github.com/janhq/jan" target="_blank">
<a href="https://github.com/menloresearch/jan" target="_blank">
<AiOutlineGithub className="text-xl text-black/60 dark:text-white/60" />
</a>
</div>

@ -5,7 +5,7 @@
"author": "Jan <service@jan.ai>",
"license": "MIT",
"productName": "Jan",
"homepage": "https://github.com/janhq/jan/tree/main/electron",
"homepage": "https://github.com/menloresearch/jan/tree/main/electron",
"description": "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers.",
"build": {
"appId": "jan.ai.app",

@ -70,6 +70,6 @@ There are a few things to keep in mind when writing your extension code:
```

For more information about the Jan Extension Core module, see the
[documentation](https://github.com/janhq/jan/blob/main/core/README.md).
[documentation](https://github.com/menloresearch/jan/blob/main/core/README.md).

So, what are you waiting for? Go ahead and start customizing your extension!

@ -70,6 +70,6 @@ There are a few things to keep in mind when writing your extension code:
```

For more information about the Jan Extension Core module, see the
[documentation](https://github.com/janhq/jan/blob/main/core/README.md).
[documentation](https://github.com/menloresearch/jan/blob/main/core/README.md).

So, what are you waiting for? Go ahead and start customizing your extension!

@ -5,11 +5,11 @@ set /p CORTEX_VERSION=<./bin/version.txt
set ENGINE_VERSION=0.1.55

@REM Download cortex.llamacpp binaries
set DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64
set CUDA_DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/v%ENGINE_VERSION%
set DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64
set CUDA_DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION%
set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan

call .\node_modules\.bin\download -e --strip 1 -o %BIN_PATH% https://github.com/janhq/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz
call .\node_modules\.bin\download -e --strip 1 -o %BIN_PATH% https://github.com/menloresearch/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz
call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-12-0.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx2-cuda-12-0/v%ENGINE_VERSION%
call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-11-7.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx2-cuda-11-7/v%ENGINE_VERSION%
call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-cuda-12-0.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-noavx-cuda-12-0/v%ENGINE_VERSION%

@ -3,9 +3,9 @@
# Read CORTEX_VERSION
CORTEX_VERSION=$(cat ./bin/version.txt)
ENGINE_VERSION=0.1.55
CORTEX_RELEASE_URL="https://github.com/janhq/cortex.cpp/releases/download"
ENGINE_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}"
CUDA_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}"
CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
ENGINE_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}"
CUDA_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}"
BIN_PATH=./bin
SHARED_PATH="../../electron/shared"
# Detect platform

@ -70,6 +70,6 @@ There are a few things to keep in mind when writing your extension code:
```

For more information about the Jan Extension Core module, see the
[documentation](https://github.com/janhq/jan/blob/main/core/README.md).
[documentation](https://github.com/menloresearch/jan/blob/main/core/README.md).

So, what are you waiting for? Go ahead and start customizing your extension!

@ -2668,7 +2668,7 @@
},
"url": {
"type": "string",
"example": "https://api.github.com/repos/janhq/cortex.llamacpp/releases/186479804"
"example": "https://api.github.com/repos/menloresearch/cortex.llamacpp/releases/186479804"
}
}
}

@ -4561,11 +4561,11 @@
"type": "string",
"enum": ["text", "audio"]
},
"description": "Specifies the modalities (types of input) supported by the model. Currently, cortex only support text modalities. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582).",
"description": "Specifies the modalities (types of input) supported by the model. Currently, cortex only support text modalities. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582).",
"example": ["text"]
},
"audio": {
"description": "Parameters for audio output. Required when audio output is requested with `modalities: ['audio']`. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582).",
"description": "Parameters for audio output. Required when audio output is requested with `modalities: ['audio']`. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582).",
"type": "object",
"properties": {
"voice": {

@ -4582,13 +4582,13 @@
},
"store": {
"type": "boolean",
"description": "Whether or not to store the output of this chat completion request for use in our model distillation or evals products. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582).",
"description": "Whether or not to store the output of this chat completion request for use in our model distillation or evals products. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582).",
"default": false,
"example": false
},
"metadata": {
"type": "object",
"description": "Developer-defined tags and values used for filtering completions in the dashboard. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582).",
"description": "Developer-defined tags and values used for filtering completions in the dashboard. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582).",
"example": {
"type": "conversation"
}

@ -4620,7 +4620,7 @@
},
"response_format": {
"type": "object",
"description": "An object specifying the format that the model must output. Setting to { \"type\": \"json_object\" } enables JSON mode, which guarantees the message the model generates is valid JSON. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582).",
"description": "An object specifying the format that the model must output. Setting to { \"type\": \"json_object\" } enables JSON mode, which guarantees the message the model generates is valid JSON. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582).",
"properties": {
"type": {
"type": "string",

@ -4638,7 +4638,7 @@
},
"service_tier": {
"type": "string",
"description": "Specifies the latency tier to use for processing the request. This parameter is relevant for customers subscribed to the scale tier service:\n\n - If set to 'auto', and the Project is Scale tier enabled, the system will utilize scale tier credits until they are exhausted.\n- If set to 'auto', and the Project is not Scale tier enabled, the request will be processed using the default service tier with a lower uptime SLA and no latency guarentee.\n- If set to 'default', the request will be processed using the default service tier with a lower uptime SLA and no latency guarentee.\nWhen not set, the default behavior is 'auto'.\nWhen this parameter is set, the response body will include the service_tier utilized.\n\n We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582)."
"description": "Specifies the latency tier to use for processing the request. This parameter is relevant for customers subscribed to the scale tier service:\n\n - If set to 'auto', and the Project is Scale tier enabled, the system will utilize scale tier credits until they are exhausted.\n- If set to 'auto', and the Project is not Scale tier enabled, the request will be processed using the default service tier with a lower uptime SLA and no latency guarentee.\n- If set to 'default', the request will be processed using the default service tier with a lower uptime SLA and no latency guarentee.\nWhen not set, the default behavior is 'auto'.\nWhen this parameter is set, the response body will include the service_tier utilized.\n\n We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582)."
},
"stream_options": {
"type": "object",

@ -4704,7 +4704,7 @@
},
"user": {
"type": "string",
"description": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/janhq/cortex.cpp/issues/1582)."
"description": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. We are actively working on this feature to bring cortex as fully OpenAI compatible platform. Planning and roadmap for this feature can be found [**here**](https://github.com/menloresearch/cortex.cpp/issues/1582)."
},
"dynatemp_range": {
"type": "number",

@ -26,7 +26,6 @@
- [ ] :key::warning: Uninstallation process removes the app successfully from the system.
- [ ] Clean the data folder and open the app to check if it creates all the necessary folders, especially models and extensions.

## B. Overview

### 1. Shortcut key

@ -38,7 +37,7 @@
- [ ] :key: The app correctly displays the state of the loading model (e.g., loading, ready, error).
- [ ] :key: Confirm that the app allows users to switch between models if multiple are available.
- [ ] Check that the app provides feedback or instructions if the model fails to load.
- [ ] Verify the troubleshooting assistant correctly captures hardware / log info [#1784](https://github.com/janhq/jan/issues/1784)
- [ ] Verify the troubleshooting assistant correctly captures hardware / log info [#1784](https://github.com/menloresearch/jan/issues/1784)

## C. Thread

@ -70,26 +69,29 @@
- [ ] :key: Users switch between threads with different models; the app can handle it.

### 3. Model dropdown

- :key: Model list should highlight recommended models based on user RAM (this is not really correct, I think it's based on static formula)
- [ ] Model size should display (for both installed and imported models)

### 4. Users can click on a history thread

- [ ] Chat window displays the entire conversation from the selected history thread without any missing messages.
- [ ] Historical threads reflect the exact state of the chat at that time, including settings.
- [ ] :key: Ability to delete or clean old threads.
- [ ] Changing the title of the thread updates correctly.

### 5. Users can configure instructions for the assistant.

- [ ] Instructions set by the user are being followed by the assistant in subsequent conversations.
- [ ] :key: Changes to instructions are updated in real time and do not require a restart of the application or session.
- [ ] :key: Ability to reset instructions to default or clear them completely.
- [ ] :key: RAG - Users can import documents and the system should process queries about the uploaded file, providing accurate and appropriate responses in the conversation thread.
- [ ] :key: Jan can see - Users can import an image and a model with vision can generate responses (e.g. the LLaVa model). [#294](https://github.com/janhq/jan/issues/294)
- [ ] :key: Jan can see - Users can import an image and a model with vision can generate responses (e.g. the LLaVa model). [#294](https://github.com/menloresearch/jan/issues/294)

## D. Hub

### 1. Users can discover recommended models

- :key: Each model's recommendations are consistent with the user’s activity and preferences.
- [ ] Search models and verify results / action on the results

@ -99,10 +101,10 @@
- [ ] :key: Ensure that models are labeled with RAM requirements.
- [ ] :key: Check the download model functionality and validate if the cancel download feature works correctly.

### 3. Users can download models via a HuggingFace URL [#1740](https://github.com/janhq/jan/issues/1740)
### 3. Users can download models via a HuggingFace URL [#1740](https://github.com/menloresearch/jan/issues/1740)

- [ ] :key: Import via Hugging Face Id / full HuggingFace URL, check the progress bar reflects the download process
- [ ] :key: Test deeplink import [#2876](https://github.com/janhq/jan/issues/2876)
- [ ] :key: Test deeplink import [#2876](https://github.com/menloresearch/jan/issues/2876)
- [ ] :key: Users can use / remove the imported model.

### 4. Users can import new models to the Hub

@ -112,16 +114,16 @@
- [ ] Users can add more info to the imported model / edit name
- [ ] :key: Ensure the new model updates after restarting the app.

### 5. Users can use the model as they want

- [ ] :key: Check the `start` / `stop` / `delete` buttons respond exactly as expected.
- [ ] :key: Check the `start` / `stop` / `delete` buttons respond exactly as expected.
- [ ] Check if starting another model stops the other model entirely.
- [ ] :rocket: Navigate to `hub` > Click `Use` button to use model. Expect to jump to thread and see the model in dropdown model selector.
- [ ] :key: Check that deleting a model deletes all of its files from the user's computer.
- [ ] :warning: The recommended tags should display correctly for the user's hardware.

### 6. Users can Integrate With a Remote Server

- [ ] :key: Import openAI GPT model https://jan.ai/guides/using-models/integrate-with-remote-server/ and check the model is displayed in Hub / Thread dropdown
- [ ] Users can use the remote model properly (openAI GPT, Groq)

@ -129,7 +131,7 @@

### 1. Users can see disk and RAM utilization

- [ ] :key: Verify that the RAM and VRAM utilization graphs are accurately reported in real time.
- [ ] :key: Verify that the RAM and VRAM utilization graphs are accurately reported in real time.
- [ ] :key: Validate that the utilization percentages reflect the actual usage compared to the system's total available resources.
- [ ] :key: Ensure that the system monitor updates dynamically as models run and stop.

@ -157,21 +159,21 @@
- [ ] :key: Users can set valid Endpoint and API Key to use remote models
- [ ] Monitoring extension should allow users to enable / disable log and set log Cleaning Interval

### 4. Advanced settings

- [ ] :key: Test the `Experimental Mode` toggle to confirm it enables or disables experimental features as intended.
- [ ] :key: Check the functionality of `Open App Directory` to ensure it opens the correct folder in the system file explorer.
- [ ] Users can move **Jan data folder**
- [ ] Validate that changes in advanced settings are applied immediately or provide appropriate instructions if a restart is needed.
- [ ] Attempt to test downloading a model from the Hub using **HTTP Proxy** [guideline](https://github.com/janhq/jan/pull/1562)
- [ ] Attempt to test downloading a model from the Hub using **HTTP Proxy** [guideline](https://github.com/menloresearch/jan/pull/1562)
- [ ] Logs that are older than 7 days or exceed 1MB in size will be automatically cleared upon starting the application.
- [ ] Users can click the Reset button to **factory reset** app settings to their original state & delete all usage data.
- [ ] Keep the current app data location
- [ ] Reset the current app data location
- [ ] Keep the current app data location
- [ ] Reset the current app data location
- [ ] Users can enable the setting and chat using quick ask.

### 5. Engine

- [ ] :key: TensorRT Engine - Users are able to chat with the model
- [ ] :key: Onnx Engine - Users are able to chat with the model
- [ ] :key: Other remote Engine - Users are able to chat with the model

@ -179,9 +181,10 @@

## G. Local API server

### 1. Local Server Usage with Server Options

- [ ] :key: Explore API Reference: Swagger API for sending/receiving requests (see the request sketch after this list)
- [ ] Use default server option
- [ ] Configure and use custom server options
- [ ] Use default server option
- [ ] Configure and use custom server options
- [ ] Test starting/stopping the local API server with different Model/Model settings
- [ ] Server logs captured with correct Server Options provided
- [ ] Verify functionality of Open logs/Clear feature
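
A quick way to exercise the server manually is a one-off chat completion request. This is a sketch only: the host, port, route, and model id below are assumptions, so match them to your Server Options and installed models.

```python
# Smoke test for the local API server (illustrative; host, port, and model id
# are assumptions -- adjust them to your actual server options).
import requests

resp = requests.post(
    "http://localhost:1337/v1/chat/completions",
    json={
        "model": "mistral-ins-7b-q4",  # replace with an installed model id
        "messages": [{"role": "user", "content": "Say hello in one sentence."}],
        "stream": False,
    },
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])
```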
@ -21,7 +21,7 @@ const UpdatedFailedModal = () => {
<span className="font-medium">{error}</span>. We appreciate your
help with{' '}
<a
href="https://github.com/janhq/jan#download"
href="https://github.com/menloresearch/jan#download"
target="_blank"
className="font-medium text-[hsla(var(--app-link))]"
>

@ -35,7 +35,10 @@ const UpdatedFailedModal = () => {
<ModalClose
asChild
onClick={() => {
window.open('https://github.com/janhq/jan#download', '_blank')
window.open(
'https://github.com/menloresearch/jan#download',
'_blank'
)
setError(undefined)
}}
>

@ -23,7 +23,7 @@ const menuLinks = [
{
name: 'Github',
icon: <FaGithub size={14} className="flex-shrink-0" />,
link: 'https://github.com/janhq/jan',
link: 'https://github.com/menloresearch/jan',
},
]

@ -2,7 +2,9 @@
import useSWR from 'swr'

const fetchLatestRelease = async (includeBeta: boolean) => {
const res = await fetch('https://api.github.com/repos/janhq/jan/releases')
const res = await fetch(
'https://api.github.com/repos/menloresearch/jan/releases'
)
if (!res.ok) throw new Error('Failed to fetch releases')

const releases = await res.json()