Merge branch 'main' into docs/add-guides

Commit 4b2578b328, authored by Hieu on 2024-01-01 08:31:26 +07:00 and committed by GitHub.
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
11 changed files with 43 additions and 104 deletions

@@ -70,7 +70,7 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
 <tr style="text-align: center">
 <td style="text-align:center"><b>Experimental (Nighlty Build)</b></td>
 <td style="text-align:center" colspan="4">
-<a href='https://github.com/janhq/jan/actions/runs/7359469616'>
+<a href='https://github.com/janhq/jan/actions/runs/7372465396'>
 <b>Github action artifactory</b>
 </a>
 </td>
@@ -146,15 +146,15 @@ Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) fi
 ### Instructions
-1. **Clone the Repository:**
+1. **Clone the repository and prepare:**
 ```bash
 git clone https://github.com/janhq/jan
-git checkout DESIRED_BRANCH
 cd jan
+git checkout -b DESIRED_BRANCH
 ```
-2. **Run development and Using Jan Desktop**
+2. **Run development and use Jan Desktop**
 ```
 make dev
@@ -165,10 +165,7 @@ Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) fi
 ### For production build
 ```bash
-# Do step 1 and 2 in previous section
-git clone https://github.com/janhq/jan
-cd jan
+# Do steps 1 and 2 in the previous section
 # Build the app
 make build
 ```
@@ -203,7 +200,7 @@ Jan builds on top of other open-source projects:
 ## Contact
-- Bugs & requests: file a Github ticket
+- Bugs & requests: file a GitHub ticket
 - For discussion: join our Discord [here](https://discord.gg/FTk2MvZwJH)
 - For business inquiries: email hello@jan.ai
 - For jobs: please email hr@jan.ai

@@ -27,6 +27,6 @@ Jan runs on port `1337` by default, but this can be changed in Settings.
 Check out the [API Reference](/api-reference) for more information on the API endpoints.
 ```
-curl https://localhost:1337/v1/chat/completions
+curl http://localhost:1337/v1/chat/completions
 ```
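For context, a complete call to this endpoint also carries a JSON body. A minimal sketch, assuming the OpenAI-compatible chat schema described in Jan's API reference (the model id is a placeholder, not part of this commit):

```bash
# Hypothetical request; substitute a model id that is installed locally
curl http://localhost:1337/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
        "model": "YOUR_MODEL_ID",
        "messages": [{"role": "user", "content": "Hello from the local API"}]
      }'
```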

@@ -14,7 +14,7 @@ license:
 name: AGPLv3
 url: "https://github.com/janhq/nitro/blob/main/LICENSE"
 servers:
-- url: "https://localhost:1337/v1/"
+- url: "http://localhost:1337/v1/"
 tags:
 - name: Models
 description: List and describe the various models available in the API.
@@ -100,7 +100,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl https://localhost:1337/v1/models
+curl http://localhost:1337/v1/models
 post:
 operationId: downloadModel
 tags:
@@ -118,7 +118,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl -X POST https://localhost:1337/v1/models
+curl -X POST http://localhost:1337/v1/models
 "/models/{model_id}":
 get:
 operationId: retrieveModel
@@ -149,7 +149,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl https://localhost:1337/v1/models/{model_id}
+curl http://localhost:1337/v1/models/{model_id}
 delete:
 operationId: deleteModel
 tags:
@@ -178,7 +178,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl -X DELETE https://localhost:1337/v1/models/{model_id}
+curl -X DELETE http://localhost:1337/v1/models/{model_id}
 "/models/{model_id}/start":
 put:
 operationId: startModel
@@ -206,7 +206,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl -X PUT https://localhost:1337/v1/models/{model_id}/start
+curl -X PUT http://localhost:1337/v1/models/{model_id}/start
 "/models/{model_id}/stop":
 put:
 operationId: stopModel
@@ -233,7 +233,7 @@ paths:
 x-codeSamples:
 - lang: cURL
 source: |
-curl -X PUT https://localhost:1337/v1/models/{model_id}/stop
+curl -X PUT http://localhost:1337/v1/models/{model_id}/stop
 /threads:
 post:
 operationId: createThread
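The corrected samples above map out the model lifecycle on the local server. A minimal sketch of that flow, with a hypothetical model id standing in for {model_id}:

```bash
# Illustrative walkthrough of the endpoints above (the model id is a placeholder)
curl http://localhost:1337/v1/models                        # list available models
curl -X PUT http://localhost:1337/v1/models/my-model/start  # load a model
curl -X PUT http://localhost:1337/v1/models/my-model/stop   # unload it again
```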

@@ -1,39 +0,0 @@
-#!/bin/bash
-# Check if nvidia-smi exists and is executable
-if ! command -v nvidia-smi &> /dev/null; then
-echo "nvidia-smi not found, proceeding with CPU version..."
-cd linux-cpu
-./nitro "$@"
-exit $?
-fi
-# Find the GPU with the highest VRAM
-readarray -t gpus < <(nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits)
-maxMemory=0
-selectedGpuId=0
-for gpu in "${gpus[@]}"; do
-IFS=, read -ra gpuInfo <<< "$gpu"
-gpuId=${gpuInfo[0]}
-gpuMemory=${gpuInfo[1]}
-if (( gpuMemory > maxMemory )); then
-maxMemory=$gpuMemory
-selectedGpuId=$gpuId
-fi
-done
-echo "Selected GPU: $selectedGpuId"
-export CUDA_VISIBLE_DEVICES=$selectedGpuId
-# Attempt to run nitro_linux_amd64_cuda
-cd linux-cuda
-./nitro "$@" > output.log 2>&1 || (
-echo "Check output log" &&
-if grep -q "CUDA error" output.log; then
-echo "CUDA error detected, attempting to run nitro_linux_amd64..."
-cd ../linux-cpu && ./nitro "$@"
-exit $?
-fi
-exit $?
-)

@@ -1,45 +0,0 @@
-@echo off
-setlocal enabledelayedexpansion
-set "maxMemory=0"
-set "gpuId="
-rem check if nvidia-smi command exist or not
-where nvidia-smi >nul 2>&1
-if %errorlevel% neq 0 (
-echo nvidia-smi not found, proceeding with CPU version...
-cd win-cuda
-goto RunCpuVersion
-)
-set "tempFile=%temp%\nvidia_smi_output.txt"
-nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits > "%tempFile%"
-for /f "usebackq tokens=1-2 delims=, " %%a in ("%tempFile%") do (
-set /a memorySize=%%b
-if !memorySize! gtr !maxMemory! (
-set "maxMemory=!memorySize!"
-set "gpuId=%%a"
-)
-)
-rem Echo the selected GPU
-echo Selected GPU: !gpuId!
-rem Set the GPU with the highest VRAM as the visible CUDA device
-set CUDA_VISIBLE_DEVICES=!gpuId!
-rem Attempt to run nitro_windows_amd64_cuda.exe
-cd win-cuda
-nitro.exe %* > output.log
-type output.log | findstr /C:"CUDA error" >nul
-if %errorlevel% equ 0 ( goto :RunCpuVersion ) else ( goto :End )
-:RunCpuVersion
-rem Run nitro_windows_amd64.exe...
-cd ..\win-cpu
-nitro.exe %*
-:End
-endlocal

@@ -8,7 +8,7 @@
 "license": "AGPL-3.0",
 "scripts": {
 "build": "tsc -b . && webpack --config webpack.config.js",
-"downloadnitro:linux": "NITRO_VERSION=$(cat ./bin/version.txt) && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-linux-amd64.tar.gz -e --strip 1 -o ./bin/linux-cpu && chmod +x ./bin/linux-cpu/nitro && chmod +x ./bin/linux-start.sh && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-linux-amd64-cuda.tar.gz -e --strip 1 -o ./bin/linux-cuda && chmod +x ./bin/linux-cuda/nitro && chmod +x ./bin/linux-start.sh",
+"downloadnitro:linux": "NITRO_VERSION=$(cat ./bin/version.txt) && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-linux-amd64.tar.gz -e --strip 1 -o ./bin/linux-cpu && chmod +x ./bin/linux-cpu/nitro && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-linux-amd64-cuda.tar.gz -e --strip 1 -o ./bin/linux-cuda && chmod +x ./bin/linux-cuda/nitro",
 "downloadnitro:darwin": "NITRO_VERSION=$(cat ./bin/version.txt) && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-mac-arm64.tar.gz -e --strip 1 -o ./bin/mac-arm64 && chmod +x ./bin/mac-arm64/nitro && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-mac-amd64.tar.gz -e --strip 1 -o ./bin/mac-x64 && chmod +x ./bin/mac-x64/nitro",
 "downloadnitro:win32": "download.bat",
 "downloadnitro": "run-script-os",

@@ -394,9 +394,9 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
 } else {
 let nvida_info = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
 if (nvida_info["run_mode"] === "cpu") {
-binaryFolder = path.join(binaryFolder, "win-cpu");
+binaryFolder = path.join(binaryFolder, "linux-cpu");
 } else {
-binaryFolder = path.join(binaryFolder, "win-cuda");
+binaryFolder = path.join(binaryFolder, "linux-cuda");
 cudaVisibleDevices = nvida_info["gpu_highest_vram"];
 }
 binaryName = "nitro";

@@ -10,6 +10,23 @@ const JAN_API_PORT = Number.parseInt(process.env.JAN_API_PORT || "1337");
 const server = fastify();
 server.register(require("@fastify/cors"), {});
+server.register(require("@fastify/swagger"), {
+mode: "static",
+specification: {
+path: "./../docs/openapi/jan.yaml",
+baseDir: "./../docs/openapi",
+},
+});
+server.register(require("@fastify/swagger-ui"), {
+routePrefix: "/docs",
+baseDir: path.join(__dirname, "../..", "./docs/openapi"),
+uiConfig: {
+docExpansion: "full",
+deepLinking: false,
+},
+staticCSP: true,
+transformSpecificationClone: true,
+});
 server.register(
 (childContext, _, done) => {
 childContext.register(require("@fastify/static"), {
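With these additions, @fastify/swagger loads the static jan.yaml spec and @fastify/swagger-ui serves it under the routePrefix shown above, so on the default port the interactive docs should appear at /docs. A quick smoke test might be:

```bash
# Assumes the API server is running on the default port 1337;
# -L follows any redirect swagger-ui issues to the trailing-slash path
curl -L http://localhost:1337/docs
```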

@@ -19,6 +19,8 @@
 "dependencies": {
 "@fastify/cors": "^8.4.2",
 "@fastify/static": "^6.12.0",
+"@fastify/swagger": "^8.13.0",
+"@fastify/swagger-ui": "^2.0.1",
 "@janhq/core": "link:./core",
 "dotenv": "^16.3.1",
 "fastify": "^4.24.3",

@@ -136,6 +136,7 @@ export default function ThreadList() {
 <Button
 themes="danger"
 onClick={() => cleanThread(thread.id)}
+autoFocus
 >
 Yes
 </Button>

@@ -1,5 +1,7 @@
 .message {
 @apply text-black dark:text-gray-300;
+white-space: pre-line;
 ul,
 ol {
 list-style: auto;
@@ -10,3 +12,7 @@
 button[class*='react-scroll-to-bottom--'] {
 display: none;
 }
+.code-block {
+white-space: normal;
+}