Merge branch 'dev' into docs_updates_to_assistant_id

This commit is contained in:
Henry 2024-03-03 07:25:01 +09:00 committed by GitHub
commit 5a6c20c4c9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 395 additions and 82 deletions

View File

@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
<tr style="text-align:center">
<td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
<td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-win-x64-0.4.7-291.exe'>
<a href='https://delta.jan.ai/latest/jan-win-x64-0.4.7-293.exe'>
<img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
<b>jan.exe</b>
</a>
</td>
<td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.7-291.dmg'>
<a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.7-293.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>Intel</b>
</a>
</td>
<td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.7-291.dmg'>
<a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.7-293.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>M1/M2</b>
</a>
</td>
<td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.7-291.deb'>
<a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.7-293.deb'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b>
</a>
</td>
<td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.7-291.AppImage'>
<a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.7-293.AppImage'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b>
</a>

View File

@ -42,6 +42,24 @@ export class Downloader implements Processor {
// Downloading file to a temp file first
const downloadingTempFile = `${destination}.download`
// adding initial download state
const initialDownloadState: DownloadState = {
modelId,
fileName,
time: {
elapsed: 0,
remaining: 0,
},
speed: 0,
percent: 0,
size: {
total: 0,
transferred: 0,
},
downloadState: 'downloading',
}
DownloadManager.instance.downloadProgressMap[modelId] = initialDownloadState
progress(rq, {})
.on('progress', (state: any) => {
const downloadState: DownloadState = {

View File

@ -93,6 +93,110 @@ paths:
"temperature": 0.7,
"top_p": 0.95
}'
- lang: JavaScript
source: |-
const data = {
messages: [
{
content: 'You are a helpful assistant.',
role: 'system'
},
{
content: 'Hello!',
role: 'user'
}
],
model: 'tinyllama-1.1b',
stream: true,
max_tokens: 2048,
stop: ['hello'],
frequency_penalty: 0,
presence_penalty: 0,
temperature: 0.7,
top_p: 0.95
};
fetch('http://localhost:1337/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(data)
})
.then(response => response.json())
.then(data => console.log(data));
- lang: Node.js
source: |-
const fetch = require('node-fetch');
const data = {
messages: [
{
content: 'You are a helpful assistant.',
role: 'system'
},
{
content: 'Hello!',
role: 'user'
}
],
model: 'tinyllama-1.1b',
stream: true,
max_tokens: 2048,
stop: ['hello'],
frequency_penalty: 0,
presence_penalty: 0,
temperature: 0.7,
top_p: 0.95
};
fetch('http://localhost:1337/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(data)
})
.then(response => response.json())
.then(data => console.log(data));
- lang: Python
source: |-
import requests
import json
data = {
"messages": [
{
"content": "You are a helpful assistant.",
"role": "system"
},
{
"content": "Hello!",
"role": "user"
}
],
"model": "tinyllama-1.1b",
"stream": true,
"max_tokens": 2048,
"stop": [
"hello"
],
"frequency_penalty": 0,
"presence_penalty": 0,
"temperature": 0.7,
"top_p": 0.95
}
response = requests.post('http://localhost:1337/v1/chat/completions', json=data)
print(response.json())
/models:
get:
operationId: listModels
@ -113,10 +217,38 @@ paths:
$ref: specs/models.yaml#/components/schemas/ListModelsResponse
x-codeSamples:
- lang: cURL
source: |
source: |-
curl -X 'GET' \
'http://localhost:1337/v1/models' \
-H 'accept: application/json'
- lang: JavaScript
source: |-
const response = await fetch('http://localhost:1337/v1/models', {
method: 'GET',
headers: {Accept: 'application/json'}
});
const data = await response.json();
- lang: Python
source: |-
import requests
url = 'http://localhost:1337/v1/models'
headers = {'Accept': 'application/json'}
response = requests.get(url, headers=headers)
data = response.json()
- lang: Node.js
source: |-
const fetch = require('node-fetch');
const url = 'http://localhost:1337/v1/models';
const options = {
method: 'GET',
headers: { Accept: 'application/json' }
};
fetch(url, options)
.then(res => res.json())
.then(json => console.log(json));
/models/download/{model_id}:
get:
operationId: downloadModel
@ -143,10 +275,40 @@ paths:
$ref: specs/models.yaml#/components/schemas/DownloadModelResponse
x-codeSamples:
- lang: cURL
source: |
source: |-
curl -X 'GET' \
'http://localhost:1337/v1/models/download/{model_id}' \
-H 'accept: application/json'
- lang: JavaScript
source: |-
const response = await fetch('http://localhost:1337/v1/models/download/{model_id}', {
method: 'GET',
headers: {accept: 'application/json'}
});
const data = await response.json();
- lang: Node.js
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/models/download/{model_id}', {
method: 'GET',
headers: {accept: 'application/json'}
})
.then(res => res.json())
.then(data => console.log(data));
- lang: Python
source: |-
import requests
response = requests.get('http://localhost:1337/v1/models/download/{model_id}', headers={'accept': 'application/json'})
data = response.json()
/models/{model_id}:
get:
operationId: retrieveModel
@ -176,10 +338,47 @@ paths:
$ref: specs/models.yaml#/components/schemas/GetModelResponse
x-codeSamples:
- lang: cURL
source: |
source: |-
curl -X 'GET' \
'http://localhost:1337/v1/models/{model_id}' \
-H 'accept: application/json'
- lang: JavaScript
source: |-
const fetch = require('node-fetch');
const modelId = 'mistral-ins-7b-q4';
fetch(`http://localhost:1337/v1/models/${modelId}`, {
method: 'GET',
headers: {'accept': 'application/json'}
})
.then(res => res.json())
.then(json => console.log(json));
- lang: Node.js
source: |-
const fetch = require('node-fetch');
const modelId = 'mistral-ins-7b-q4';
fetch(`http://localhost:1337/v1/models/${modelId}`, {
method: 'GET',
headers: {'accept': 'application/json'}
})
.then(res => res.json())
.then(json => console.log(json));
- lang: Python
source: |-
import requests
model_id = 'mistral-ins-7b-q4'
response = requests.get(f'http://localhost:1337/v1/models/{model_id}', headers={'accept': 'application/json'})
print(response.json())
delete:
operationId: deleteModel
tags:
@ -207,10 +406,45 @@ paths:
$ref: specs/models.yaml#/components/schemas/DeleteModelResponse
x-codeSamples:
- lang: cURL
source: |
source: |-
curl -X 'DELETE' \
'http://localhost:1337/v1/models/{model_id}' \
-H 'accept: application/json'
- lang: JavaScript
source: |-
const fetch = require('node-fetch');
const modelId = 'mistral-ins-7b-q4';
fetch(`http://localhost:1337/v1/models/${modelId}`, {
method: 'DELETE',
headers: { 'accept': 'application/json' }
})
.then(res => res.json())
.then(json => console.log(json));
- lang: Node.js
source: |-
const fetch = require('node-fetch');
const modelId = 'mistral-ins-7b-q4';
fetch(`http://localhost:1337/v1/models/${modelId}`, {
method: 'DELETE',
headers: { 'accept': 'application/json' }
})
.then(res => res.json())
.then(json => console.log(json));
- lang: Python
source: |-
import requests
model_id = 'mistral-ins-7b-q4'
response = requests.delete(f'http://localhost:1337/v1/models/{model_id}', headers={'accept': 'application/json'})
/threads:
post:
operationId: createThread
@ -249,6 +483,73 @@ paths:
"content": "How does AI work? Explain it in simple terms."
}]
}'
- lang: JavaScript
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/threads', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
messages: [
{
role: 'user',
content: 'Hello, what is AI?',
file_ids: ['file-abc123']
},
{
role: 'user',
content: 'How does AI work? Explain it in simple terms.'
}
]
})
});
- lang: Node.js
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/threads', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
messages: [
{
role: 'user',
content: 'Hello, what is AI?',
file_ids: ['file-abc123']
},
{
role: 'user',
content: 'How does AI work? Explain it in simple terms.'
}
]
})
});
- lang: Python
source: |-
import requests
url = 'http://localhost:1337/v1/threads'
payload = {
'messages': [
{
'role': 'user',
'content': 'Hello, what is AI?',
'file_ids': ['file-abc123']
},
{
'role': 'user',
'content': 'How does AI work? Explain it in simple terms.'
}
]
}
response = requests.post(url, json=payload)
print(response.text)
get:
operationId: listThreads
tags:
@ -282,9 +583,36 @@ paths:
metadata: {}
x-codeSamples:
- lang: cURL
source: |
source: |-
curl http://localhost:1337/v1/threads \
-H "Content-Type: application/json" \
-H "Content-Type: application/json"
- lang: JavaScript
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/threads', {
method: 'GET',
headers: {'Content-Type': 'application/json'}
}).then(res => res.json())
.then(json => console.log(json));
- lang: Node.js
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/threads', {
method: 'GET',
headers: {'Content-Type': 'application/json'}
}).then(res => res.json())
.then(json => console.log(json));
- lang: Python
source: |-
import requests
url = 'http://localhost:1337/v1/threads'
headers = {'Content-Type': 'application/json'}
response = requests.get(url, headers=headers)
print(response.json())
/threads/{thread_id}:
get:
operationId: getThread
@ -442,9 +770,35 @@ paths:
metadata: {}
x-codeSamples:
- lang: cURL
source: |
source: |-
curl http://localhost:1337/v1/assistants \
-H "Content-Type: application/json" \
-H "Content-Type: application/json"
- lang: JavaScript
source: |-
fetch('http://localhost:1337/v1/assistants', {
method: 'GET',
headers: {
'Content-Type': 'application/json'
}
})
- lang: Node.js
source: |-
const fetch = require('node-fetch');
fetch('http://localhost:1337/v1/assistants', {
method: 'GET',
headers: {
'Content-Type': 'application/json'
}
})
- lang: Python
source: |-
import requests
url = 'http://localhost:1337/v1/assistants'
headers = {'Content-Type': 'application/json'}
response = requests.get(url, headers=headers)
/assistants/{assistant_id}:
get:
operationId: getAssistant

View File

@ -42,69 +42,10 @@
--danger: 346.8 77.2% 49.8%;
--danger-foreground: 355.7 100% 97.3%;
--secondary: 60 4.8% 95.9%;
--secondary-foreground: 24 9.8% 10%;
--border: 20 5.9% 90%;
--input: 20 5.9% 90%;
--ring: 20 14.3% 4.1%;
.primary-blue {
--primary: 221 83% 53%;
--primary-foreground: 210 40% 98%;
--secondary: 60 4.8% 95.9%;
--secondary-foreground: 24 9.8% 10%;
}
.primary-green {
--primary: 142.1 76.2% 36.3%;
--primary-foreground: 355.7 100% 97.3%;
--secondary: 240 4.8% 95.9%;
--secondary-foreground: 240 5.9% 10%;
}
.primary-purple {
--primary: 262.1 83.3% 57.8%;
--primary-foreground: 210 20% 98%;
--secondary: 220 14.3% 95.9%;
--secondary-foreground: 220.9 39.3% 11%;
}
}
.dark {
--background: 20 14.3% 4.1%;
--foreground: 60 9.1% 97.8%;
--muted: 12 6.5% 15.1%;
--muted-foreground: 24 5.4% 63.9%;
--danger: 346.8 77.2% 49.8%;
--danger-foreground: 355.7 100% 97.3%;
--border: 12 6.5% 15.1%;
--input: 12 6.5% 15.1%;
--ring: 35.5 91.7% 32.9%;
.primary-blue {
--primary: 221 83% 53%;
--primary-foreground: 222.2 47.4% 11.2%;
--secondary: 12 6.5% 15.1%;
--secondary-foreground: 60 9.1% 97.8%;
}
.primary-green {
--primary: 142.1 70.6% 45.3%;
--primary-foreground: 144.9 80.4% 10%;
--secondary: 240 3.7% 15.9%;
--secondary-foreground: 0 0% 98%;
}
.primary-purple {
--primary: 263.4 70% 50.4%;
--primary-foreground: 210 20% 98%;
--secondary: 215 27.9% 16.9%;
--secondary-foreground: 210 20% 98%;
}
}