Merge branch 'main' into add/model-list
commit f68909bb20
@@ -70,7 +70,7 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
   <tr style="text-align: center">
     <td style="text-align:center"><b>Experimental (Nighlty Build)</b></td>
     <td style="text-align:center" colspan="4">
-      <a href='https://github.com/janhq/jan/actions/runs/7350847486'>
+      <a href='https://github.com/janhq/jan/actions/runs/7355602681'>
        <b>Github action artifactory</b>
      </a>
    </td>
@@ -59,9 +59,11 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
   /**
    * Subscribes to events emitted by the @janhq/core package.
    */
-  async onLoad(): Promise<void> {
+  async onLoad() {
     if (!(await fs.existsSync(JanInferenceNitroExtension._homeDir))) {
-      await fs.mkdirSync(JanInferenceNitroExtension._homeDir).catch((err) => console.debug(err));
+      await fs
+        .mkdirSync(JanInferenceNitroExtension._homeDir)
+        .catch((err) => console.debug(err));
     }

     if (!(await fs.existsSync(JanInferenceNitroExtension._settingsDir)))
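Editor's note: both extension hunks bootstrap their data directories with the same guard, i.e. check whether the folder exists, create it if not, and log rather than throw if creation fails. A minimal sketch of that pattern, assuming the @janhq/core fs wrapper returns promises even though its method names end in "Sync" (it is awaited throughout this diff); the helper name and type are illustrative, not from the repo:

    // Sketch only: ensure a directory exists before the extension continues.
    // `CoreFs` stands in for the @janhq/core fs wrapper used above; only
    // existsSync and mkdirSync (both awaited) are taken from the diff.
    type CoreFs = {
      existsSync: (path: string) => Promise<boolean>
      mkdirSync: (path: string) => Promise<void>
    }

    async function ensureDir(fs: CoreFs, dir: string): Promise<void> {
      if (!(await fs.existsSync(dir))) {
        // Swallow a failed mkdir (e.g. a race with another caller) with a debug log.
        await fs.mkdirSync(dir).catch((err) => console.debug(err))
      }
    }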
@@ -87,21 +89,6 @@ export default class JanInferenceNitroExtension implements InferenceExtension {

     // Attempt to fetch nvidia info
     await executeOnMain(MODULE, "updateNvidiaInfo", {});
-
-    const gpuDriverConf = await fs.readFileSync(
-      join(JanInferenceNitroExtension._settingsDir, "settings.json")
-    );
-    if (gpuDriverConf.notify && gpuDriverConf.run_mode === "cpu") {
-      // Driver is fully installed, but not in use
-      if (gpuDriverConf.nvidia_driver?.exist && gpuDriverConf.cuda?.exist) {
-        events.emit("OnGPUCompatiblePrompt", {});
-        // Prompt user to switch
-      } else if (gpuDriverConf.nvidia_driver?.exist) {
-        // Prompt user to install cuda toolkit
-        events.emit("OnGPUDriverMissingPrompt", {});
-      }
-    }
-    Promise.resolve()
   }

   /**
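Editor's note: the removed block is a small decision tree over the GPU settings file: it only acts when notifications are enabled and the app is running on CPU, then emits one of two prompt events depending on what is installed. A sketch of that logic in isolation; the settings shape is inferred from the fields read above and is not an official schema:

    // Inferred from the fields accessed in the removed block; not an official schema.
    interface GpuDriverConf {
      notify?: boolean
      run_mode?: "cpu" | "gpu"
      nvidia_driver?: { exist?: boolean }
      cuda?: { exist?: boolean }
    }

    // Returns the event name the extension would emit for a given config, if any.
    function gpuPromptEvent(conf: GpuDriverConf): string | undefined {
      if (!conf.notify || conf.run_mode !== "cpu") return undefined
      if (conf.nvidia_driver?.exist && conf.cuda?.exist) {
        // Driver and CUDA are installed but unused: suggest switching to GPU.
        return "OnGPUCompatiblePrompt"
      }
      if (conf.nvidia_driver?.exist) {
        // Driver is present but CUDA is not: suggest installing the toolkit.
        return "OnGPUDriverMissingPrompt"
      }
      return undefined
    }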
@@ -53,7 +53,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
   /**
    * Subscribes to events emitted by the @janhq/core package.
    */
-  async onLoad(): Promise<void> {
+  async onLoad() {
     if (!(await fs.existsSync(JanInferenceOpenAIExtension._homeDir))) {
       await fs
         .mkdirSync(JanInferenceOpenAIExtension._homeDir)
@@ -77,7 +77,6 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
     events.on(EventName.OnInferenceStopped, () => {
       JanInferenceOpenAIExtension.handleInferenceStopped(this);
     });
-    Promise.resolve();
   }

   /**
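Editor's note: the dropped Promise.resolve(); (also removed in the Nitro extension above) is a no-op. Inside an async method the expression's value is discarded, and the method's returned promise resolves once the body finishes either way. A hypothetical two-function illustration (names are not from the repo):

    // Hypothetical: both methods resolve to undefined when their bodies complete;
    // the bare Promise.resolve() in the first adds nothing and is safe to delete.
    async function onLoadWithNoop(): Promise<void> {
      Promise.resolve()
    }

    async function onLoadWithout(): Promise<void> {}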
@@ -119,8 +119,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {
       }
     }
   }
-  function handleGpuCompatiblePrompt() {}
-  function handleGpuDriverMissingPrompt() {}

   useEffect(() => {
     if (window.core?.events) {
@@ -129,8 +127,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {
       events.on(EventName.OnModelReady, handleModelReady)
       events.on(EventName.OnModelFail, handleModelFail)
       events.on(EventName.OnModelStopped, handleModelStopped)
-      events.on('OnGPUCompatiblePrompt', handleGpuCompatiblePrompt)
-      events.on('OnGPUDriverMissingPrompt', handleGpuDriverMissingPrompt)
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [])
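Editor's note: the two EventHandler hunks remove the renderer-side half of the GPU prompt wiring, i.e. the empty handlers and their subscriptions. For reference, the emit/subscribe pairing visible earlier in this diff looks like the sketch below; the generic bus type stands in for @janhq/core's events object, of which only on and emit appear in the diff, so anything beyond that is an assumption:

    // Sketch: a minimal event-bus shape matching the `events.on` / `events.emit`
    // calls seen in this diff.
    type EventBus = {
      on: (name: string, handler: (data?: unknown) => void) => void
      emit: (name: string, data?: unknown) => void
    }

    // Subscribe UI callbacks to the two GPU prompt event names used above.
    function wireGpuPrompts(
      events: EventBus,
      onCompatible: () => void,
      onDriverMissing: () => void
    ) {
      events.on('OnGPUCompatiblePrompt', onCompatible)
      events.on('OnGPUDriverMissingPrompt', onDriverMissing)
    }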
@@ -75,7 +75,7 @@ const Providers = (props: PropsWithChildren) => {
       <FeatureToggleWrapper>
         <EventListenerWrapper>
           <TooltipProvider delayDuration={0}>{children}</TooltipProvider>
-          <GPUDriverPrompt />
+          {!isMac && <GPUDriverPrompt />}
         </EventListenerWrapper>
         <Toaster position="top-right" />
       </FeatureToggleWrapper>
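Editor's note: the driver prompt is now rendered only off macOS, where an NVIDIA driver check does not apply. A minimal sketch of the same gate as a reusable component; isMac is assumed to be a boolean computed elsewhere in the app (the diff only shows it being read), and the component name is illustrative:

    // Sketch: render children only on non-macOS platforms. `isMac` is assumed to
    // come from an app-level platform check; this diff does not show its source.
    import type { ReactNode } from 'react'

    export const NonMacOnly = ({
      isMac,
      children,
    }: {
      isMac: boolean
      children: ReactNode
    }) => <>{!isMac && children}</>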
@@ -24,33 +24,34 @@ const Advanced = () => {
   return (
     <div className="block w-full">
       {/* CPU / GPU switching */}
-
-      <div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
-        <div className="w-4/5 flex-shrink-0 space-y-1.5">
-          <div className="flex gap-x-2">
-            <h6 className="text-sm font-semibold capitalize">NVidia GPU</h6>
-          </div>
-          <p className="whitespace-pre-wrap leading-relaxed">
-            Enable GPU acceleration for NVidia GPUs.
-          </p>
-        </div>
-        <Switch
-          checked={gpuEnabled}
-          onCheckedChange={(e: boolean) => {
-            if (e === true) {
-              saveSettings({ runMode: 'gpu' })
-              setGpuEnabled(true)
-              setShowNotification(false)
-              setTimeout(() => {
-                validateSettings()
-              }, 300)
-            } else {
-              saveSettings({ runMode: 'cpu' })
-              setGpuEnabled(false)
-            }
-          }}
-        />
-      </div>
+      {!isMac && (
+        <div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
+          <div className="w-4/5 flex-shrink-0 space-y-1.5">
+            <div className="flex gap-x-2">
+              <h6 className="text-sm font-semibold capitalize">NVidia GPU</h6>
+            </div>
+            <p className="whitespace-pre-wrap leading-relaxed">
+              Enable GPU acceleration for NVidia GPUs.
+            </p>
+          </div>
+          <Switch
+            checked={gpuEnabled}
+            onCheckedChange={(e: boolean) => {
+              if (e === true) {
+                saveSettings({ runMode: 'gpu' })
+                setGpuEnabled(true)
+                setShowNotification(false)
+                setTimeout(() => {
+                  validateSettings()
+                }, 300)
+              } else {
+                saveSettings({ runMode: 'cpu' })
+                setGpuEnabled(false)
+              }
+            }}
+          />
+        </div>
+      )}
       {/* Experimental */}
       <div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
         <div className="w-4/5 flex-shrink-0 space-y-1.5">
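Editor's note: the same settings hunk carries the GPU toggle logic: enabling saves runMode 'gpu', clears the pending notification, and validates the settings after a short delay; disabling simply saves runMode 'cpu'. A sketch of that handler pulled out as a factory so its dependencies are explicit; the helpers (saveSettings, validateSettings, the state setters) are the component's own, and only their call shapes are taken from the diff:

    // Sketch only: toggle logic from the Advanced settings hunk above.
    type GpuToggleDeps = {
      saveSettings: (s: { runMode: 'gpu' | 'cpu' }) => void
      setGpuEnabled: (v: boolean) => void
      setShowNotification: (v: boolean) => void
      validateSettings: () => void
    }

    const makeGpuToggleHandler = (deps: GpuToggleDeps) => (enabled: boolean) => {
      if (enabled) {
        deps.saveSettings({ runMode: 'gpu' })
        deps.setGpuEnabled(true)
        deps.setShowNotification(false)
        // Delay validation briefly so the saved run mode is picked up first.
        setTimeout(() => deps.validateSettings(), 300)
      } else {
        deps.saveSettings({ runMode: 'cpu' })
        deps.setGpuEnabled(false)
      }
    }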