feat: enhance chatCompletionRequest with advanced sampling parameters

Add comprehensive sampling parameters to the chatCompletionRequest interface for fine-grained control over model output, including dynamic temperature, Mirostat sampling, repetition and DRY penalties, XTC sampling, and prompt-handling options such as prompt caching. These parameters allow more precise tuning of generation behavior and output quality.
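
For illustration, a request that exercises several of the new fields might look like the sketch below. The field names come from the updated interface in this diff; the import path, model ID, message shape, and parameter values are assumptions chosen for the example, not part of this change.

import type { chatCompletionRequest } from './chatCompletion' // illustrative path; import from wherever the interface is defined

// A minimal sketch of a request using the new sampling controls.
const request: chatCompletionRequest = {
  model: 'llama-3.2-3b-instruct', // example model ID
  messages: [{ role: 'user', content: 'Write a haiku about rain.' }], // message shape assumed to be role/content
  // Dynamic temperature: sample around 0.8 with a +/-0.4 range
  temperature: 0.8,
  dynatemp_range: 0.4,
  dynatemp_exponent: 1.0,
  // Truncation samplers
  top_k: 40,
  top_p: 0.95,
  min_p: 0.05,
  // Repetition and DRY penalties
  repeat_penalty: 1.1,
  repeat_last_n: 64,
  dry_multiplier: 0.8,
  dry_base: 1.75,
  dry_allowed_length: 2,
  // Mirostat 2.0 (when enabled, it typically supersedes the truncation samplers above)
  mirostat: 2,
  mirostat_tau: 5.0,
  mirostat_eta: 0.1,
  // Generation limits, reproducibility, and caching
  n_predict: 256,
  seed: 42,
  stream: true,
  cache_prompt: true,
}
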
Author: Akarshan
Date: 2025-06-05 12:58:37 +05:30
Committed by: Louis
Parent: 6c769c5db9
Commit: c2b606a3fc

@@ -23,20 +23,53 @@ export interface InputAudio {
 }
 export interface chatCompletionRequest {
-  provider: string,
-  model: string // Model ID, though for local it might be implicit via sessionId
-  messages: chatCompletionRequestMessage[]
-  temperature?: number | null
-  top_p?: number | null
-  n?: number | null
-  stream?: boolean | null
-  stop?: string | string[] | null
-  max_tokens?: number
-  presence_penalty?: number | null
-  frequency_penalty?: number | null
-  logit_bias?: { [key: string]: number } | null
-  user?: string
-  // ... TODO: other OpenAI params
+  model: string; // Model ID, though for local it might be implicit via sessionInfo
+  messages: chatCompletionRequestMessage[];
+  // Core sampling parameters
+  temperature?: number | null;
+  dynatemp_range?: number | null;
+  dynatemp_exponent?: number | null;
+  top_k?: number | null;
+  top_p?: number | null;
+  min_p?: number | null;
+  typical_p?: number | null;
+  repeat_penalty?: number | null;
+  repeat_last_n?: number | null;
+  presence_penalty?: number | null;
+  frequency_penalty?: number | null;
+  dry_multiplier?: number | null;
+  dry_base?: number | null;
+  dry_allowed_length?: number | null;
+  dry_penalty_last_n?: number | null;
+  dry_sequence_breakers?: string[] | null;
+  xtc_probability?: number | null;
+  xtc_threshold?: number | null;
+  mirostat?: number | null; // 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0
+  mirostat_tau?: number | null;
+  mirostat_eta?: number | null;
+  n_predict?: number | null;
+  n_indent?: number | null;
+  n_keep?: number | null;
+  stream?: boolean | null;
+  stop?: string | string[] | null;
+  seed?: number | null; // RNG seed
+  // Advanced sampling
+  logit_bias?: { [key: string]: number } | null;
+  n_probs?: number | null;
+  min_keep?: number | null;
+  t_max_predict_ms?: number | null;
+  image_data?: Array<{ data: string; id: number }> | null;
+  // Internal/optimization parameters
+  id_slot?: number | null;
+  cache_prompt?: boolean | null;
+  return_tokens?: boolean | null;
+  samplers?: string[] | null;
+  timings_per_token?: boolean | null;
+  post_sampling_probs?: boolean | null;
 }
 export interface chatCompletionChunkChoiceDelta {
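
Many of the added fields appear to mirror the sampling options exposed by llama.cpp-style server APIs. As a hedged sketch of how such a request body might be dispatched, the helper below posts the request to a local endpoint; the function name, URL, and response handling are assumptions for illustration and are not shown in this diff.

import type { chatCompletionRequest } from './chatCompletion' // illustrative path, as above

// Hypothetical helper: POST a chatCompletionRequest to a local server.
// The endpoint URL is an assumption; the diff does not show how the app
// actually dispatches requests.
async function sendChatCompletion(req: chatCompletionRequest): Promise<unknown> {
  const res = await fetch('http://127.0.0.1:8080/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(req), // optional fields left undefined are simply omitted
  })
  if (!res.ok) {
    throw new Error(`completion request failed: ${res.status}`)
  }
  return res.json()
}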