Added DeepSeek models from Groq; fixed streaming for o1

This commit is contained in:
2025-02-16 21:29:53 +09:00
parent 368d002600
commit 98d4854ed6
4 changed files with 79 additions and 24 deletions

View File

@@ -213,7 +213,7 @@ export class ChatRequest {
if (value !== null) acc[key] = value
return acc
}, {}),
stream: modelDetail.reasoning ? false : opts.streaming
stream: modelDetail.stream ? false : opts.streaming
}
// Make the chat completion request

View File

@@ -176,6 +176,21 @@
}
}
// Extract a <think>…</think> reasoning section from an assistant message.
// Side effect: stores the captured reasoning on the outer `message.reason`
// (cleared to "" for non-assistant messages), then returns the message text
// with the <think> block stripped out.
const takeReason = (msg) => {
  if (!isAssistant) {
    message.reason = ""
    return msg
  }
  // Non-global, lazy match: only the first <think>…</think> pair is handled.
  const thinkPattern = /<think>([\s\S]*?)<\/think>/
  const found = thinkPattern.exec(msg)
  if (found) {
    message.reason = found[1]
    msg = msg.replace(thinkPattern, '')
  }
  return msg
};
let waitingForTruncateConfirm:any = 0
const checkTruncate = () => {
@@ -302,7 +317,6 @@
{:else}
<div
class="message-display"
on:touchend={editOnDoubleTap}
on:dblclick|preventDefault={() => edit()}
>
@@ -310,8 +324,17 @@
<p><b>Summarizing...</b></p>
{/if}
{#key refreshCounter}
{#if message.reason}
<details>
<summary>Reasoning..</summary>
<div style="background-color:#333;padding:10px;">
<SvelteMarkdown source={message.reason}/>
</div>
</details>
<br/>
{/if}
<SvelteMarkdown
source={replaceLatexDelimiters(displayMessage)}
source={takeReason(replaceLatexDelimiters(displayMessage))}
options={markdownOptions}
renderers={renderers}
/>

View File

@@ -155,11 +155,13 @@ export async function getChatModelOptions (): Promise<SelectOption[]> {
const model = models[i]
const modelDetail = getModelDetail(model)
await modelDetail.check(modelDetail)
result.push({
value: model,
text: modelDetail.label || model,
disabled: !modelDetail.enabled
})
if(modelDetail.enabled){
result.push({
value: model,
text: modelDetail.label || model,
disabled: !modelDetail.enabled
})
}
}
return result
}

View File

@@ -107,7 +107,6 @@ const gpt4128kpreview = {
}
const o1preview = {
...chatModelBase,
stream: false,
reasoning: true,
prompt: 0.00001, // $0.01 per 1000 tokens prompt
completion: 0.00003, // $0.03 per 1000 tokens completion
@@ -115,7 +114,6 @@ const o1preview = {
}
const o1mini = {
...chatModelBase,
stream: false,
reasoning: true,
prompt: 0.00001, // $0.01 per 1000 tokens prompt
completion: 0.00003, // $0.03 per 1000 tokens completion
@@ -123,7 +121,6 @@ const o1mini = {
}
const o1 = {
...chatModelBase,
stream: false,
reasoning: true,
prompt: 15 / 1_000_000,
completion: 60 / 1_000_000,
@@ -131,18 +128,17 @@ const o1 = {
}
const o3mini = {
...chatModelBase,
stream: false,
reasoning: true,
prompt: 1.1 / 1_000_000,
completion: 4.4 / 1_000_000,
max: 200000
}
// Base pricing/limits for Groq-hosted Llama 3 models; spread into the
// Groq model entries in the chatModels record below.
const llama3 = {
...chatModelBase,
prompt: 0.00003, // $0.03 per 1000 tokens prompt
completion: 0.00006, // $0.06 per 1000 tokens completion
max: 8192 // context window in tokens
}
...chatModelBase,
prompt: 0.00003,
completion: 0.00006,
max: 8192
}
const claude35sonnet = {
...chatModelBase,
prompt: 0.00000375, // $0.00375 per 1000 tokens prompt
@@ -180,13 +176,47 @@ export const chatModels : Record<string, ModelDetail> = {
'gpt-4-32k-0314': { ...gpt432k },
'o1-preview': { ...o1preview },
'o1-mini': { ...o1mini },
'o1': { ...o1 },
'o3-mini': { ...o3mini },
'mixtral-8x7b-32768': { ...llama3 },
'llama3-70b-8192': { ...llama3 },
'llama3-8b-8192': { ...llama3 },
'claude-3-5-sonnet-20241022': { ...claude35sonnet },
'claude-3-5-haiku-20241022': { ...claude35haiku }
'o1': {
...chatModelBase,
reasoning: true,
prompt: 15 / 1_000_000,
completion: 60 / 1_000_000,
max: 200000
},
'o3-mini': {
...chatModelBase,
reasoning: true,
prompt: 1.1 / 1_000_000,
completion: 4.4 / 1_000_000,
max: 200000
},
'claude-3-5-sonnet-20241022': {
...chatModelBase,
prompt: 3.75 / 1_000_000,
completion: 15.0 / 1_000_000,
max: 8192
},
'claude-3-5-haiku-20241022': {
...chatModelBase,
prompt: 1 / 1_000_000,
completion: 4 / 1_000_000,
max: 4096
},
'deepseek-r1-distill-qwen-32b': {
...chatModelBase,
prompt: 0.69 / 1_000_000,
completion: 0.69 / 1_000_000,
max: 16384
},
'deepseek-r1-distill-llama-70b': {
...chatModelBase,
prompt: 3 / 1_000_000,
completion: 3 / 1_000_000,
max: 4096
},
// 'mixtral-8x7b-32768': { ...llama3 },
// 'llama3-70b-8192': { ...llama3 },
// 'llama3-8b-8192': { ...llama3 },
}
const imageModelBase = {