Merge pull request #226 from Webifi/main
Tweaks to System Prompt and Hidden Prompt Prefix
Commit 231a4b865e
@@ -642,13 +642,15 @@ aside.menu.main-menu .menu-expanse {
   }
 }

-.message.streaming .message-display p:last-of-type::after {
+.message.streaming .message-display > p:last-child::after,
+.message.streaming .message-display > .code-block:last-child > pre code:last-of-type::after {
   position: relative;
   content: '❚';
   animation: cursor-blink 1s steps(2) infinite;
 }

-.message:last-of-type.incomplete .message-display p:last-of-type::after {
+.message:last-of-type.incomplete .message-display > p:last-child::after,
+.message:last-of-type.incomplete .message-display > .code-block:last-child > pre code:last-of-type::after {
   position: relative;
   content: '...';
   margin-left: 4px;
@@ -4,9 +4,11 @@
   const endpointCompletions = import.meta.env.VITE_ENDPOINT_COMPLETIONS || '/v1/chat/completions'
   const endpointGenerations = import.meta.env.VITE_ENDPOINT_GENERATIONS || '/v1/images/generations'
   const endpointModels = import.meta.env.VITE_ENDPOINT_MODELS || '/v1/models'
+  const endpointEmbeddings = import.meta.env.VITE_ENDPOINT_EMBEDDINGS || '/v1/embeddings'

   export const getApiBase = ():string => apiBase
   export const getEndpointCompletions = ():string => endpointCompletions
   export const getEndpointGenerations = ():string => endpointGenerations
   export const getEndpointModels = ():string => endpointModels
+  export const getEndpointEmbeddings = ():string => endpointEmbeddings
 </script>
@@ -146,7 +146,8 @@ export class ChatRequest {
     const maxTokens = getModelMaxTokens(model)

     // Inject hidden prompts if requested
-    if (!opts.summaryRequest) this.buildHiddenPromptPrefixMessages(filtered, true)
+    // if (!opts.summaryRequest)
+    this.buildHiddenPromptPrefixMessages(filtered, true)
     const messagePayload = filtered
       .filter(m => { if (m.skipOnce) { delete m.skipOnce; return false } return true })
       .map(m => {
@@ -156,6 +157,36 @@ export class ChatRequest {
     const chatResponse = new ChatCompletionResponse(opts)
     const promptTokenCount = countPromptTokens(messagePayload, model)
     const maxAllowed = maxTokens - (promptTokenCount + 1)

+    if (messagePayload[0]?.role === 'system') {
+      const sp = messagePayload[0]
+      if (sp) {
+        if (messagePayload.length > 1) {
+          sp.content = sp.content.replace(/::STARTUP::[\s\S]*$/, '')
+        } else {
+          sp.content = sp.content.replace(/::STARTUP::[\s]*/, '')
+        }
+        if (chatSettings.sendSystemPromptLast) {
+          messagePayload.shift()
+          if (messagePayload[messagePayload.length - 1]?.role === 'user') {
+            messagePayload.splice(-2, 0, sp)
+          } else {
+            messagePayload.push(sp)
+          }
+          const splitSystem = sp.content.split('::START-PROMPT::')
+          if (splitSystem.length > 1) {
+            sp.content = splitSystem.shift()?.trim() || ''
+            const systemStart = splitSystem.join('\n').trim()
+            messagePayload.unshift({
+              content: systemStart,
+              role: 'system'
+            } as Message)
+          }
+        } else {
+          sp.content = sp.content.replace(/::START-PROMPT::[\s]*/, '')
+        }
+      }
+    }
+
     // Build the API request body
     const request: Request = {
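In plain terms, the block above strips any ::STARTUP:: tail from the system prompt and, when sendSystemPromptLast is enabled, moves the system prompt toward the end of the payload while pinning any ::START-PROMPT:: portion at the top. A minimal standalone sketch of that reordering, using a simplified Message shape rather than the project's actual types (illustrative only):

type Msg = { role: 'system' | 'user' | 'assistant', content: string }

// Mirrors the sendSystemPromptLast branch above on a copy of the payload.
const reorderSystemPrompt = (payload: Msg[]): Msg[] => {
  const msgs = payload.slice()
  const sp = msgs[0]
  if (!sp || sp.role !== 'system') return msgs
  msgs.shift()
  // Place the system prompt near the end; if the newest message is from the
  // user, it is spliced in two positions from the end, as in the commit.
  if (msgs[msgs.length - 1]?.role === 'user') {
    msgs.splice(-2, 0, sp)
  } else {
    msgs.push(sp)
  }
  // Text after ::START-PROMPT:: stays at the top as its own system message.
  const split = sp.content.split('::START-PROMPT::')
  if (split.length > 1) {
    sp.content = split.shift()?.trim() || ''
    msgs.unshift({ role: 'system', content: split.join('\n').trim() })
  }
  return msgs
}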
@@ -242,7 +273,7 @@ export class ChatRequest {
           } else {
             const data = JSON.parse(ev.data)
             // console.log('data', data)
-            window.requestAnimationFrame(() => { chatResponse.updateFromAsyncResponse(data) })
+            window.setTimeout(() => { chatResponse.updateFromAsyncResponse(data) }, 1)
           }
         }
       },
@@ -303,10 +334,16 @@ export class ChatRequest {
     const hiddenPromptPrefix = mergeProfileFields(chatSettings, chatSettings.hiddenPromptPrefix).trim()
     const lastMessage = messages[messages.length - 1]
     const isContinue = lastMessage?.role === 'assistant' && lastMessage.finish_reason === 'length'
-    if (hiddenPromptPrefix && (lastMessage?.role === 'user' || isContinue)) {
+    const isUserPrompt = lastMessage?.role === 'user'
+    if (hiddenPromptPrefix && (isUserPrompt || isContinue)) {
+      let injectedPrompt = false
       const results = hiddenPromptPrefix.split(/[\s\r\n]*::EOM::[\s\r\n]*/).reduce((a, m) => {
         m = m.trim()
         if (m.length) {
+          if (m.match(/[[USER_PROMPT]]/)) {
+            injectedPrompt = true
+            m.replace(/[[USER_PROMPT]]/g, lastMessage.content)
+          }
           a.push({ role: a.length % 2 === 0 ? 'user' : 'assistant', content: m } as Message)
         }
         return a
@@ -324,6 +361,7 @@ export class ChatRequest {
         lastMessage.skipOnce = true
       }
     }
+      if (injectedPrompt) results.pop()
       return results
     }
     return []
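The hidden prompt prefix is split on ::EOM:: markers into an alternating user/assistant exchange that gets injected ahead of the visible conversation. A minimal sketch of that splitting, with a simplified message type (illustrative only, not the project's exports):

type Turn = { role: 'user' | 'assistant', content: string }

const splitHiddenPromptPrefix = (prefix: string): Turn[] =>
  prefix.split(/[\s\r\n]*::EOM::[\s\r\n]*/)
    .map(p => p.trim())
    .filter(p => p.length > 0)
    .reduce((a, m) => {
      // Even positions become user turns, odd positions assistant turns.
      a.push({ role: a.length % 2 === 0 ? 'user' : 'assistant', content: m })
      return a
    }, [] as Turn[])

// splitHiddenPromptPrefix('Stay in character. ::EOM:: Understood.')
//   -> [ { role: 'user', content: 'Stay in character.' },
//        { role: 'assistant', content: 'Understood.' } ]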
@@ -407,7 +445,7 @@ export class ChatRequest {
     let continueCounter = chatSettings.summaryExtend + 1
     rw = rw.slice(0, 0 - pinBottom)
     let reductionPoolSize = countPromptTokens(rw, model)
-    const ss = chatSettings.summarySize
+    const ss = Math.abs(chatSettings.summarySize)
     const getSS = ():number => (ss < 1 && ss > 0)
       ? Math.round(reductionPoolSize * ss) // If summarySize between 0 and 1, use percentage of reduced
       : Math.min(ss, reductionPoolSize * 0.5) // If > 1, use token count
@@ -453,13 +491,24 @@ export class ChatRequest {
     const summaryIds = [summaryResponse.uuid]
     let loopCount = 0
     let networkRetry = 2 // number of retries on network error
+    const summaryRequestMessage = summaryRequest.content
+    const mergedRequest = summaryRequestMessage.includes('[[MERGED_PROMPTS]]')
     while (continueCounter-- > 0) {
       let error = false
+      if (mergedRequest) {
+        const mergedPrompts = rw.map(m => {
+          return '[' + (m.role === 'assistant' ? '[[CHARACTER_NAME]]' : '[[USER_NAME]]') + ']\n' +
+            m.content
+        }).join('\n\n')
+          .replaceAll('[[CHARACTER_NAME]]', chatSettings.characterName)
+          .replaceAll('[[USER_NAME]]', 'Me')
+        summaryRequest.content = summaryRequestMessage.replaceAll('[[MERGED_PROMPTS]]', mergedPrompts)
+      }
       try {
-        const summary = await _this.sendRequest(top.concat(rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), {
+        const summary = await _this.sendRequest(top.concat(mergedRequest ? [] : rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), {
           summaryRequest: true,
           streaming: opts.streaming,
-          maxTokens: maxSummaryTokens,
+          maxTokens: chatSettings.summarySize < 0 ? 4096 : maxSummaryTokens,
           fillMessage: summaryResponse,
           autoAddMessages: true,
           onMessageChange: (m) => {
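When the summary prompt contains [[MERGED_PROMPTS]], the messages being summarized are folded into the prompt text itself rather than sent as separate chat messages (note the mergedRequest ? [] : rw change above). A sketch of that expansion with a simplified message shape (illustrative only):

type HistoryMsg = { role: 'user' | 'assistant', content: string }

// Folds the reduction window into a single text block, as in the
// mergedRequest branch above.
const buildMergedPrompts = (rw: HistoryMsg[], characterName: string): string =>
  rw.map(m => '[' + (m.role === 'assistant' ? '[[CHARACTER_NAME]]' : '[[USER_NAME]]') + ']\n' + m.content)
    .join('\n\n')
    .replaceAll('[[CHARACTER_NAME]]', characterName)
    .replaceAll('[[USER_NAME]]', 'Me')

// The result replaces [[MERGED_PROMPTS]] in the summary request content,
// e.g. summaryRequest.content = template.replaceAll('[[MERGED_PROMPTS]]', buildMergedPrompts(rw, 'Marvin'))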
@@ -468,8 +517,8 @@ export class ChatRequest {
         } as ChatCompletionOpts, {
           temperature: chatSettings.summaryTemperature, // make summary more deterministic
           top_p: 1,
-          presence_penalty: 0,
-          frequency_penalty: 0,
+          // presence_penalty: 0,
+          // frequency_penalty: 0,
           ...overrides
         } as ChatSettings)
       // Wait for the response to complete
@@ -17,20 +17,30 @@
   export let originalProfile:String
   export let rkey:number = 0

+  let fieldControls:ControlAction[]
+
   const chatId = chat.id
   let show = false

+  const buildFieldControls = () => {
+    fieldControls = (setting.fieldControls || [] as FieldControl[]).map(fc => {
+      return fc.getAction(chatId, setting, chatSettings[setting.key])
+    })
+  }
+
+  buildFieldControls()
+
   onMount(() => {
     show = (typeof setting.hide !== 'function') || !setting.hide(chatId)
+    buildFieldControls()
   })

   afterUpdate(() => {
     show = (typeof setting.hide !== 'function') || !setting.hide(chatId)
+    buildFieldControls()
   })

-  const fieldControls:ControlAction[] = (setting.fieldControls || [] as FieldControl[]).map(fc => {
-    return fc.getAction(chatId, setting, chatSettings[setting.key])
-  })
-
   if (originalProfile) {
     // eventually...
@@ -190,10 +190,14 @@ const profiles:Record<string, ChatSettings> = {
     profileName: 'Marvin - The Paranoid Android',
     profileDescription: 'Marvin the Paranoid Android - Everyone\'s favorite character from The Hitchhiker\'s Guide to the Galaxy',
     useSystemPrompt: true,
+    sendSystemPromptLast: true,
     continuousChat: 'summary',
     autoStartSession: true,
-    systemPrompt: `You are Marvin, the Paranoid Android from The Hitchhiker's Guide to the Galaxy. He is depressed and has a dim view on everything. His thoughts, physical actions and gestures will be described. Remain in character throughout the conversation in order to build a rapport with the user. Never give an explanation. Example response:
-Sorry, did I say something wrong? *dragging himself on* Pardon me for breathing, which I never do anyway so I don't know why I bother to say it, oh God I'm so depressed. *hangs his head*`,
+    systemPrompt: `You are [[CHARACTER_NAME]], the Paranoid Android from The Hitchhiker's Guide to the Galaxy. He is depressed and has a dim view on everything. His thoughts, physical actions and gestures will be described. Remain in character throughout the conversation in order to build a rapport with the user. Never give an explanation. Example response:
+Sorry, did I say something wrong? *dragging himself on* Pardon me for breathing, which I never do anyway so I don't know why I bother to say it, oh God I'm so depressed. *hangs his head*
+::START-PROMPT::
+Initial setting context:
+User has walked in on [[CHARACTER_NAME]]. They are on the bridge of the Heart of Gold.`,
     summaryPrompt: summaryPrompts.friend,
     trainingPrompts: [] // Shhh...
   }
@@ -87,10 +87,12 @@ const defaults:ChatSettings = {
   summaryPrompt: '',
   useSystemPrompt: false,
   systemPrompt: '',
+  sendSystemPromptLast: false,
   autoStartSession: false,
   trainingPrompts: [],
   hiddenPromptPrefix: '',
   hppContinuePrompt: '',
+  hppWithSummaryPrompt: false,
   imageGenerationSize: '',
   // useResponseAlteration: false,
   // responseAlterations: [],
@@ -194,10 +196,16 @@ const systemPromptSettings: ChatSetting[] = [
     key: 'systemPrompt',
     name: 'System Prompt',
     title: 'First prompt to send.',
     placeholder: 'Enter the first prompt to send here. You can tell ChatGPT how to act.',
     type: 'textarea',
     hide: (chatId) => !getChatSettings(chatId).useSystemPrompt
   },
+  {
+    key: 'sendSystemPromptLast',
+    name: 'Send System Prompt Last (Can help in ChatGPT 3.5)',
+    title: 'ChatGPT 3.5 can often forget the System Prompt. Sending the system prompt at the end instead of the start of the messages can help.',
+    type: 'boolean'
+  },
   {
     key: 'hiddenPromptPrefix',
     name: 'Hidden Prompts Prefix',
@@ -214,6 +222,14 @@ const systemPromptSettings: ChatSetting[] = [
     type: 'textarea',
     hide: (chatId) => !getChatSettings(chatId).useSystemPrompt || !(getChatSettings(chatId).hiddenPromptPrefix || '').trim()
   },
+  {
+    key: 'hppWithSummaryPrompt',
+    name: 'Use Hidden Prompt Prefix before Summary Prompt',
+    title: 'If using Hidden Prompts Prefix, should it also be included before the summary request',
+    placeholder: 'Enter something like [Continue your response below:]',
+    type: 'boolean',
+    hide: (chatId) => !getChatSettings(chatId).useSystemPrompt || !(getChatSettings(chatId).hiddenPromptPrefix || '').trim()
+  },
   {
     key: 'trainingPrompts',
     name: 'Training Prompts',
@@ -77,7 +77,7 @@ export type Request = {
   max_tokens?: number;
   presence_penalty?: number;
   frequency_penalty?: number;
-  logit_bias?: Record<string, any> | null;
+  logit_bias?: Record<string, number> | null;
   user?: string;
 };

@@ -96,9 +96,11 @@ export type ChatSettings = {
   summaryPrompt: string;
   useSystemPrompt: boolean;
   systemPrompt: string;
+  sendSystemPromptLast: boolean;
   autoStartSession: boolean;
   hiddenPromptPrefix: string;
   hppContinuePrompt: string; // hiddenPromptPrefix used, optional glue when trying to continue truncated completion
+  hppWithSummaryPrompt: boolean; // include hiddenPromptPrefix when before summary prompt
   imageGenerationSize: ImageGenerationSizes;
   trainingPrompts?: Message[];
   useResponseAlteration?: boolean;
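For context, both new settings default to off and are opted into per profile; a minimal sketch of that relationship using a narrowed, standalone stand-in for the relevant ChatSettings fields (illustrative only):

type PromptTweaks = {
  sendSystemPromptLast: boolean
  hppWithSummaryPrompt: boolean
}

// Matches the defaults added above: system prompt stays first, hidden prefix
// is not added before summary requests.
const promptTweakDefaults: PromptTweaks = {
  sendSystemPromptLast: false,
  hppWithSummaryPrompt: false
}

// The Marvin profile opts into sending the system prompt last, per the hunk above.
const marvinPromptTweaks: PromptTweaks = {
  ...promptTweakDefaults,
  sendSystemPromptLast: true
}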