`;
return $(hiddenBlock);
}
/**
 * Builds a jQuery element for a single character entry of the global character list.
 * @param {Character} item - The character data object
 * @param {string|number} id - The character id (index into the characters array)
 * @returns {JQuery} The populated character block element
 */
function getCharacterBlock(item, id) {
    // 'none' is the sentinel for "no custom avatar uploaded"
    const avatarUrl = item.avatar != 'none'
        ? getThumbnailUrl('avatar', item.avatar)
        : default_avatar;

    // Populate the hidden template clone with this character's data
    const template = $('#character_template .character_select').clone();
    template.attr({ 'chid': id, 'id': `CharID${id}` });
    template.find('img').attr('src', avatarUrl).attr('alt', item.name);
    template.find('.avatar').attr('title', `[Character] ${item.name}\nFile: ${item.avatar}`);
    template.find('.ch_name').text(item.name).attr('title', `[Character] ${item.name}`);
    if (power_user.show_card_avatar_urls) {
        template.find('.ch_avatar_url').text(item.avatar);
    }
    template.find('.ch_fav_icon').css('display', 'none');
    template.toggleClass('is_fav', item.fav || item.fav == 'true');
    template.find('.ch_fav').val(item.fav);

    const description = item.data?.creator_notes || '';
    if (!description) {
        template.find('.ch_description').hide();
    } else {
        template.find('.ch_description').text(description);
    }

    const auxFieldName = power_user.aux_field || 'character_version';
    const auxFieldValue = (item.data && item.data[auxFieldName]) || '';
    if (!auxFieldValue) {
        template.find('.character_version').hide();
    } else {
        template.find('.character_version').text(auxFieldValue);
    }

    // Display inline tags
    printTagList(template.find('.tags'), { forEntityOrKey: id });

    return template;
}
/**
 * Prints the global character list, optionally doing a full refresh of the list
 * Use this function whenever the reprinting of the character list is the primary focus, otherwise using `printCharactersDebounced` is preferred for a cleaner, non-blocking experience.
 *
 * The printing will also always reprint all filter options of the global list, to keep them up to date.
 *
 * @param {boolean} fullRefresh - If true, the list is fully refreshed and the navigation is being reset
 */
export async function printCharacters(fullRefresh = false) {
    const storageKey = 'Characters_PerPage';
    const listId = '#rm_print_characters_block';
    // Remember the scroll position so it can be restored after the re-render
    let currentScrollTop = $(listId).scrollTop();
    if (fullRefresh) {
        saveCharactersPage = 0;
        currentScrollTop = 0;
        // Yield to the event loop so pending UI updates can settle before reprinting
        await delay(1);
    }
    // Before printing the personas, we check if we should enable/disable search sorting
    verifyCharactersSearchSortRule();
    // We are actually always reprinting filters, as it "doesn't hurt", and this way they are always up to date
    printTagFilters(tag_filter_type.character);
    printTagFilters(tag_filter_type.group_member);
    // We are also always reprinting the lists on character/group edit window, as these ones doesn't get updated otherwise
    applyTagsOnCharacterSelect();
    applyTagsOnGroupSelect();
    const entities = getEntitiesList({ doFilter: true });
    $('#rm_print_characters_pagination').pagination({
        dataSource: entities,
        // Page size is persisted in localStorage, falling back to the global default
        pageSize: Number(localStorage.getItem(storageKey)) || per_page_default,
        sizeChangerOptions: [10, 25, 50, 100, 250, 500, 1000],
        pageRange: 1,
        pageNumber: saveCharactersPage || 1,
        position: 'top',
        showPageNumbers: false,
        showSizeChanger: true,
        prevText: '<',
        nextText: '>',
        formatNavigator: PAGINATION_TEMPLATE,
        showNavigator: true,
        // Renders one page worth of entities into the list container
        callback: function (/** @type {Entity[]} */ data) {
            $(listId).empty();
            // "Go back" block when browsing inside a bogus (tag) folder
            if (power_user.bogus_folders && isBogusFolderOpen()) {
                $(listId).append(getBackBlock());
            }
            if (!data.length) {
                $(listId).append(getEmptyBlock());
            }
            let displayCount = 0;
            for (const i of data) {
                switch (i.type) {
                    case 'character':
                        $(listId).append(getCharacterBlock(i.item, i.id));
                        displayCount++;
                        break;
                    case 'group':
                        $(listId).append(getGroupBlock(i.item));
                        displayCount++;
                        break;
                    case 'tag':
                        // Tag folders don't count towards the displayed entity total
                        $(listId).append(getTagBlock(i.item, i.entities, i.hidden, i.isUseless));
                        break;
                }
            }
            // Show how many characters/groups were hidden by the active filters
            const hidden = (characters.length + groups.length) - displayCount;
            if (hidden > 0 && entitiesFilter.hasAnyFilter()) {
                $(listId).append(getHiddenBlock(hidden));
            }
            eventSource.emit(event_types.CHARACTER_PAGE_LOADED);
        },
        afterSizeSelectorChange: function (e) {
            // Persist the newly chosen page size
            localStorage.setItem(storageKey, e.target.value);
        },
        afterPaging: function (e) {
            // Remember the current page for subsequent reprints
            saveCharactersPage = e;
        },
        afterRender: function () {
            // Restore the scroll position saved before the re-render
            $(listId).scrollTop(currentScrollTop);
        },
    });
    favsToHotswap();
}
/** Checks the state of the current search, and adds/removes the search sorting option accordingly */
function verifyCharactersSearchSortRule() {
    const searchTerm = entitiesFilter.getFilterData(FILTER_TYPES.SEARCH);
    const searchOption = $('#character_sort_order option[data-field="search"]');
    const selector = $('#character_sort_order');
    const isHidden = searchOption.attr('hidden') !== undefined;
    if (searchTerm) {
        // A search is active: reveal and select the search sorting option
        if (isHidden) {
            searchOption.removeAttr('hidden');
            searchOption.prop('selected', true);
            flashHighlight(selector);
        }
    } else if (!isHidden) {
        // Search was cleared: hide the option and restore the previously configured sort
        searchOption.attr('hidden', '');
        $(`#character_sort_order option[data-order="${power_user.sort_order}"][data-field="${power_user.sort_field}"]`).prop('selected', true);
    }
}
/** @typedef {object} Character - A character */
/** @typedef {object} Group - A group */
/**
* @typedef {object} Entity - Object representing a display entity
* @property {Character|Group|import('./scripts/tags.js').Tag|*} item - The item
* @property {string|number} id - The id
* @property {'character'|'group'|'tag'} type - The type of this entity (character, group, tag)
* @property {Entity[]?} [entities=null] - An optional list of entities relevant for this item
* @property {number?} [hidden=null] - An optional number representing how many hidden entities this entity contains
* @property {boolean?} [isUseless=null] - Specifies if the entity is useless (not relevant, but should still be displayed for consistency) and should be displayed greyed out
*/
/**
 * Wraps a character object into its display-entity representation.
 *
 * @param {Character} character - The character
 * @param {string|number} id - The id of this character
 * @returns {Entity} The entity for this character
 */
export function characterToEntity(character, id) {
    return { type: 'character', id: id, item: character };
}
/**
 * Wraps a group object into its display-entity representation.
 * The entity id is taken from the group's own id.
 *
 * @param {Group} group - The group
 * @returns {Entity} The entity for this group
 */
export function groupToEntity(group) {
    return { type: 'group', id: group.id, item: group };
}
/**
 * Wraps a tag into its display-entity representation.
 * The tag itself is deep-cloned so later mutations of the original
 * do not leak into the entity; the sub-entity list starts empty.
 *
 * @param {import('./scripts/tags.js').Tag} tag - The tag
 * @returns {Entity} The entity for this tag
 */
export function tagToEntity(tag) {
    const tagCopy = structuredClone(tag);
    return { type: 'tag', id: tag.id, item: tagCopy, entities: [] };
}
/**
 * Builds the full list of all entities available
 *
 * They will be correctly marked and filtered.
 *
 * @param {object} param0 - Optional parameters
 * @param {boolean} [param0.doFilter] - Whether this entity list should already be filtered based on the global filters
 * @param {boolean} [param0.doSort] - Whether the entity list should be sorted when returned
 * @returns {Entity[]} All entities
 */
export function getEntitiesList({ doFilter = false, doSort = true } = {}) {
    // Characters, groups, and (when enabled) bogus tag folders all become entities
    let entities = [
        ...characters.map((item, index) => characterToEntity(item, index)),
        ...groups.map(item => groupToEntity(item)),
        ...(power_user.bogus_folders ? tags.filter(isBogusFolder).sort(compareTagsForSort).map(item => tagToEntity(item)) : []),
    ];
    // We need to do multiple filter runs in a specific order, otherwise different settings might override each other
    // and screw up tags and search filter, sub lists or similar.
    // The specific filters are written inside the "filterByTagState" method and its different parameters.
    // Generally what we do is the following:
    // 1. First swipe over the list to remove the most obvious things
    // 2. Build sub entity lists for all folders, filtering them similarly to the second swipe
    // 3. We do the last run, where global filters are applied, and the search filters last
    // First run filters, that will hide what should never be displayed
    if (doFilter) {
        entities = filterByTagState(entities);
    }
    // Run over all entities between first and second filter to save some states
    for (const entity of entities) {
        // For folders, we remember the sub entities so they can be displayed later, even if they might be filtered
        // Those sub entities should be filtered and have the search filters applied too
        if (entity.type === 'tag') {
            // subCount is measured before hidden-filtering, so the difference can be reported as "hidden"
            let subEntities = filterByTagState(entities, { subForEntity: entity, filterHidden: false });
            const subCount = subEntities.length;
            subEntities = filterByTagState(entities, { subForEntity: entity });
            if (doFilter) {
                // sub entities filter "hacked" because folder filter should not be applied there, so even in "only folders" mode characters show up
                subEntities = entitiesFilter.applyFilters(subEntities, { clearScoreCache: false, tempOverrides: { [FILTER_TYPES.FOLDER]: FILTER_STATES.UNDEFINED } });
            }
            if (doSort) {
                sortEntitiesList(subEntities);
            }
            entity.entities = subEntities;
            entity.hidden = subCount - subEntities.length;
        }
    }
    // Second run filters, hiding whatever should be filtered later
    if (doFilter) {
        const beforeFinalEntities = filterByTagState(entities, { globalDisplayFilters: true });
        entities = entitiesFilter.applyFilters(beforeFinalEntities);
        // Magic for folder filter. If that one is enabled, and no folders are display anymore, we remove that filter to actually show the characters.
        if (isFilterState(entitiesFilter.getFilterData(FILTER_TYPES.FOLDER), FILTER_STATES.SELECTED) && entities.filter(x => x.type == 'tag').length == 0) {
            entities = entitiesFilter.applyFilters(beforeFinalEntities, { tempOverrides: { [FILTER_TYPES.FOLDER]: FILTER_STATES.UNDEFINED } });
        }
    }
    // Final step, updating some properties after the last filter run
    // A folder that contains every remaining entity adds no information — mark it useless
    const nonTagEntitiesCount = entities.filter(entity => entity.type !== 'tag').length;
    for (const entity of entities) {
        if (entity.type === 'tag') {
            if (entity.entities?.length == nonTagEntitiesCount) entity.isUseless = true;
        }
    }
    // Sort before returning if requested
    if (doSort) {
        sortEntitiesList(entities);
    }
    return entities;
}
/**
 * Fetches a single character card from the server and replaces the matching
 * entry in the global characters array in place.
 * Shows an error toast if no entry with the given avatar exists locally.
 * @param {string} avatarUrl - The avatar file name identifying the character
 */
export async function getOneCharacter(avatarUrl) {
    const response = await fetch('/api/characters/get', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ avatar_url: avatarUrl }),
    });
    if (!response.ok) {
        return;
    }
    const getData = await response.json();
    // Sanitize server-provided fields before they touch the DOM
    getData['name'] = DOMPurify.sanitize(getData['name']);
    getData['chat'] = String(getData['chat']);
    const characterIndex = characters.findIndex(x => x.avatar === avatarUrl);
    if (characterIndex === -1) {
        toastr.error(`Character ${avatarUrl} not found in the list`, 'Error', { timeOut: 5000, preventDuplicates: true });
    } else {
        characters[characterIndex] = getData;
    }
}
/**
 * Resolves an external source URL for a character based on its card extensions.
 * Checks, in priority order: Chub, Pygmalion, GitHub, a generic source_url, and RisuRealm.
 * @param {string|number} [chId=this_chid] - Character id (index into the characters array)
 * @returns {string} The source URL, or an empty string if none is found
 */
function getCharacterSource(chId = this_chid) {
    const character = characters[chId];
    if (!character) {
        return '';
    }
    // Use the already-fetched character instead of re-indexing the global array for every field
    const extensions = character.data?.extensions;
    const chubId = extensions?.chub?.full_path;
    if (chubId) {
        return `https://chub.ai/characters/${chubId}`;
    }
    const pygmalionId = extensions?.pygmalion_id;
    if (pygmalionId) {
        return `https://pygmalion.chat/${pygmalionId}`;
    }
    const githubRepo = extensions?.github_repo;
    if (githubRepo) {
        return `https://github.com/${githubRepo}`;
    }
    const sourceUrl = extensions?.source_url;
    if (sourceUrl) {
        return sourceUrl;
    }
    // RisuRealm stores sources as an array of "scheme:id" strings
    const risuId = extensions?.risuai?.source;
    if (Array.isArray(risuId) && risuId.length && typeof risuId[0] === 'string' && risuId[0].startsWith('risurealm:')) {
        const realmId = risuId[0].split(':')[1];
        return `https://realm.risuai.net/character/${realmId}`;
    }
    return '';
}
/**
 * Reloads the full character list from the server into the global characters
 * array, sanitizing names and backfilling missing chat names, then refreshes
 * groups and reprints the character list.
 */
export async function getCharacters() {
    const response = await fetch('/api/characters/all', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ '': '' }),
    });
    if (response.ok === true) {
        // Replace the contents of the global array in place (it is shared by reference)
        characters.splice(0, characters.length);
        const getData = await response.json();
        for (let i = 0; i < getData.length; i++) {
            const character = getData[i];
            character['name'] = DOMPurify.sanitize(character['name']);
            // For dropped-in cards
            if (!character['chat']) {
                character['chat'] = `${character['name']} - ${humanizedDateTime()}`;
            }
            character['chat'] = String(character['chat']);
            characters[i] = character;
        }
        if (this_chid !== undefined) {
            $('#avatar_url_pole').val(characters[this_chid].avatar);
        }
        await getGroups();
        await printCharacters(true);
    }
}
/**
 * Deletes a chat file of the current character on the server.
 * If the deleted chat was the active one, switches to another chat first.
 * Emits CHAT_DELETED with the chat name (without extension) on success.
 * @param {string} chatfile - The chat file name (with .jsonl extension)
 */
async function delChat(chatfile) {
    const requestBody = JSON.stringify({
        chatfile: chatfile,
        avatar_url: characters[this_chid].avatar,
    });
    const response = await fetch('/api/chats/delete', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: requestBody,
    });
    if (!response.ok) {
        return;
    }
    // choose another chat if current was deleted
    const name = chatfile.replace('.jsonl', '');
    if (name === characters[this_chid].chat) {
        chat_metadata = {};
        await replaceCurrentChat();
    }
    await eventSource.emit(event_types.CHAT_DELETED, name);
}
/**
 * Clears the current chat and replaces it with another one for the current character:
 * the most recent existing chat if any exist, otherwise a newly named empty chat.
 */
export async function replaceCurrentChat() {
    await clearChat();
    chat.length = 0;
    const chatsResponse = await fetch('/api/characters/chats', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ avatar_url: characters[this_chid].avatar }),
    });
    if (chatsResponse.ok) {
        const chats = Object.values(await chatsResponse.json());
        // Most recent chat first
        chats.sort((a, b) => sortMoments(timestampToMoment(a.last_mes), timestampToMoment(b.last_mes)));
        // Pick the most recent existing chat, or start a new one if none exist
        // (both branches previously duplicated the save/update/load tail)
        const hasExistingChat = chats.length && typeof chats[0] === 'object';
        characters[this_chid].chat = hasExistingChat
            ? chats[0].file_name.replace('.jsonl', '')
            : `${name2} - ${humanizedDateTime()}`;
        $('#selected_chat_pole').val(characters[this_chid].chat);
        saveCharacterDebounced();
        await getChat();
    }
}
/**
 * Prepends another page of older messages above the currently displayed ones,
 * keeping the viewport anchored on the previously visible content.
 */
export function showMoreMessages() {
    const firstDisplayedMesId = $('#chat').children('.mes').first().attr('mesid');
    let messageId = Number(firstDisplayedMesId);
    let count = power_user.chat_truncation || Number.MAX_SAFE_INTEGER;
    // If there are no messages displayed, or the message somehow has no mesid,
    // default to one higher than the last message id, so the first "new"
    // message shown will be the last available message
    if (isNaN(messageId)) {
        messageId = getLastMessageId() + 1;
    }
    console.debug('Inserting messages before', messageId, 'count', count, 'chat length', chat.length);
    const prevHeight = $('#chat').prop('scrollHeight');
    for (; messageId > 0 && count > 0; messageId--, count--) {
        const insertBefore = messageId >= chat.length ? null : messageId;
        addOneMessage(chat[messageId - 1], { insertBefore: insertBefore, scroll: false, forceId: messageId - 1 });
    }
    // Everything is shown now — the "show more" control is no longer needed
    if (messageId == 0) {
        $('#show_more_messages').remove();
    }
    // Keep the previously visible messages in place after prepending
    const newHeight = $('#chat').prop('scrollHeight');
    $('#chat').scrollTop(newHeight - prevHeight);
}
/**
 * Renders the current chat array into the chat DOM, truncated to the configured
 * message count, then scrolls to the bottom once all inline images have loaded.
 */
export async function printMessages() {
    let startIndex = 0;
    let count = power_user.chat_truncation || Number.MAX_SAFE_INTEGER;
    if (chat.length > count) {
        startIndex = chat.length - count;
        // NOTE(review): this string literal appears garbled (it spans multiple lines);
        // it likely originally contained HTML markup for a "Show more messages" control — verify against upstream.
        $('#chat').append('
Show more messages
');
    }
    for (let i = startIndex; i < chat.length; i++) {
        const item = chat[i];
        addOneMessage(item, { scroll: false, forceId: i, showSwipes: false });
    }
    // Scroll to bottom when all images are loaded
    const images = document.querySelectorAll('#chat .mes img');
    let imagesLoaded = 0;
    for (let i = 0; i < images.length; i++) {
        const image = images[i];
        if (image instanceof HTMLImageElement) {
            if (image.complete) {
                incrementAndCheck();
            } else {
                image.addEventListener('load', incrementAndCheck);
            }
        }
    }
    $('#chat .mes').removeClass('last_mes');
    $('#chat .mes').last().addClass('last_mes');
    hideSwipeButtons();
    showSwipeButtons();
    scrollChatToBottom();
    // Counts finished images; scrolls again once the final one has loaded
    function incrementAndCheck() {
        imagesLoaded++;
        if (imagesLoaded === images.length) {
            scrollChatToBottom();
        }
    }
}
/**
 * Empties the chat DOM and related UI state: closes the message editor,
 * resets extension prompts, cancels delete mode, removes zoomed avatars,
 * and persists then clears the itemized prompts.
 */
export async function clearChat() {
    closeMessageEditor();
    extension_prompts = {};
    if (is_delete_mode) {
        $('#dialogue_del_mes_cancel').trigger('click');
    }
    $('#chat').children().remove();
    const zoomedAvatars = $('.zoomed_avatar[forChar]');
    if (zoomedAvatars.length) {
        console.debug('saw avatars to remove');
        zoomedAvatars.remove();
    } else {
        console.debug('saw no avatars');
    }
    // Persist itemized prompts for this chat before discarding them
    await saveItemizedPrompts(getCurrentChatId());
    itemizedPrompts = [];
}
/**
 * Removes the last message from the chat array and the DOM, then emits MESSAGE_DELETED
 * with the new chat length.
 * No-op when the chat is already empty (setting a negative array length throws a RangeError).
 */
export async function deleteLastMessage() {
    if (chat.length === 0) {
        return;
    }
    chat.length = chat.length - 1;
    $('#chat').children('.mes').last().remove();
    await eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
}
/**
 * Clears and reloads the current chat from its source: the active group chat,
 * the selected character's chat, or — when nothing is selected — a blank state
 * with a full character list refresh.
 */
export async function reloadCurrentChat() {
    await clearChat();
    chat.length = 0;
    if (selected_group) {
        await getGroupChat(selected_group, true);
    }
    else if (this_chid !== undefined) {
        await getChat();
    }
    else {
        // No character or group selected: reset to the neutral state
        resetChatState();
        await getCharacters();
        await printMessages();
        await eventSource.emit(event_types.CHAT_CHANGED, getCurrentChatId());
    }
    hideSwipeButtons();
    showSwipeButtons();
}
/**
 * Send the message currently typed into the chat box.
 * When "continue on send" is enabled and the box is empty, and the last chat
 * message came from a character (not the user or the system, and not in a
 * group), a "continue" generation is triggered instead of a normal one.
 */
export function sendTextareaMessage() {
    if (is_send_press) return;
    if (isExecutingCommandsFromChatInput) return;

    const textareaText = String($('#send_textarea').val());
    const lastMessage = chat.length ? chat[chat.length - 1] : null;
    const shouldContinue = Boolean(
        power_user.continue_on_send &&
        !textareaText &&
        !selected_group &&
        lastMessage &&
        !lastMessage['is_user'] &&
        !lastMessage['is_system'],
    );
    Generate(shouldContinue ? 'continue' : undefined);
}
/**
 * Formats the message text into an HTML string using Markdown and other formatting.
 * @param {string} mes Message text
 * @param {string} ch_name Character name
 * @param {boolean} isSystem If the message was sent by the system
 * @param {boolean} isUser If the message was sent by the user
 * @param {number} messageId Message index in chat array
 * @returns {string} HTML string
 */
export function messageFormatting(mes, ch_name, isSystem, isUser, messageId) {
    if (!mes) {
        return '';
    }
    // For the character's first message, substitute macros once and persist the
    // result back to the chat object (only when the stored text still matches)
    if (Number(messageId) === 0 && !isSystem && !isUser) {
        const mesBeforeReplace = mes;
        const chatMessage = chat[messageId];
        mes = substituteParams(mes, undefined, ch_name);
        if (chatMessage && chatMessage.mes === mesBeforeReplace && chatMessage.extra?.display_text !== mesBeforeReplace) {
            chatMessage.mes = mes;
        }
    }
    mesForShowdownParse = mes;
    // Force isSystem = false on comment messages so they get formatted properly
    if (ch_name === COMMENT_NAME_DEFAULT && isSystem && !isUser) {
        isSystem = false;
    }
    // Let hidden messages have markdown
    if (isSystem && ch_name !== systemUserName) {
        isSystem = false;
    }
    // Prompt bias replacement should be applied on the raw message
    if (!power_user.show_user_prompt_bias && ch_name && !isUser && !isSystem) {
        mes = mes.replaceAll(substituteParams(power_user.user_prompt_bias), '');
    }
    if (!isSystem) {
        // Picks the regex placement category based on who authored the message
        function getRegexPlacement() {
            try {
                if (isUser) {
                    return regex_placement.USER_INPUT;
                } else if (chat[messageId]?.extra?.type === 'narrator') {
                    return regex_placement.SLASH_COMMAND;
                } else {
                    return regex_placement.AI_OUTPUT;
                }
            } catch {
                return regex_placement.AI_OUTPUT;
            }
        }
        const regexPlacement = getRegexPlacement();
        // depth = number of non-system messages that come after this one
        const usableMessages = chat.map((x, index) => ({ message: x, index: index })).filter(x => !x.message.is_system);
        const indexOf = usableMessages.findIndex(x => x.index === Number(messageId));
        const depth = messageId >= 0 && indexOf !== -1 ? (usableMessages.length - indexOf - 1) : undefined;
        // Always override the character name
        mes = getRegexedString(mes, regexPlacement, {
            characterOverride: ch_name,
            isMarkdown: true,
            depth: depth,
        });
    }
    if (power_user.auto_fix_generated_markdown) {
        mes = fixMarkdown(mes, true);
    }
    // NOTE(review): the replacement targets below look garbled (likely originally
    // HTML entities such as &lt;/&gt;) — verify against the upstream source.
    if (!isSystem && power_user.encode_tags) {
        mes = mes.replaceAll('<', '<').replaceAll('>', '>');
    }
    if (this_chid === undefined && !selected_group) {
        // No character or group loaded: strip bold markdown instead of rendering
        mes = mes.replace(/\*\*(.+?)\*\*/g, '$1');
    } else if (!isSystem) {
        // Save double quotes in tags as a special character to prevent them from being encoded
        if (!power_user.encode_tags) {
            mes = mes.replace(/<([^>]+)>/g, function (_, contents) {
                return '<' + contents.replace(/"/g, '\ufffe') + '>';
            });
        }
        // Skip code spans; normalize straight and curly quoted passages
        // NOTE(review): the replacement quotes here may also be garbled markup — verify upstream
        mes = mes.replace(/```[\s\S]*?```|``[\s\S]*?``|`[\s\S]*?`|(".+?")|(\u201C.+?\u201D)/gm, function (match, p1, p2) {
            if (p1) {
                return '"' + p1.replace(/"/g, '') + '"';
            } else if (p2) {
                return '“' + p2.replace(/\u201C|\u201D/g, '') + '”';
            } else {
                return match;
            }
        });
        // Restore double quotes in tags
        if (!power_user.encode_tags) {
            mes = mes.replace(/\ufffe/g, '"');
        }
        // Convert LaTeX align* environments to display-math delimiters
        mes = mes.replaceAll('\\begin{align*}', '$$');
        mes = mes.replaceAll('\\end{align*}', '$$');
        mes = converter.makeHtml(mes);
        // NOTE(review): these two regexes look garbled (likely originally matched <code>…</code>) — verify upstream
        mes = mes.replace(/[\s\S]*?<\/code>/g, function (match) {
            // Firefox creates extra newlines from s in code blocks, so we replace them before converting newlines to s.
            return match.replace(/\n/gm, '\u0000');
        });
        mes = mes.replace(/\u0000/g, '\n'); // Restore converted newlines
        mes = mes.trim();
        mes = mes.replace(/[\s\S]*?<\/code>/g, function (match) {
            return match.replace(/&/g, '&');
        });
    }
    /*
    // Hides bias from empty messages send with slash commands
    if (isSystem) {
        mes = mes.replace(/\{\{[\s\S]*?\}\}/gm, "");
    }
    */
    // Strip leading "CharName:" prefixes the model may have echoed back
    if (!power_user.allow_name2_display && ch_name && !isUser && !isSystem) {
        mes = mes.replace(new RegExp(`(^|\n)${escapeRegex(ch_name)}:`, 'g'), '$1');
    }
    /** @type {any} */
    const config = { MESSAGE_SANITIZE: true, ADD_TAGS: ['custom-style'] };
    // Encode custom style tags, sanitize the HTML, then decode them back
    mes = encodeStyleTags(mes);
    mes = DOMPurify.sanitize(mes, config);
    mes = decodeStyleTags(mes);
    return mes;
}
/**
 * Inserts or replaces an SVG icon adjacent to the provided message's timestamp.
 *
 * The icon file is derived from the API/model combination: Claude-family models
 * get "claude.svg", OpenAI models routed through the "openai" API get
 * "openai.svg", other slash-separated (OpenRouter-style) models get
 * "openrouter.svg", and everything else uses the SVG named after `extra.api`.
 *
 * @param {JQuery} mes - The message element containing the timestamp where the icon should be inserted or replaced.
 * @param {Object} extra - Contains the API and model details.
 * @param {string} extra.api - The name of the API, used to determine which SVG to fetch.
 * @param {string} extra.model - The model name, used to check for model-specific icons.
 */
function insertSVGIcon(mes, extra) {
    // Determine the SVG filename
    let modelName;
    const lowerModel = extra.model?.toLowerCase();
    if (extra.api === 'openai' && lowerModel?.includes('claude')) {
        // Claude on OpenRouter or Anthropic
        modelName = 'claude';
    } else if (extra.api === 'openai' && lowerModel?.includes('openai')) {
        // OpenAI on OpenRouter
        modelName = 'openai';
    } else if (extra.api === 'openai' && (extra.model === null || lowerModel?.includes('/'))) {
        // OpenRouter website model or other models
        modelName = 'openrouter';
    } else {
        // Everything else
        modelName = extra.api;
    }

    const image = new Image();
    // Add classes for styling and identification
    image.classList.add('icon-svg', 'timestamp-icon');
    image.src = `/img/${modelName}.svg`;
    image.title = `${extra?.api ? extra.api + ' - ' : ''}${extra?.model ?? ''}`;
    image.onload = async function () {
        // Check if an SVG already exists adjacent to the timestamp
        const existingSVG = mes.find('.timestamp').next('.timestamp-icon');
        if (existingSVG.length) {
            // Replace existing SVG
            existingSVG.replaceWith(image);
        } else {
            // Append the new SVG if none exists
            mes.find('.timestamp').after(image);
        }
        await SVGInject(this);
    };
}
/**
 * Clones the base message template and fills it with the provided message data.
 * @param {object} param0 - Message display parameters
 * @returns {JQuery} The populated message element
 */
function getMessageFromTemplate({
    mesId,
    swipeId,
    characterName,
    isUser,
    avatarImg,
    bias,
    isSystem,
    title,
    timerValue,
    timerTitle,
    bookmarkLink,
    forceAvatar,
    timestamp,
    tokenCount,
    extra,
}) {
    const mes = messageTemplate.clone();
    // Stamp all identifying attributes onto the message element
    mes.attr({
        'mesid': mesId,
        'swipeid': swipeId,
        'ch_name': characterName,
        'is_user': isUser,
        'is_system': !!isSystem,
        'bookmark_link': bookmarkLink,
        'force_avatar': !!forceAvatar,
        'timestamp': timestamp,
    });
    mes.find('.avatar img').attr('src', avatarImg);
    mes.find('.ch_name .name_text').text(characterName);
    mes.find('.mes_bias').html(bias);
    // Timestamp tooltip shows "api - model" when that info is available
    const timestampTitle = `${extra?.api ? extra.api + ' - ' : ''}${extra?.model ?? ''}`;
    mes.find('.timestamp').text(timestamp).attr('title', timestampTitle);
    mes.find('.mesIDDisplay').text(`#${mesId}`);
    if (tokenCount) {
        mes.find('.tokenCounterDisplay').text(`${tokenCount}t`);
    }
    if (title) {
        mes.attr('title', title);
    }
    if (timerValue) {
        mes.find('.mes_timer').attr('title', timerTitle).text(timerValue);
    }
    if (power_user.timestamp_model_icon && extra?.api) {
        insertSVGIcon(mes, extra);
    }
    return mes;
}
/**
 * Re-renders an existing message block in the chat DOM from its message object.
 * @param {number} messageId - Index of the message in the chat array
 * @param {object} message - The message object to render
 */
export function updateMessageBlock(messageId, message) {
    const messageElement = $(`#chat [mesid="${messageId}"]`);
    // Prefer the pre-rendered display text when present
    const text = message?.extra?.display_text ?? message.mes;
    const formattedText = messageFormatting(text, message.name, message.is_system, message.is_user, messageId);
    messageElement.find('.mes_text').html(formattedText);
    addCopyToCodeBlocks(messageElement);
    appendMediaToMessage(message, messageElement);
}
/**
 * Appends image or file to the message element.
 * @param {object} mes Message object
 * @param {JQuery} messageElement Message element
 * @param {boolean} [adjustScroll=true] Whether to adjust the scroll position after appending the media
 */
export function appendMediaToMessage(mes, messageElement, adjustScroll = true) {
    // Image attachment
    if (mes.extra?.image) {
        const container = messageElement.find('.mes_img_container');
        const oldChatHeight = $('#chat').prop('scrollHeight');
        const imageEl = messageElement.find('.mes_img');
        const textEl = messageElement.find('.mes_text');
        const isInline = !!mes.extra?.inline_image;
        // Keep the viewport anchored when the image load changes the chat height
        imageEl.off('load').on('load', function () {
            if (!adjustScroll) {
                return;
            }
            const scrollPosition = $('#chat').scrollTop();
            const heightDiff = $('#chat').prop('scrollHeight') - oldChatHeight;
            $('#chat').scrollTop(scrollPosition + heightDiff);
        });
        imageEl.attr('src', mes.extra?.image);
        imageEl.attr('title', mes.extra?.title || mes.title || '');
        container.addClass('img_extra');
        imageEl.toggleClass('img_inline', isInline);
        textEl.toggleClass('displayNone', !isInline);
        // Optional image swipe controls when the message holds several images
        const imageSwipes = mes.extra.image_swipes;
        if (Array.isArray(imageSwipes) && imageSwipes.length > 0) {
            container.addClass('img_swipes');
            const counter = container.find('.mes_img_swipe_counter');
            counter.text(`${imageSwipes.indexOf(mes.extra.image) + 1}/${imageSwipes.length}`);
            container.find('.mes_img_swipe_left').off('click').on('click', function () {
                eventSource.emit(event_types.IMAGE_SWIPED, { message: mes, element: messageElement, direction: 'left' });
            });
            container.find('.mes_img_swipe_right').off('click').on('click', function () {
                eventSource.emit(event_types.IMAGE_SWIPED, { message: mes, element: messageElement, direction: 'right' });
            });
        }
    }
    // File attachment: always drop the stale container, then re-render it from the template
    messageElement.find('.mes_file_container').remove();
    if (mes.extra?.file) {
        const messageId = messageElement.attr('mesid');
        const template = $('#message_file_template .mes_file_container').clone();
        template.find('.mes_file_name').text(mes.extra.file.name);
        template.find('.mes_file_size').text(humanFileSize(mes.extra.file.size));
        template.find('.mes_file_download').attr('mesid', messageId);
        template.find('.mes_file_delete').attr('mesid', messageId);
        messageElement.find('.mes_block').append(template);
    }
}
/**
 * Forwards to {@link appendMediaToMessage} with default scroll adjustment.
 * @deprecated Use appendMediaToMessage instead.
 * @param {object} mes Message object
 * @param {JQuery} messageElement Message element
 */
export function appendImageToMessage(mes, messageElement) {
    appendMediaToMessage(mes, messageElement);
}
/**
 * Applies syntax highlighting to all code blocks of a message and, when the
 * Clipboard API is available, attaches a copy-to-clipboard button to each.
 * @param {JQuery|HTMLElement} messageElement - The message element to process
 */
export function addCopyToCodeBlocks(messageElement) {
    const codeBlocks = $(messageElement).find('pre code');
    for (let i = 0; i < codeBlocks.length; i++) {
        const codeBlock = codeBlocks.get(i);
        hljs.highlightElement(codeBlock);
        // The Clipboard API is unavailable in insecure contexts
        if (navigator.clipboard === undefined) {
            continue;
        }
        const copyButton = document.createElement('i');
        copyButton.classList.add('fa-solid', 'fa-copy', 'code-copy', 'interactable');
        copyButton.title = 'Copy code';
        codeBlock.appendChild(copyButton);
        copyButton.addEventListener('pointerup', function () {
            navigator.clipboard.writeText(codeBlock.innerText);
            toastr.info('Copied!', '', { timeOut: 2000 });
        });
    }
}
/**
 * Renders a single chat message into the chat DOM.
 * @param {object} mes - The message object to render
 * @param {object} [options] - Render options
 * @param {string} [options.type='normal'] - 'swipe' updates the existing last message in place instead of inserting
 * @param {number?} [options.insertAfter=null] - mesid to insert after (instead of appending at the end)
 * @param {boolean} [options.scroll=true] - Whether to scroll the chat to the bottom afterwards
 * @param {number?} [options.insertBefore=null] - mesid to insert before (instead of appending at the end)
 * @param {number?} [options.forceId=null] - Explicit mesid to assign instead of chat.length - 1
 * @param {boolean} [options.showSwipes=true] - Whether to refresh swipe buttons and last_mes classes
 */
export function addOneMessage(mes, { type = 'normal', insertAfter = null, scroll = true, insertBefore = null, forceId = null, showSwipes = true } = {}) {
    let messageText = mes['mes'];
    const momentDate = timestampToMoment(mes.send_date);
    const timestamp = momentDate.isValid() ? momentDate.format('LL LT') : '';
    // Prefer the pre-rendered display text when the message carries one
    if (mes?.extra?.display_text) {
        messageText = mes.extra.display_text;
    }
    // Forbidden black magic
    // This allows to use "continue" on user messages
    if (type === 'swipe' && mes.swipe_id === undefined) {
        mes.swipe_id = 0;
        mes.swipes = [mes.mes];
    }
    let avatarImg = getUserAvatar(user_avatar);
    const isSystem = mes.is_system;
    const title = mes.title;
    generatedPromptCache = '';
    //for non-user mesages
    if (!mes['is_user']) {
        // Avatar priority: forced avatar > system avatar (no char loaded) > character avatar > default
        if (mes.force_avatar) {
            avatarImg = mes.force_avatar;
        } else if (this_chid === undefined) {
            avatarImg = system_avatar;
        } else {
            if (characters[this_chid].avatar != 'none') {
                avatarImg = getThumbnailUrl('avatar', characters[this_chid].avatar);
            } else {
                avatarImg = default_avatar;
            }
        }
        //old processing:
        //if messge is from sytem, use the name provided in the message JSONL to proceed,
        //if not system message, use name2 (char's name) to proceed
        //characterName = mes.is_system || mes.force_avatar ? mes.name : name2;
    } else if (mes['is_user'] && mes['force_avatar']) {
        // Special case for persona images.
        avatarImg = mes['force_avatar'];
    }
    // Render the message body and any bias to HTML
    messageText = messageFormatting(
        messageText,
        mes.name,
        isSystem,
        mes.is_user,
        chat.indexOf(mes),
    );
    const bias = messageFormatting(mes.extra?.bias ?? '', '', false, false, -1);
    let bookmarkLink = mes?.extra?.bookmark_link ?? '';
    let params = {
        mesId: forceId ?? chat.length - 1,
        swipeId: mes.swipe_id ?? 0,
        characterName: mes.name,
        isUser: mes.is_user,
        avatarImg: avatarImg,
        bias: bias,
        isSystem: isSystem,
        title: title,
        bookmarkLink: bookmarkLink,
        forceAvatar: mes.force_avatar,
        timestamp: timestamp,
        extra: mes.extra,
        tokenCount: mes.extra?.token_count ?? 0,
        // Spreads in timerValue/timerTitle when generation timing info exists
        ...formatGenerationTimer(mes.gen_started, mes.gen_finished, mes.extra?.token_count),
    };
    const renderedMessage = getMessageFromTemplate(params);
    // Swipes update an existing block in place; everything else inserts a new one
    if (type !== 'swipe') {
        if (!insertAfter && !insertBefore) {
            chatElement.append(renderedMessage);
        }
        else if (insertAfter) {
            const target = chatElement.find(`.mes[mesid="${insertAfter}"]`);
            $(renderedMessage).insertAfter(target);
        } else {
            const target = chatElement.find(`.mes[mesid="${insertBefore}"]`);
            $(renderedMessage).insertBefore(target);
        }
    }
    // Callers push the new message to chat before calling addOneMessage
    const newMessageId = typeof forceId == 'number' ? forceId : chat.length - 1;
    const newMessage = $(`#chat [mesid="${newMessageId}"]`);
    const isSmallSys = mes?.extra?.isSmallSys;
    if (isSmallSys === true) {
        newMessage.addClass('smallSysMes');
    }
    //shows or hides the Prompt display button
    let mesIdToFind = type == 'swipe' ? params.mesId - 1 : params.mesId; //Number(newMessage.attr('mesId'));
    //if we have itemized messages, and the array isn't null..
    if (params.isUser === false && Array.isArray(itemizedPrompts) && itemizedPrompts.length > 0) {
        const itemizedPrompt = itemizedPrompts.find(x => Number(x.mesId) === Number(mesIdToFind));
        if (itemizedPrompt) {
            newMessage.find('.mes_prompt').show();
        }
    }
    // Hide broken avatar images instead of showing the browser's placeholder
    newMessage.find('.avatar img').on('error', function () {
        $(this).hide();
        $(this).parent().html('');
    });
    if (type === 'swipe') {
        const swipeMessage = chatElement.find(`[mesid="${chat.length - 1}"]`);
        swipeMessage.attr('swipeid', params.swipeId);
        swipeMessage.find('.mes_text').html(messageText).attr('title', title);
        swipeMessage.find('.timestamp').text(timestamp).attr('title', `${params.extra.api} - ${params.extra.model}`);
        appendMediaToMessage(mes, swipeMessage);
        if (power_user.timestamp_model_icon && params.extra?.api) {
            insertSVGIcon(swipeMessage, params.extra);
        }
        // Timer and token count are only shown for the latest swipe
        if (mes.swipe_id == mes.swipes.length - 1) {
            swipeMessage.find('.mes_timer').text(params.timerValue).attr('title', params.timerTitle);
            swipeMessage.find('.tokenCounterDisplay').text(`${params.tokenCount}t`);
        } else {
            swipeMessage.find('.mes_timer').empty();
            swipeMessage.find('.tokenCounterDisplay').empty();
        }
    } else {
        const messageId = forceId ?? chat.length - 1;
        chatElement.find(`[mesid="${messageId}"] .mes_text`).append(messageText);
        appendMediaToMessage(mes, newMessage);
        showSwipes && hideSwipeButtons();
    }
    addCopyToCodeBlocks(newMessage);
    // Maintain the last_mes marker and swipe buttons
    if (showSwipes) {
        $('#chat .mes').last().addClass('last_mes');
        $('#chat .mes').eq(-2).removeClass('last_mes');
        hideSwipeButtons();
        showSwipeButtons();
    }
    // Don't scroll if not inserting last
    if (!insertAfter && !insertBefore && scroll) {
        scrollChatToBottom();
    }
}
/**
 * Returns the URL of the avatar for the given character Id.
 * Falls back to the default avatar when the character has no custom one.
 * @param {number} characterId Character Id
 * @returns {string} Avatar URL
 */
export function getCharacterAvatar(characterId) {
    const avatarImg = characters[characterId]?.avatar;
    const hasCustomAvatar = Boolean(avatarImg) && avatarImg !== 'none';
    return hasCustomAvatar ? formatCharacterAvatar(avatarImg) : default_avatar;
}
/**
 * Builds the relative URL for a character avatar image file.
 * @param {string} characterAvatar Avatar file name
 * @returns {string} Relative URL under the characters folder
 */
export function formatCharacterAvatar(characterAvatar) {
    return ['characters', characterAvatar].join('/');
}
/**
 * Formats the title for the generation timer.
 * Returns an empty object when either timestamp is missing.
 * @param {Date} gen_started Date when generation was started
 * @param {Date} gen_finished Date when generation was finished
 * @param {number} tokenCount Number of tokens generated (0 if not available)
 * @returns {Object} Object containing the formatted timer value and title
 * @example
 * const { timerValue, timerTitle } = formatGenerationTimer(gen_started, gen_finished, tokenCount);
 * console.log(timerValue); // 1.2s
 */
function formatGenerationTimer(gen_started, gen_finished, tokenCount) {
    if (!gen_started || !gen_finished) {
        return {};
    }
    const dateFormat = 'HH:mm:ss D MMM YYYY';
    const start = moment(gen_started);
    const finish = moment(gen_finished);
    const seconds = finish.diff(start, 'seconds', true);
    const titleLines = [
        `Generation queued: ${start.format(dateFormat)}`,
        `Reply received: ${finish.format(dateFormat)}`,
        `Time to generate: ${seconds} seconds`,
        tokenCount > 0 ? `Token rate: ${Number(tokenCount / seconds).toFixed(1)} t/s` : '',
    ];
    const timerTitle = titleLines.join('\n');
    // A NaN or negative duration (e.g. clock skew) blanks the value but keeps the title.
    const isInvalidDuration = isNaN(seconds) || seconds < 0;
    const timerValue = isInvalidDuration ? '' : `${seconds.toFixed(1)}s`;
    return { timerValue, timerTitle };
}
/**
 * Scrolls the chat container to the bottom (or to the top of the last message
 * in waifu mode). No-op when auto-scroll is disabled in power user settings.
 */
export function scrollChatToBottom() {
    if (!power_user.auto_scroll_chat_to_bottom) {
        return;
    }
    let targetPosition = chatElement[0].scrollHeight;
    if (power_user.waifuMode) {
        const lastMessage = chatElement.find('.mes').last();
        if (lastMessage.length) {
            targetPosition = chatElement.scrollTop() + lastMessage.position().top;
        }
    }
    chatElement.scrollTop(targetPosition);
}
/**
 * Substitutes {{macro}} parameters in a string. Convenience wrapper around
 * `substituteParams` that always enables character card macro replacement
 * and forwards caller-supplied additional macros.
 * @param {string} content - The string to substitute parameters in.
 * @param {Record<string, any>} additionalMacro - Additional environment variables for substitution.
 * @returns {string} The string with substituted parameters.
 */
export function substituteParamsExtended(content, additionalMacro = {}) {
    return substituteParams(content, undefined, undefined, undefined, undefined, true, additionalMacro);
}
/**
 * Substitutes {{macro}} parameters in a string.
 * @param {string} content - The string to substitute parameters in.
 * @param {string} [_name1] - The name of the user. Uses global name1 if not provided.
 * @param {string} [_name2] - The name of the character. Uses global name2 if not provided.
 * @param {string} [_original] - The original message for {{original}} substitution.
 * @param {string} [_group] - The group members list for {{group}} substitution.
 * @param {boolean} [_replaceCharacterCard] - Whether to replace character card macros.
 * @param {Record<string, any>} [additionalMacro] - Additional environment variables for substitution.
 * @returns {string} The string with substituted parameters.
 */
export function substituteParams(content, _name1, _name2, _original, _group, _replaceCharacterCard = true, additionalMacro = {}) {
    if (!content) {
        return '';
    }
    // Macro name -> value (or lazy getter) environment handed to the macro engine.
    const environment = {};
    if (typeof _original === 'string') {
        // {{original}} must only be substituted once; later occurrences resolve to ''.
        let originalSubstituted = false;
        environment.original = () => {
            if (originalSubstituted) {
                return '';
            }
            originalSubstituted = true;
            return _original;
        };
    }
    // {{group}}: explicit override, else a comma-separated list of group member
    // names, else the character name when no group is selected.
    const getGroupValue = () => {
        if (typeof _group === 'string') {
            return _group;
        }
        if (selected_group) {
            const members = groups.find(x => x.id === selected_group)?.members;
            const names = Array.isArray(members)
                ? members.map(m => characters.find(c => c.avatar === m)?.name).filter(Boolean).join(', ')
                : '';
            return names;
        } else {
            return _name2 ?? name2;
        }
    };
    if (_replaceCharacterCard) {
        // Card-derived macros; each falls back to '' so missing fields never leak 'undefined'.
        const fields = getCharacterCardFields();
        environment.charPrompt = fields.system || '';
        environment.charJailbreak = fields.jailbreak || '';
        environment.description = fields.description || '';
        environment.personality = fields.personality || '';
        environment.scenario = fields.scenario || '';
        environment.persona = fields.persona || '';
        environment.mesExamples = fields.mesExamples || '';
        environment.charVersion = fields.version || '';
        environment.char_version = fields.version || '';
    }
    // Must be substituted last so that they're replaced inside {{description}}
    environment.user = _name1 ?? name1;
    environment.char = _name2 ?? name2;
    environment.group = environment.charIfNotGroup = getGroupValue();
    environment.model = getGeneratingModel();
    if (additionalMacro && typeof additionalMacro === 'object') {
        // Caller-supplied macros win over the defaults above.
        Object.assign(environment, additionalMacro);
    }
    return evaluateMacros(content, environment);
}
/**
 * Gets stopping sequences for the prompt.
 * @param {boolean} isImpersonate A request is made to impersonate a user
 * @param {boolean} isContinue A request is made to continue the message
 * @returns {string[]} Array of stopping strings (deduplicated)
 */
export function getStoppingStrings(isImpersonate, isContinue) {
    const result = [];
    if (power_user.context.names_as_stop_strings) {
        const charString = `\n${name2}:`;
        const userString = `\n${name1}:`;
        // When impersonating, the character name ends generation; otherwise the user name does.
        result.push(isImpersonate ? charString : userString);
        result.push(userString);
        if (isContinue && Array.isArray(chat) && chat[chat.length - 1]?.is_user) {
            result.push(charString);
        }
        // Add group members as stopping strings if generating for a specific group member or user. (Allow slash commands to work around name stopping string restrictions)
        if (selected_group && (name2 || isImpersonate)) {
            const group = groups.find(x => x.id === selected_group);
            const members = Array.isArray(group?.members) ? group.members : [];
            for (const avatar of members) {
                const character = characters.find(y => y.avatar == avatar);
                if (character && character.name && character.name !== name2) {
                    result.push(`\n${character.name}:`);
                }
            }
        }
    }
    result.push(...getInstructStoppingSequences());
    result.push(...getCustomStoppingStrings());
    if (power_user.single_line) {
        result.unshift('\n');
    }
    return result.filter(onlyUnique);
}
/**
 * Background generation based on the provided prompt.
 * @param {string} quiet_prompt Instruction prompt for the AI
 * @param {boolean} quietToLoud Whether the message should be sent in a foreground (loud) or background (quiet) mode
 * @param {boolean} skipWIAN whether to skip addition of World Info and Author's Note into the prompt
 * @param {string} quietImage Image to use for the quiet prompt
 * @param {string} quietName Name to use for the quiet prompt (defaults to "System:")
 * @param {number} [responseLength] Maximum response length. If unset, the global default value is used.
 * @returns {Promise} Resolves with the generation result
 */
export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, quietImage = null, quietName = null, responseLength = null) {
    console.log('got into genQuietPrompt');
    const hasCustomLength = typeof responseLength === 'number' && responseLength > 0;
    let savedResponseLength = -1;
    try {
        /** @type {GenerateOptions} */
        const options = {
            quiet_prompt,
            quietToLoud,
            skipWIAN: skipWIAN,
            force_name2: true,
            quietImage: quietImage,
            quietName: quietName,
        };
        // Temporarily override the response length for this one generation.
        if (hasCustomLength) {
            savedResponseLength = saveResponseLength(main_api, responseLength);
        }
        return await Generate('quiet', options);
    } finally {
        // Always restore the original length, even if generation throws.
        if (hasCustomLength) {
            restoreResponseLength(main_api, savedResponseLength);
        }
    }
}
/**
 * Executes slash commands and returns whether the generation was interrupted.
 * @param {string} message Text to be sent
 * @returns {Promise} Whether the message sending was interrupted
 */
export async function processCommands(message) {
    const trimmed = message?.trim();
    // Only text that begins (after whitespace) with '/' is a slash command.
    if (!trimmed || !trimmed.startsWith('/')) {
        return false;
    }
    await executeSlashCommandsOnChatInput(message, { clearChatInput: true });
    return true;
}
/**
 * Pushes a system message of the given type into the chat and renders it.
 * @param {string} type System message type (a key of `system_messages`)
 * @param {string} [text] Optional text overriding the template's default message
 * @param {object} [extra] Extra properties merged into the message's `extra` object
 */
export function sendSystemMessage(type, text, extra = {}) {
    const systemMessage = system_messages[type];
    if (!systemMessage) {
        return;
    }
    const newMessage = { ...systemMessage, send_date: getMessageTimeStamp() };
    if (text) {
        newMessage.mes = text;
    }
    if (type == system_message_types.SLASH_COMMANDS) {
        newMessage.mes = getSlashCommandsHelp();
    }
    if (!newMessage.extra) {
        newMessage.extra = {};
    }
    newMessage.extra = Object.assign(newMessage.extra, extra);
    newMessage.extra.type = type;
    chat.push(newMessage);
    addOneMessage(newMessage);
    is_send_press = false;
    if (type == system_message_types.SLASH_COMMANDS) {
        const browser = new SlashCommandBrowser();
        const spinner = document.querySelector('#chat .last_mes .custom-slashHelp');
        // Fix: guard against a missing placeholder element — `spinner.parentElement`
        // previously threw a TypeError when the rendered template lacked it.
        if (spinner) {
            const parent = spinner.parentElement;
            spinner.remove();
            browser.renderInto(parent);
            browser.search.focus();
        }
    }
}
/**
 * Extracts the contents of bias macros from a message.
 * @param {string} message Message text
 * @returns {string} Message bias extracted from the message (or an empty string if not found)
 */
export function extractMessageBias(message) {
    if (!message) {
        return '';
    }
    try {
        const collected = [];
        // Use an isolated Handlebars sandbox so the helper doesn't leak globally.
        const biasHandlebars = Handlebars.create();
        biasHandlebars.registerHelper('bias', (text) => {
            collected.push(text);
            return '';
        });
        biasHandlebars.compile(message)({});
        return collected.length > 0 ? ` ${collected.join(' ')}` : '';
    } catch {
        // Malformed template syntax is treated as "no bias".
        return '';
    }
}
/**
 * Removes impersonated group member lines from the group member messages.
 * Doesn't do anything if group reply trimming is disabled.
 * @param {string} getMessage Group message
 * @returns Cleaned-up group message
 */
function cleanGroupMessage(getMessage) {
    if (power_user.disable_group_trimming) {
        return getMessage;
    }
    const group = groups.find((x) => x.id == selected_group);
    const members = (group && Array.isArray(group.members)) ? group.members : [];
    for (const member of members) {
        const character = characters.find(x => x.avatar == member);
        if (!character) {
            continue;
        }
        const { name } = character;
        // The current speaker keeps the floor.
        if (name === name2) {
            continue;
        }
        // Truncate at the first line that looks like another member speaking.
        const nameMatch = getMessage.match(new RegExp(`(^|\n)${escapeRegex(name)}:`));
        if (nameMatch) {
            getMessage = getMessage.substring(0, nameMatch.index);
        }
    }
    return getMessage;
}
/**
 * Injects the user's persona description into the prompt, either appended to
 * the Author's Note or as a standalone in-chat injection at a given depth.
 */
function addPersonaDescriptionExtensionPrompt() {
    const INJECT_TAG = 'PERSONA_DESCRIPTION';
    // Always reset the injection first so a stale description doesn't linger.
    setExtensionPrompt(INJECT_TAG, '', extension_prompt_types.IN_PROMPT, 0);
    if (!power_user.persona_description) {
        return;
    }
    const position = power_user.persona_description_position;
    const isAuthorNotePosition = [persona_description_positions.BOTTOM_AN, persona_description_positions.TOP_AN].includes(position);
    if (isAuthorNotePosition && shouldWIAddPrompt) {
        const originalAN = extension_prompts[NOTE_MODULE_NAME].value;
        const ANWithDesc = position === persona_description_positions.TOP_AN
            ? `${power_user.persona_description}\n${originalAN}`
            : `${originalAN}\n${power_user.persona_description}`;
        setExtensionPrompt(NOTE_MODULE_NAME, ANWithDesc, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth], extension_settings.note.allowWIScan, chat_metadata[metadata_keys.role]);
    }
    if (position === persona_description_positions.AT_DEPTH) {
        setExtensionPrompt(INJECT_TAG, power_user.persona_description, extension_prompt_types.IN_CHAT, power_user.persona_description_depth, true, power_user.persona_description_role);
    }
}
/**
 * Joins all non-empty extension prompts into one macro-substituted string.
 * @returns {string} Combined extension prompts, or '' when none are set.
 */
function getAllExtensionPrompts() {
    const parts = [];
    for (const prompt of Object.values(extension_prompts)) {
        if (prompt.value) {
            parts.push(prompt.value.trim());
        }
    }
    const combined = parts.join('\n');
    return combined.length ? substituteParams(combined) : '';
}
/**
 * Wrapper to fetch a macro-substituted extension prompt by module name.
 * @param {string} moduleName Registered extension prompt module name
 * @returns {string|undefined} Substituted prompt value, or undefined for a falsy name
 */
export function getExtensionPromptByName(moduleName) {
    if (!moduleName) {
        return;
    }
    return substituteParams(extension_prompts[moduleName]?.value);
}
/**
 * Returns the extension prompt for the given position, depth, and role.
 * If multiple prompts are found, they are joined with a separator.
 * @param {number} [position] Position of the prompt
 * @param {number} [depth] Depth of the prompt
 * @param {string} [separator] Separator for joining multiple prompts
 * @param {number} [role] Role of the prompt
 * @param {boolean} [wrap] Wrap start and end with a separator
 * @returns {string} Extension prompt
 */
export function getExtensionPrompt(position = extension_prompt_types.IN_PROMPT, depth = undefined, separator = '\n', role = undefined, wrap = true) {
    // Keys are sorted so the output order is deterministic across calls.
    const matchingValues = Object.keys(extension_prompts)
        .sort()
        .map((key) => extension_prompts[key])
        .filter(p => p.position == position && p.value)
        .filter(p => depth === undefined || p.depth === undefined || p.depth === depth)
        .filter(p => role === undefined || p.role === undefined || p.role === role)
        .map(p => p.value.trim());
    let extension_prompt = matchingValues.join(separator);
    if (wrap && extension_prompt.length) {
        if (!extension_prompt.startsWith(separator)) {
            extension_prompt = separator + extension_prompt;
        }
        if (!extension_prompt.endsWith(separator)) {
            extension_prompt += separator;
        }
    }
    if (extension_prompt.length) {
        extension_prompt = substituteParams(extension_prompt);
    }
    return extension_prompt;
}
/**
 * Applies macro substitution (without character card macros) and newline
 * normalization to a chat-bound text value.
 * @param {string} value Text to process (returned as-is when empty/undefined)
 * @param {string} name1 User name for substitution
 * @param {string} name2 Character name for substitution
 * @returns {string} Processed text with carriage returns stripped
 */
export function baseChatReplace(value, name1, name2) {
    if (value === undefined || value.length === 0) {
        return value;
    }
    value = substituteParams(value, name1, name2, undefined, undefined, false);
    if (power_user.collapse_newlines) {
        value = collapseNewlines(value);
    }
    return value.replace(/\r/g, '');
}
/**
 * Returns the character card fields for the current character.
 * Chat-bound scenario metadata overrides the card's own scenario, and in
 * group chats the description/personality/scenario/examples may be replaced
 * by the joined group character cards.
 * @returns {{system: string, mesExamples: string, description: string, personality: string, persona: string, scenario: string, jailbreak: string, version: string}}
 */
export function getCharacterCardFields() {
    const result = { system: '', mesExamples: '', description: '', personality: '', persona: '', scenario: '', jailbreak: '', version: '' };
    const character = characters[this_chid];
    if (!character) {
        return result;
    }
    const scenarioText = chat_metadata['scenario'] || characters[this_chid]?.scenario;
    result.description = baseChatReplace(characters[this_chid].description?.trim(), name1, name2);
    result.personality = baseChatReplace(characters[this_chid].personality?.trim(), name1, name2);
    // Fix: optional chaining — a card may have no scenario and chat metadata may be
    // unset, in which case `scenarioText.trim()` previously threw a TypeError.
    result.scenario = baseChatReplace(scenarioText?.trim(), name1, name2);
    result.mesExamples = baseChatReplace(characters[this_chid].mes_example?.trim(), name1, name2);
    result.persona = baseChatReplace(power_user.persona_description?.trim(), name1, name2);
    result.system = power_user.prefer_character_prompt ? baseChatReplace(characters[this_chid].data?.system_prompt?.trim(), name1, name2) : '';
    result.jailbreak = power_user.prefer_character_jailbreak ? baseChatReplace(characters[this_chid].data?.post_history_instructions?.trim(), name1, name2) : '';
    result.version = characters[this_chid].data?.character_version ?? '';
    if (selected_group) {
        const groupCards = getGroupCharacterCards(selected_group, Number(this_chid));
        if (groupCards) {
            result.description = groupCards.description;
            result.personality = groupCards.personality;
            result.scenario = groupCards.scenario;
            result.mesExamples = groupCards.mesExamples;
        }
    }
    return result;
}
/**
 * Reports whether token streaming is enabled and supported for the active API.
 * @returns {boolean} Truthy when the current backend can and should stream.
 */
export function isStreamingEnabled() {
    const noStreamSources = [chat_completion_sources.SCALE, chat_completion_sources.AI21];
    switch (main_api) {
        case 'openai':
            // Some chat completion sources (and PaLM 'bison' models) cannot stream.
            return oai_settings.stream_openai
                && !noStreamSources.includes(oai_settings.chat_completion_source)
                && !(oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE && oai_settings.google_model.includes('bison'));
        case 'kobold':
            return kai_settings.streaming_kobold && kai_flags.can_use_streaming;
        case 'novel':
            return nai_settings.streaming_novel;
        case 'textgenerationwebui':
            return textgen_settings.streaming;
        default:
            return false;
    }
}
// Makes the generation stop button visible.
function showStopButton() {
    $('#mes_stop').css('display', 'flex');
}
// Hides the generation stop button and signals the end of generation.
function hideStopButton() {
    const stopButton = $('#mes_stop');
    // Only act when visible: hideStopButton() gets called multiple times and
    // GENERATION_ENDED must fire once per generation.
    if (stopButton.css('display') !== 'none') {
        stopButton.css('display', 'none');
        eventSource.emit(event_types.GENERATION_ENDED, chat.length);
    }
}
class StreamingProcessor {
/**
* Creates a new streaming processor.
* @param {string} type Generation type
* @param {boolean} forceName2 If true, force the use of name2
* @param {Date} timeStarted Date when generation was started
* @param {string} continueMessage Previous message if the type is 'continue'
*/
constructor(type, forceName2, timeStarted, continueMessage) {
this.result = '';
this.messageId = -1;
this.messageDom = null;
this.messageTextDom = null;
this.messageTimerDom = null;
this.messageTokenCounterDom = null;
/** @type {HTMLTextAreaElement} */
this.sendTextarea = document.querySelector('#send_textarea');
this.type = type;
this.force_name2 = forceName2;
this.isStopped = false;
this.isFinished = false;
this.generator = this.nullStreamingGeneration;
this.abortController = new AbortController();
this.firstMessageText = '...';
this.timeStarted = timeStarted;
this.continueMessage = type === 'continue' ? continueMessage : '';
this.swipes = [];
/** @type {import('./scripts/logprobs.js').TokenLogprobs[]} */
this.messageLogprobs = [];
}
#checkDomElements(messageId) {
if (this.messageDom === null || this.messageTextDom === null) {
this.messageDom = document.querySelector(`#chat .mes[mesid="${messageId}"]`);
this.messageTextDom = this.messageDom?.querySelector('.mes_text');
this.messageTimerDom = this.messageDom?.querySelector('.mes_timer');
this.messageTokenCounterDom = this.messageDom?.querySelector('.tokenCounterDisplay');
}
}
showMessageButtons(messageId) {
if (messageId == -1) {
return;
}
showStopButton();
$(`#chat .mes[mesid="${messageId}"] .mes_buttons`).css({ 'display': 'none' });
}
hideMessageButtons(messageId) {
if (messageId == -1) {
return;
}
hideStopButton();
$(`#chat .mes[mesid="${messageId}"] .mes_buttons`).css({ 'display': 'flex' });
}
async onStartStreaming(text) {
let messageId = -1;
if (this.type == 'impersonate') {
this.sendTextarea.value = '';
this.sendTextarea.dispatchEvent(new Event('input', { bubbles: true }));
}
else {
await saveReply(this.type, text, true);
messageId = chat.length - 1;
this.#checkDomElements(messageId);
this.showMessageButtons(messageId);
}
hideSwipeButtons();
scrollChatToBottom();
return messageId;
}
onProgressStreaming(messageId, text, isFinal) {
const isImpersonate = this.type == 'impersonate';
const isContinue = this.type == 'continue';
if (!isImpersonate && !isContinue && Array.isArray(this.swipes) && this.swipes.length > 0) {
for (let i = 0; i < this.swipes.length; i++) {
this.swipes[i] = cleanUpMessage(this.swipes[i], false, false, true, this.stoppingStrings);
}
}
let processedText = cleanUpMessage(text, isImpersonate, isContinue, !isFinal, this.stoppingStrings);
// Predict unbalanced asterisks / quotes during streaming
const charsToBalance = ['*', '"', '```'];
for (const char of charsToBalance) {
if (!isFinal && isOdd(countOccurrences(processedText, char))) {
// Add character at the end to balance it
const separator = char.length > 1 ? '\n' : '';
processedText = processedText.trimEnd() + separator + char;
}
}
if (isImpersonate) {
this.sendTextarea.value = processedText;
this.sendTextarea.dispatchEvent(new Event('input', { bubbles: true }));
}
else {
this.#checkDomElements(messageId);
const currentTime = new Date();
// Don't waste time calculating token count for streaming
const currentTokenCount = isFinal && power_user.message_token_count_enabled ? getTokenCount(processedText, 0) : 0;
const timePassed = formatGenerationTimer(this.timeStarted, currentTime, currentTokenCount);
chat[messageId]['mes'] = processedText;
chat[messageId]['gen_started'] = this.timeStarted;
chat[messageId]['gen_finished'] = currentTime;
if (currentTokenCount) {
if (!chat[messageId]['extra']) {
chat[messageId]['extra'] = {};
}
chat[messageId]['extra']['token_count'] = currentTokenCount;
if (this.messageTokenCounterDom instanceof HTMLElement) {
this.messageTokenCounterDom.textContent = `${currentTokenCount}t`;
}
}
if ((this.type == 'swipe' || this.type === 'continue') && Array.isArray(chat[messageId]['swipes'])) {
chat[messageId]['swipes'][chat[messageId]['swipe_id']] = processedText;
chat[messageId]['swipe_info'][chat[messageId]['swipe_id']] = { 'send_date': chat[messageId]['send_date'], 'gen_started': chat[messageId]['gen_started'], 'gen_finished': chat[messageId]['gen_finished'], 'extra': JSON.parse(JSON.stringify(chat[messageId]['extra'])) };
}
const formattedText = messageFormatting(
processedText,
chat[messageId].name,
chat[messageId].is_system,
chat[messageId].is_user,
messageId,
);
if (this.messageTextDom instanceof HTMLElement) {
this.messageTextDom.innerHTML = formattedText;
}
if (this.messageTimerDom instanceof HTMLElement) {
this.messageTimerDom.textContent = timePassed.timerValue;
this.messageTimerDom.title = timePassed.timerTitle;
}
this.setFirstSwipe(messageId);
}
if (!scrollLock) {
scrollChatToBottom();
}
}
async onFinishStreaming(messageId, text) {
this.hideMessageButtons(this.messageId);
this.onProgressStreaming(messageId, text, true);
addCopyToCodeBlocks($(`#chat .mes[mesid="${messageId}"]`));
if (Array.isArray(this.swipes) && this.swipes.length > 0) {
const message = chat[messageId];
const swipeInfo = {
send_date: message.send_date,
gen_started: message.gen_started,
gen_finished: message.gen_finished,
extra: structuredClone(message.extra),
};
const swipeInfoArray = [];
swipeInfoArray.length = this.swipes.length;
swipeInfoArray.fill(swipeInfo);
chat[messageId].swipes.push(...this.swipes);
chat[messageId].swipe_info.push(...swipeInfoArray);
}
if (this.type !== 'impersonate') {
await eventSource.emit(event_types.MESSAGE_RECEIVED, this.messageId);
await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, this.messageId);
} else {
await eventSource.emit(event_types.IMPERSONATE_READY, text);
}
saveLogprobsForActiveMessage(this.messageLogprobs.filter(Boolean), this.continueMessage);
await saveChatConditional();
unblockGeneration();
generatedPromptCache = '';
//console.log("Generated text size:", text.length, text)
if (power_user.auto_swipe) {
function containsBlacklistedWords(str, blacklist, threshold) {
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = str.match(regex) || [];
return matches.length >= threshold;
}
const generatedTextFiltered = (text) => {
if (text) {
if (power_user.auto_swipe_minimum_length) {
if (text.length < power_user.auto_swipe_minimum_length && text.length !== 0) {
console.log('Generated text size too small');
return true;
}
}
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(text, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.log('Generated text has blacklisted words');
return true;
}
}
}
return false;
};
if (generatedTextFiltered(text)) {
swipe_right();
return;
}
}
playMessageSound();
}
onErrorStreaming() {
this.abortController.abort();
this.isStopped = true;
this.hideMessageButtons(this.messageId);
generatedPromptCache = '';
unblockGeneration();
}
setFirstSwipe(messageId) {
if (this.type !== 'swipe' && this.type !== 'impersonate') {
if (Array.isArray(chat[messageId]['swipes']) && chat[messageId]['swipes'].length === 1 && chat[messageId]['swipe_id'] === 0) {
chat[messageId]['swipes'][0] = chat[messageId]['mes'];
chat[messageId]['swipe_info'][0] = { 'send_date': chat[messageId]['send_date'], 'gen_started': chat[messageId]['gen_started'], 'gen_finished': chat[messageId]['gen_finished'], 'extra': JSON.parse(JSON.stringify(chat[messageId]['extra'])) };
}
}
}
onStopStreaming() {
this.onErrorStreaming();
}
/**
* @returns {Generator<{ text: string, swipes: string[], logprobs: import('./scripts/logprobs.js').TokenLogprobs }, void, void>}
*/
*nullStreamingGeneration() {
throw new Error('Generation function for streaming is not hooked up');
}
async generate() {
if (this.messageId == -1) {
this.messageId = await this.onStartStreaming(this.firstMessageText);
await delay(1); // delay for message to be rendered
scrollLock = false;
}
// Stopping strings are expensive to calculate, especially with macros enabled. To remove stopping strings
// when streaming, we cache the result of getStoppingStrings instead of calling it once per token.
const isImpersonate = this.type == 'impersonate';
const isContinue = this.type == 'continue';
this.stoppingStrings = getStoppingStrings(isImpersonate, isContinue);
try {
const sw = new Stopwatch(1000 / power_user.streaming_fps);
const timestamps = [];
for await (const { text, swipes, logprobs } of this.generator()) {
timestamps.push(Date.now());
if (this.isStopped) {
return;
}
this.result = text;
this.swipes = Array.from(swipes ?? []);
if (logprobs) {
this.messageLogprobs.push(...(Array.isArray(logprobs) ? logprobs : [logprobs]));
}
await sw.tick(() => this.onProgressStreaming(this.messageId, this.continueMessage + text));
}
const seconds = (timestamps[timestamps.length - 1] - timestamps[0]) / 1000;
console.warn(`Stream stats: ${timestamps.length} tokens, ${seconds.toFixed(2)} seconds, rate: ${Number(timestamps.length / seconds).toFixed(2)} TPS`);
}
catch (err) {
console.error(err);
this.onErrorStreaming();
return;
}
this.isFinished = true;
return this.result;
}
}
/**
 * Generates a message using the provided prompt.
 * @param {string} prompt Prompt to generate a message from
 * @param {string} api API to use. Main API is used if not specified.
 * @param {boolean} instructOverride true to override instruct mode, false to use the default value
 * @param {boolean} quietToLoud true to generate a message in system mode, false to generate a message in character mode
 * @param {string} [systemPrompt] System prompt to use. Only Instruct mode or OpenAI.
 * @param {number} [responseLength] Maximum response length. If unset, the global default value is used.
 * @returns {Promise<string>} Generated message
 * @throws When the backend responds with an error or no message text is produced.
 */
export async function generateRaw(prompt, api, instructOverride, quietToLoud, systemPrompt, responseLength) {
    if (!api) {
        api = main_api;
    }
    const abortController = new AbortController();
    const responseLengthCustomized = typeof responseLength === 'number' && responseLength > 0;
    let originalResponseLength = -1;
    // Instruct formatting only applies to completion-style backends; OpenAI and NovelAI use their own formats.
    const isInstruct = power_user.instruct.enabled && api !== 'openai' && api !== 'novel' && !instructOverride;
    const isQuiet = true;
    if (systemPrompt) {
        systemPrompt = substituteParams(systemPrompt);
        systemPrompt = isInstruct ? formatInstructModeSystemPrompt(systemPrompt) : systemPrompt;
        // Chat completion APIs get the system prompt as its own message below instead of being prepended here.
        prompt = api === 'openai' ? prompt : `${systemPrompt}\n${prompt}`;
    }
    // Order matters: substitute macros, adjust for NovelAI, then apply instruct wrapping.
    prompt = substituteParams(prompt);
    prompt = api == 'novel' ? adjustNovelInstructionPrompt(prompt) : prompt;
    prompt = isInstruct ? formatInstructModeChat(name1, prompt, false, true, '', name1, name2, false) : prompt;
    prompt = isInstruct ? (prompt + formatInstructModePrompt(name2, false, '', name1, name2, isQuiet, quietToLoud)) : (prompt + '\n');
    try {
        // Temporarily override the response length if a custom one was requested.
        originalResponseLength = responseLengthCustomized ? saveResponseLength(api, responseLength) : -1;
        let generateData = {};
        switch (api) {
            case 'kobold':
            case 'koboldhorde':
                if (preset_settings === 'gui') {
                    generateData = { prompt: prompt, gui_settings: true, max_length: amount_gen, max_context_length: max_context, api_server };
                } else {
                    const isHorde = api === 'koboldhorde';
                    const koboldSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
                    generateData = getKoboldGenerationData(prompt, koboldSettings, amount_gen, max_context, isHorde, 'quiet');
                }
                break;
            case 'novel': {
                const novelSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
                generateData = getNovelGenerationData(prompt, novelSettings, amount_gen, false, false, null, 'quiet');
                break;
            }
            case 'textgenerationwebui':
                generateData = getTextGenGenerationData(prompt, amount_gen, false, false, null, 'quiet');
                break;
            case 'openai': {
                // Chat completion APIs take a message array instead of a raw prompt string.
                generateData = [{ role: 'user', content: prompt.trim() }];
                if (systemPrompt) {
                    generateData.unshift({ role: 'system', content: systemPrompt.trim() });
                }
            } break;
        }
        let data = {};
        if (api == 'koboldhorde') {
            data = await generateHorde(prompt, generateData, abortController.signal, false);
        } else if (api == 'openai') {
            data = await sendOpenAIRequest('quiet', generateData, abortController.signal);
        } else {
            const generateUrl = getGenerateUrl(api);
            const response = await fetch(generateUrl, {
                method: 'POST',
                headers: getRequestHeaders(),
                cache: 'no-cache',
                body: JSON.stringify(generateData),
                signal: abortController.signal,
            });
            if (!response.ok) {
                const error = await response.json();
                throw error;
            }
            data = await response.json();
        }
        if (data.error) {
            throw new Error(data.error);
        }
        const message = cleanUpMessage(extractMessageFromData(data), false, false, true);
        if (!message) {
            throw new Error('No message generated');
        }
        return message;
    } finally {
        // Always restore the original response length, even if generation failed.
        if (responseLengthCustomized) {
            restoreResponseLength(api, originalResponseLength);
        }
    }
}
/**
 * Temporarily change the response length for the specified API.
 * @param {string} api API to use.
 * @param {number} responseLength Target response length.
 * @returns {number} The original response length.
 */
function saveResponseLength(api, responseLength) {
    if (api === 'openai') {
        const previous = oai_settings.openai_max_tokens;
        oai_settings.openai_max_tokens = responseLength;
        return previous;
    }
    const previous = amount_gen;
    amount_gen = responseLength;
    return previous;
}
/**
 * Restore the original response length for the specified API.
 * @param {string} api API to use.
 * @param {number} responseLength Response length to restore.
 * @returns {void}
 */
function restoreResponseLength(api, responseLength) {
    if (api === 'openai') {
        oai_settings.openai_max_tokens = responseLength;
        return;
    }
    amount_gen = responseLength;
}
/**
 * Removes last message from the chat DOM.
 * @returns {Promise} Resolves when the message is removed (immediately if the chat is empty).
 */
function removeLastMessage() {
    return new Promise((resolve) => {
        const lastMes = $('#chat').children('.mes').last();
        if (!lastMes.length) {
            resolve();
            return;
        }
        // Animate the hide, then detach the node before resolving.
        lastMes.hide(animation_duration, function () {
            $(this).remove();
            resolve();
        });
    });
}
/**
* Runs a generation using the current chat context.
* @param {string} type Generation type
* @param {GenerateOptions} options Generation options
* @param {boolean} dryRun Whether to actually generate a message or just assemble the prompt
* @returns {Promise} Returns a promise that resolves when the text is done generating.
* @typedef {{automatic_trigger?: boolean, force_name2?: boolean, quiet_prompt?: string, quietToLoud?: boolean, skipWIAN?: boolean, force_chid?: number, signal?: AbortSignal, quietImage?: string, maxLoops?: number, quietName?: string }} GenerateOptions
*/
export async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops, quietName } = {}, dryRun = false) {
console.log('Generate entered');
eventSource.emit(event_types.GENERATION_STARTED, type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops }, dryRun);
setGenerationProgress(0);
generation_started = new Date();
// Don't recreate abort controller if signal is passed
if (!(abortController && signal)) {
abortController = new AbortController();
}
// OpenAI doesn't need instruct mode. Use OAI main prompt instead.
const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
const isImpersonate = type == 'impersonate';
if (!(dryRun || type == 'regenerate' || type == 'swipe' || type == 'quiet')) {
const interruptedByCommand = await processCommands(String($('#send_textarea').val()));
if (interruptedByCommand) {
//$("#send_textarea").val('')[0].dispatchEvent(new Event('input', { bubbles:true }));
unblockGeneration(type);
return Promise.resolve();
}
}
if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration(type);
return Promise.resolve();
}
if (main_api === 'textgenerationwebui' &&
textgen_settings.streaming &&
textgen_settings.legacy_api &&
(textgen_settings.type === OOBA || textgen_settings.type === APHRODITE)) {
toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use new API to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
unblockGeneration(type);
return Promise.resolve();
}
if (isHordeGenerationNotAllowed()) {
unblockGeneration(type);
return Promise.resolve();
}
if (!dryRun) {
// Ping server to make sure it is still alive
const pingResult = await pingServer();
if (!pingResult) {
unblockGeneration(type);
toastr.error('Verify that the server is running and accessible.', 'ST Server cannot be reached');
throw new Error('Server unreachable');
}
// Hide swipes if not in a dry run.
hideSwipeButtons();
// If generated any message, set the flag to indicate it can't be recreated again.
chat_metadata['tainted'] = true;
}
if (selected_group && !is_group_generating) {
if (!dryRun) {
// Returns the promise that generateGroupWrapper returns; resolves when generation is done
return generateGroupWrapper(false, type, { quiet_prompt, force_chid, signal: abortController.signal, quietImage, maxLoops });
}
const characterIndexMap = new Map(characters.map((char, index) => [char.avatar, index]));
const group = groups.find((x) => x.id === selected_group);
const enabledMembers = group.members.reduce((acc, member) => {
if (!group.disabled_members.includes(member) && !acc.includes(member)) {
acc.push(member);
}
return acc;
}, []);
const memberIds = enabledMembers
.map((member) => characterIndexMap.get(member))
.filter((index) => index !== undefined && index !== null);
if (memberIds.length > 0) {
setCharacterId(memberIds[0]);
setCharacterName('');
} else {
console.log('No enabled members found');
unblockGeneration(type);
return Promise.resolve();
}
}
//#########QUIET PROMPT STUFF##############
//this function just gives special care to novel quiet instruction prompts
if (quiet_prompt) {
quiet_prompt = substituteParams(quiet_prompt);
quiet_prompt = main_api == 'novel' && !quietToLoud ? adjustNovelInstructionPrompt(quiet_prompt) : quiet_prompt;
}
const isChatValid = online_status !== 'no_connection' && this_chid !== undefined;
// We can't do anything because we're not in a chat right now. (Unless it's a dry run, in which case we need to
// assemble the prompt so we can count its tokens regardless of whether a chat is active.)
if (!dryRun && !isChatValid) {
if (this_chid === undefined) {
toastr.warning('Сharacter is not selected');
}
is_send_press = false;
return Promise.resolve();
}
let textareaText;
if (type !== 'regenerate' && type !== 'swipe' && type !== 'quiet' && !isImpersonate && !dryRun) {
is_send_press = true;
textareaText = String($('#send_textarea').val());
$('#send_textarea').val('')[0].dispatchEvent(new Event('input', { bubbles: true }));
} else {
textareaText = '';
if (chat.length && chat[chat.length - 1]['is_user']) {
//do nothing? why does this check exist?
}
else if (type !== 'quiet' && type !== 'swipe' && !isImpersonate && !dryRun && chat.length) {
chat.length = chat.length - 1;
await removeLastMessage();
await eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
}
}
const isContinue = type == 'continue';
// Rewrite the generation timer to account for the time passed for all the continuations.
if (isContinue && chat.length) {
const prevFinished = chat[chat.length - 1]['gen_finished'];
const prevStarted = chat[chat.length - 1]['gen_started'];
if (prevFinished && prevStarted) {
const timePassed = prevFinished - prevStarted;
generation_started = new Date(Date.now() - timePassed);
chat[chat.length - 1]['gen_started'] = generation_started;
}
}
if (!dryRun) {
deactivateSendButtons();
}
let { messageBias, promptBias, isUserPromptBias } = getBiasStrings(textareaText, type);
//*********************************
//PRE FORMATING STRING
//*********************************
//for normal messages sent from user..
if ((textareaText != '' || hasPendingFileAttachment()) && !automatic_trigger && type !== 'quiet' && !dryRun) {
// If user message contains no text other than bias - send as a system message
if (messageBias && !removeMacros(textareaText)) {
sendSystemMessage(system_message_types.GENERIC, ' ', { bias: messageBias });
}
else {
await sendMessageAsUser(textareaText, messageBias);
}
}
else if (textareaText == '' && !automatic_trigger && !dryRun && type === undefined && main_api == 'openai' && oai_settings.send_if_empty.trim().length > 0) {
// Use send_if_empty if set and the user message is empty. Only when sending messages normally
await sendMessageAsUser(oai_settings.send_if_empty.trim(), messageBias);
}
let {
description,
personality,
persona,
scenario,
mesExamples,
system,
jailbreak,
} = getCharacterCardFields();
if (isInstruct) {
system = power_user.prefer_character_prompt && system ? system : baseChatReplace(power_user.instruct.system_prompt, name1, name2);
system = formatInstructModeSystemPrompt(substituteParams(system, name1, name2, power_user.instruct.system_prompt));
}
// Depth prompt (character-specific A/N)
removeDepthPrompts();
const groupDepthPrompts = getGroupDepthPrompts(selected_group, Number(this_chid));
if (selected_group && Array.isArray(groupDepthPrompts) && groupDepthPrompts.length > 0) {
groupDepthPrompts.forEach((value, index) => {
const role = getExtensionPromptRoleByName(value.role);
setExtensionPrompt('DEPTH_PROMPT_' + index, value.text, extension_prompt_types.IN_CHAT, value.depth, extension_settings.note.allowWIScan, role);
});
} else {
const depthPromptText = baseChatReplace(characters[this_chid].data?.extensions?.depth_prompt?.prompt?.trim(), name1, name2) || '';
const depthPromptDepth = characters[this_chid].data?.extensions?.depth_prompt?.depth ?? depth_prompt_depth_default;
const depthPromptRole = getExtensionPromptRoleByName(characters[this_chid].data?.extensions?.depth_prompt?.role ?? depth_prompt_role_default);
setExtensionPrompt('DEPTH_PROMPT', depthPromptText, extension_prompt_types.IN_CHAT, depthPromptDepth, extension_settings.note.allowWIScan, depthPromptRole);
}
// First message in fresh 1-on-1 chat reacts to user/character settings changes
if (chat.length) {
chat[0].mes = substituteParams(chat[0].mes);
}
// Collect messages with usable content
let coreChat = chat.filter(x => !x.is_system);
if (type === 'swipe') {
coreChat.pop();
}
coreChat = await Promise.all(coreChat.map(async (chatItem, index) => {
let message = chatItem.mes;
let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT;
let options = { isPrompt: true, depth: (coreChat.length - index - 1) };
let regexedMessage = getRegexedString(message, regexType, options);
regexedMessage = await appendFileContent(chatItem, regexedMessage);
if (chatItem?.extra?.append_title && chatItem?.extra?.title) {
regexedMessage = `${regexedMessage}\n\n${chatItem.extra.title}`;
}
return {
...chatItem,
mes: regexedMessage,
index,
};
}));
// Determine token limit
let this_max_context = getMaxContextSize();
if (!dryRun && type !== 'quiet') {
console.debug('Running extension interceptors');
const aborted = await runGenerationInterceptors(coreChat, this_max_context);
if (aborted) {
console.debug('Generation aborted by extension interceptors');
unblockGeneration(type);
return Promise.resolve();
}
} else {
console.debug('Skipping extension interceptors for dry run');
}
// Adjust token limit for Horde
let adjustedParams;
if (main_api == 'koboldhorde' && (horde_settings.auto_adjust_context_length || horde_settings.auto_adjust_response_length)) {
try {
adjustedParams = await adjustHordeGenerationParams(max_context, amount_gen);
}
catch {
unblockGeneration(type);
return Promise.resolve();
}
if (horde_settings.auto_adjust_context_length) {
this_max_context = (adjustedParams.maxContextLength - adjustedParams.maxLength);
}
}
console.log(`Core/all messages: ${coreChat.length}/${chat.length}`);
// kingbri MARK: - Make sure the prompt bias isn't the same as the user bias
if ((promptBias && !isUserPromptBias) || power_user.always_force_name2 || main_api == 'novel') {
force_name2 = true;
}
if (isImpersonate) {
force_name2 = false;
}
// TODO (kingbri): Migrate to a utility function
/**
 * Parses a message-examples string into separate example blocks.
 * Blocks are delimited by the literal <START> marker; each returned block is
 * prefixed with the heading appropriate for the current API/instruct mode.
 * @param {string} examplesStr Raw examples string (blocks delimited by <START>)
 * @returns {string[]} Examples array with block heading
 */
function parseMesExamples(examplesStr) {
    // Nothing to parse when the string is empty or contains only the delimiter
    if (examplesStr.length === 0 || examplesStr === '<START>') {
        return [];
    }
    // Ensure the string begins with a delimiter so splitting yields uniform blocks
    if (!examplesStr.startsWith('<START>')) {
        examplesStr = '<START>\n' + examplesStr.trim();
    }
    const exampleSeparator = power_user.context.example_separator ? `${substituteParams(power_user.context.example_separator)}\n` : '';
    const blockHeading = main_api === 'openai' ? '<START>\n' : (exampleSeparator || (isInstruct ? '<START>\n' : ''));
    const splitExamples = examplesStr.split(/<START>/gi).slice(1).map(block => `${blockHeading}${block.trim()}\n`);
    return splitExamples;
}
let mesExamplesArray = parseMesExamples(mesExamples);
//////////////////////////////////
// Extension added strings
// Set non-WI AN
setFloatingPrompt();
// Add persona description to prompt
addPersonaDescriptionExtensionPrompt();
// Add WI to prompt (and also inject WI to AN value via hijack)
// Make quiet prompt available for WIAN
setExtensionPrompt('QUIET_PROMPT', quiet_prompt || '', extension_prompt_types.IN_PROMPT, 0, true);
const chatForWI = coreChat.map(x => world_info_include_names ? `${x.name}: ${x.mes}` : x.mes).reverse();
const { worldInfoString, worldInfoBefore, worldInfoAfter, worldInfoExamples, worldInfoDepth } = await getWorldInfoPrompt(chatForWI, this_max_context, dryRun);
setExtensionPrompt('QUIET_PROMPT', '', extension_prompt_types.IN_PROMPT, 0, true);
// Add message example WI
for (const example of worldInfoExamples) {
const exampleMessage = example.content;
if (exampleMessage.length === 0) {
continue;
}
const formattedExample = baseChatReplace(exampleMessage, name1, name2);
const cleanedExample = parseMesExamples(formattedExample);
// Insert depending on before or after position
if (example.position === wi_anchor_position.before) {
mesExamplesArray.unshift(...cleanedExample);
} else {
mesExamplesArray.push(...cleanedExample);
}
}
// At this point, the raw message examples can be created
const mesExamplesRawArray = [...mesExamplesArray];
if (mesExamplesArray && isInstruct) {
mesExamplesArray = formatInstructModeExamples(mesExamplesArray, name1, name2);
}
if (skipWIAN !== true) {
console.log('skipWIAN not active, adding WIAN');
// Add all depth WI entries to prompt
flushWIDepthInjections();
if (Array.isArray(worldInfoDepth)) {
worldInfoDepth.forEach((e) => {
const joinedEntries = e.entries.join('\n');
setExtensionPrompt(`customDepthWI-${e.depth}-${e.role}`, joinedEntries, extension_prompt_types.IN_CHAT, e.depth, false, e.role);
});
}
} else {
console.log('skipping WIAN');
}
// Inject all Depth prompts. Chat Completion does it separately
let injectedIndices = [];
if (main_api !== 'openai') {
injectedIndices = doChatInject(coreChat, isContinue);
}
// Insert character jailbreak as the last user message (if exists, allowed, preferred, and not using Chat Completion)
if (power_user.context.allow_jailbreak && power_user.prefer_character_jailbreak && main_api !== 'openai' && jailbreak) {
// Set "original" explicity to empty string since there's no original
jailbreak = substituteParams(jailbreak, name1, name2, '');
// When continuing generation of previous output, last user message precedes the message to continue
if (isContinue) {
coreChat.splice(coreChat.length - 1, 0, { mes: jailbreak, is_user: true });
}
else {
coreChat.push({ mes: jailbreak, is_user: true });
}
}
let chat2 = [];
let continue_mag = '';
const userMessageIndices = [];
const lastUserMessageIndex = coreChat.findLastIndex(x => x.is_user);
for (let i = coreChat.length - 1, j = 0; i >= 0; i--, j++) {
if (main_api == 'openai') {
chat2[i] = coreChat[j].mes;
if (i === 0 && isContinue) {
chat2[i] = chat2[i].slice(0, chat2[i].lastIndexOf(coreChat[j].mes) + coreChat[j].mes.length);
continue_mag = coreChat[j].mes;
}
continue;
}
chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, false);
if (j === 0 && isInstruct) {
// Reformat with the first output sequence (if any)
chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, force_output_sequence.FIRST);
}
if (lastUserMessageIndex >= 0 && j === lastUserMessageIndex && isInstruct) {
// Reformat with the last input sequence (if any)
chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, force_output_sequence.LAST);
}
// Do not suffix the message for continuation
if (i === 0 && isContinue) {
if (isInstruct) {
// Reformat with the last output sequence (if any)
chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, force_output_sequence.LAST);
}
chat2[i] = chat2[i].slice(0, chat2[i].lastIndexOf(coreChat[j].mes) + coreChat[j].mes.length);
continue_mag = coreChat[j].mes;
}
if (coreChat[j].is_user) {
userMessageIndices.push(i);
}
}
let addUserAlignment = isInstruct && power_user.instruct.user_alignment_message;
let userAlignmentMessage = '';
if (addUserAlignment) {
const alignmentMessage = {
name: name1,
mes: power_user.instruct.user_alignment_message,
is_user: true,
};
userAlignmentMessage = formatMessageHistoryItem(alignmentMessage, isInstruct, force_output_sequence.FIRST);
}
// Call combined AN into Generate
const beforeScenarioAnchor = getExtensionPrompt(extension_prompt_types.BEFORE_PROMPT).trimStart();
const afterScenarioAnchor = getExtensionPrompt(extension_prompt_types.IN_PROMPT);
const storyStringParams = {
description: description,
personality: personality,
persona: persona,
scenario: scenario,
system: isInstruct ? system : '',
char: name2,
user: name1,
wiBefore: worldInfoBefore,
wiAfter: worldInfoAfter,
loreBefore: worldInfoBefore,
loreAfter: worldInfoAfter,
mesExamples: mesExamplesArray.join(''),
mesExamplesRaw: mesExamplesRawArray.join(''),
};
const storyString = renderStoryString(storyStringParams);
// Story string rendered, safe to remove
if (power_user.strip_examples) {
mesExamplesArray = [];
}
let oaiMessages = [];
let oaiMessageExamples = [];
if (main_api === 'openai') {
oaiMessages = setOpenAIMessages(coreChat);
oaiMessageExamples = setOpenAIMessageExamples(mesExamplesArray);
}
// hack for regeneration of the first message
if (chat2.length == 0) {
chat2.push('');
}
let examplesString = '';
let chatString = '';
let cyclePrompt = '';
/**
 * Measures the token footprint of every prompt part currently assembled,
 * with carriage returns stripped and the user's token padding applied.
 * @returns {Promise<number>} Token count of the concatenated prompt parts
 */
async function getMessagesTokenCount() {
    const promptParts = [
        beforeScenarioAnchor,
        storyString,
        afterScenarioAnchor,
        examplesString,
        chatString,
        quiet_prompt,
        cyclePrompt,
        userAlignmentMessage,
    ];
    const encodeString = promptParts.join('').replace(/\r/gm, '');
    return getTokenCountAsync(encodeString, power_user.token_padding);
}
// Force pinned examples into the context
let pinExmString;
if (power_user.pin_examples) {
pinExmString = examplesString = mesExamplesArray.join('');
}
// Only add the chat in context if past the greeting message
if (isContinue && (chat2.length > 1 || main_api === 'openai')) {
cyclePrompt = chat2.shift();
}
// Collect enough messages to fill the context
let arrMes = new Array(chat2.length);
let tokenCount = await getMessagesTokenCount();
let lastAddedIndex = -1;
// Pre-allocate all injections first.
// If it doesn't fit - user shot himself in the foot
for (const index of injectedIndices) {
const item = chat2[index];
if (typeof item !== 'string') {
continue;
}
tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
chatString = item + chatString;
if (tokenCount < this_max_context) {
arrMes[index] = item;
lastAddedIndex = Math.max(lastAddedIndex, index);
} else {
break;
}
}
for (let i = 0; i < chat2.length; i++) {
// not needed for OAI prompting
if (main_api == 'openai') {
break;
}
// Skip already injected messages
if (arrMes[i] !== undefined) {
continue;
}
const item = chat2[i];
if (typeof item !== 'string') {
continue;
}
tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
chatString = item + chatString;
if (tokenCount < this_max_context) {
arrMes[i] = item;
lastAddedIndex = Math.max(lastAddedIndex, i);
} else {
break;
}
}
// Add user alignment message if last message is not a user message
const stoppedAtUser = userMessageIndices.includes(lastAddedIndex);
if (addUserAlignment && !stoppedAtUser) {
tokenCount += await getTokenCountAsync(userAlignmentMessage.replace(/\r/gm, ''));
chatString = userAlignmentMessage + chatString;
arrMes.push(userAlignmentMessage);
injectedIndices.push(arrMes.length - 1);
}
// Unsparse the array. Adjust injected indices
const newArrMes = [];
const newInjectedIndices = [];
for (let i = 0; i < arrMes.length; i++) {
if (arrMes[i] !== undefined) {
newArrMes.push(arrMes[i]);
if (injectedIndices.includes(i)) {
newInjectedIndices.push(newArrMes.length - 1);
}
}
}
arrMes = newArrMes;
injectedIndices = newInjectedIndices;
if (main_api !== 'openai') {
setInContextMessages(arrMes.length - injectedIndices.length, type);
}
// Estimate how many unpinned example messages fit in the context
tokenCount = await getMessagesTokenCount();
let count_exm_add = 0;
if (!power_user.pin_examples) {
for (let example of mesExamplesArray) {
tokenCount += await getTokenCountAsync(example.replace(/\r/gm, ''));
examplesString += example;
if (tokenCount < this_max_context) {
count_exm_add++;
} else {
break;
}
}
}
let mesSend = [];
console.debug('calling runGenerate');
if (isContinue) {
// Coping mechanism for OAI spacing
const isForceInstruct = isOpenRouterWithInstruct();
if (main_api === 'openai' && !isForceInstruct && !cyclePrompt.endsWith(' ')) {
cyclePrompt += oai_settings.continue_postfix;
continue_mag += oai_settings.continue_postfix;
}
}
const originalType = type;
if (!dryRun) {
is_send_press = true;
}
generatedPromptCache += cyclePrompt;
if (generatedPromptCache.length == 0 || type === 'continue') {
console.debug('generating prompt');
chatString = '';
arrMes = arrMes.reverse();
arrMes.forEach(function (item, i, arr) {
// OAI doesn't need all of this
if (main_api === 'openai') {
return;
}
// Cohee: This removes a newline from the end of the last message in the context
// Last prompt line will add a newline if it's not a continuation
// In instruct mode it only removes it if wrap is enabled and it's not a quiet generation
if (i === arrMes.length - 1 && type !== 'continue') {
if (!isInstruct || (power_user.instruct.wrap && type !== 'quiet')) {
item = item.replace(/\n?$/, '');
}
}
mesSend[mesSend.length] = { message: item, extensionPrompts: [] };
});
}
let mesExmString = '';
/**
 * Builds the example-message string and finalizes the last prompt line.
 * Mutates the closed-over mesExmString and mesSend. No-op for Chat Completion,
 * which assembles its prompt through its own manager.
 */
function setPromptString() {
    if (main_api == 'openai') {
        return;
    }
    console.debug('--setting Prompt string');
    mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');
    const lastIndex = mesSend.length - 1;
    if (lastIndex >= 0) {
        mesSend[lastIndex].message = modifyLastPromptLine(mesSend[lastIndex].message);
    }
}
/**
 * Appends trailing prompt pieces to the last chat line: the quiet prompt (if any),
 * the instruct-mode prompt line, the non-instruct impersonation line, and the
 * forced character-name prefix. Reads many closure variables (quiet_prompt,
 * isInstruct, isContinue, force_name2, etc.) set earlier in Generate.
 * @param {string} lastMesString The formatted last message line of the prompt
 * @returns {string} The modified last message line
 */
function modifyLastPromptLine(lastMesString) {
//#########QUIET PROMPT STUFF PT2##############
// Add quiet generation prompt at depth 0
if (quiet_prompt && quiet_prompt.length) {
// here name1 is forced for all quiet prompts..why?
const name = name1;
//checks if we are in instruct, if so, formats the chat as such, otherwise just adds the quiet prompt
const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true, '', name1, name2, false) : `\n${quiet_prompt}`;
//This begins to fix quietPrompts (particularly /sysgen) for instruct
//previously instruct input sequence was being appended to the last chat message w/o '\n'
//and no output sequence was added after the input's content.
//TODO: respect output_sequence vs last_output_sequence settings
//TODO: decide how to prompt this to clarify who is talking 'Narrator', 'System', etc.
if (isInstruct) {
lastMesString += quietAppend; // + power_user.instruct.output_sequence + '\n';
} else {
lastMesString += quietAppend;
}
// Ross: bailing out early prevents quiet prompts from respecting other instruct prompt toggles
// for sysgen, SD, and summary this is desirable as it prevents the AI from responding as char..
// but for idle prompting, we want the flexibility of the other prompt toggles, and to respect them as per settings in the extension
// need a detection for what the quiet prompt is being asked for...
// Bail out early?
if (!isInstruct && !quietToLoud) {
return lastMesString;
}
}
// Get instruct mode line
if (isInstruct && !isContinue) {
const name = (quiet_prompt && !quietToLoud && !isImpersonate) ? (quietName ?? 'System') : (isImpersonate ? name1 : name2);
const isQuiet = quiet_prompt && type == 'quiet';
lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, quietToLoud);
}
// Get non-instruct impersonation line
if (!isInstruct && isImpersonate && !isContinue) {
const name = name1;
if (!lastMesString.endsWith('\n')) {
lastMesString += '\n';
}
lastMesString += name + ':';
}
// Add character's name
// Force name append on continue (if not continuing on user message or first message)
const isContinuingOnFirstMessage = chat.length === 1 && isContinue;
if (!isInstruct && force_name2 && !isContinuingOnFirstMessage) {
if (!lastMesString.endsWith('\n')) {
lastMesString += '\n';
}
if (!isContinue || !(chat[chat.length - 1]?.is_user)) {
lastMesString += `${name2}:`;
}
}
return lastMesString;
}
/**
 * Cleans the already-generated prompt cache so it can be appended seamlessly
 * to the freshly built prompt.
 * @param {string} promptCache The cached generated text
 * @returns {string} The cleaned cache text
 */
function cleanupPromptCache(promptCache) {
    const namePrefix = `${name2}:`;
    // Strip the first occurrence of the character's name label
    if (promptCache.trimStart().startsWith(namePrefix)) {
        promptCache = promptCache.replace(namePrefix, '').trimStart();
    }
    // Strip the first occurrence of the prompt bias
    if (promptCache.trimStart().startsWith(promptBias)) {
        promptCache = promptCache.replace(promptBias, '');
    }
    // Non-instruct prompts expect a leading space for seamless concatenation
    if (!isInstruct && !/^\s/.test(promptCache)) {
        promptCache = ' ' + promptCache;
    }
    return promptCache;
}
/**
 * Recursively shrinks the prompt until it fits within this_max_context:
 * first drops unpinned example messages (count_exm_add), then the oldest
 * chat entries (mesSend). Mutates those closure variables in place and
 * re-runs setPromptString() on each pass.
 * @returns {Promise<void>}
 */
async function checkPromptSize() {
console.debug('---checking Prompt size');
setPromptString();
const prompt = [
beforeScenarioAnchor,
storyString,
afterScenarioAnchor,
mesExmString,
mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
'\n',
generatedPromptCache,
quiet_prompt,
].join('').replace(/\r/gm, '');
let thisPromptContextSize = await getTokenCountAsync(prompt, power_user.token_padding);
if (thisPromptContextSize > this_max_context) { //if the prepared prompt is larger than the max context size...
if (count_exm_add > 0) { // ..and we have example messages..
count_exm_add--; // remove the example messages...
await checkPromptSize(); // and try again...
} else if (mesSend.length > 0) { // if the chat history is longer than 0
mesSend.shift(); // remove the first (oldest) chat entry..
await checkPromptSize(); // and check size again..
} else {
//end — nothing left to trim; the prompt may still exceed the context limit
console.debug(`---mesSend.length = ${mesSend.length}`);
}
}
}
if (generatedPromptCache.length > 0 && main_api !== 'openai') {
console.debug('---Generated Prompt Cache length: ' + generatedPromptCache.length);
await checkPromptSize();
} else {
console.debug('---calling setPromptString ' + generatedPromptCache.length);
setPromptString();
}
// Fetches the combined prompt for both negative and positive prompts
const cfgGuidanceScale = getGuidanceScale();
const useCfgPrompt = cfgGuidanceScale && cfgGuidanceScale.value !== 1;
// For prompt bit itemization
let mesSendString = '';
/**
 * Builds the final flattened prompt string (or the CFG negative prompt).
 * Applies CFG prompt injection, prompt bias, and prompt-cache cleanup, then
 * lets GENERATE_BEFORE_COMBINE_PROMPTS subscribers optionally supply their own
 * combined prompt before falling back to the local combine() flattening.
 * @param {boolean} isNegative Whether to build the CFG negative prompt
 * @returns {string|undefined} The combined prompt; '' for Chat Completion;
 *     undefined when a negative prompt is requested but CFG is disabled
 */
function getCombinedPrompt(isNegative) {
// Only return if the guidance scale doesn't exist or the value is 1
// Also don't return if constructing the neutral prompt
if (isNegative && !useCfgPrompt) {
return;
}
// OAI has its own prompt manager. No need to do anything here
if (main_api === 'openai') {
return '';
}
// Deep clone so CFG/bias mutations don't touch the shared mesSend array
let finalMesSend = structuredClone(mesSend);
if (useCfgPrompt) {
const cfgPrompt = getCfgPrompt(cfgGuidanceScale, isNegative);
if (cfgPrompt.value) {
if (cfgPrompt.depth === 0) {
// Depth 0: append directly to the last message, space-separated if needed
finalMesSend[finalMesSend.length - 1].message +=
/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
? cfgPrompt.value
: ` ${cfgPrompt.value}`;
} else {
// TODO: Make all extension prompts use an array/splice method
const lengthDiff = mesSend.length - cfgPrompt.depth;
const cfgDepth = lengthDiff >= 0 ? lengthDiff : 0;
finalMesSend[cfgDepth].extensionPrompts.push(`${cfgPrompt.value}\n`);
}
}
}
// Add prompt bias after everything else
// Always run with continue
if (!isInstruct && !isImpersonate) {
if (promptBias.trim().length !== 0) {
finalMesSend[finalMesSend.length - 1].message +=
/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
? promptBias.trimStart()
: ` ${promptBias.trimStart()}`;
}
}
// Prune from prompt cache if it exists
if (generatedPromptCache.length !== 0) {
generatedPromptCache = cleanupPromptCache(generatedPromptCache);
}
// Flattens the multiple prompt objects to a string.
const combine = () => {
// Right now, everything is suffixed with a newline
mesSendString = finalMesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join('');
// add a custom dingus (if defined)
mesSendString = addChatsSeparator(mesSendString);
// add chat preamble
mesSendString = addChatsPreamble(mesSendString);
let combinedPrompt = beforeScenarioAnchor +
storyString +
afterScenarioAnchor +
mesExmString +
mesSendString +
generatedPromptCache;
combinedPrompt = combinedPrompt.replace(/\r/gm, '');
if (power_user.collapse_newlines) {
combinedPrompt = collapseNewlines(combinedPrompt);
}
return combinedPrompt;
};
// Mark which entries were injected (indices count from the end of the array)
finalMesSend.forEach((item, i) => {
item.injected = injectedIndices.includes(finalMesSend.length - i - 1);
});
let data = {
api: main_api,
combinedPrompt: null,
description,
personality,
persona,
scenario,
char: name2,
user: name1,
worldInfoBefore,
worldInfoAfter,
beforeScenarioAnchor,
afterScenarioAnchor,
storyString,
mesExmString,
mesSendString,
finalMesSend,
generatedPromptCache,
main: system,
jailbreak,
naiPreamble: nai_settings.preamble,
};
// Before returning the combined prompt, give available context related information to all subscribers.
eventSource.emitAndWait(event_types.GENERATE_BEFORE_COMBINE_PROMPTS, data);
// If one or multiple subscribers return a value, forfeit the responsibility of flattening the context.
return !data.combinedPrompt ? combine() : data.combinedPrompt;
}
let finalPrompt = getCombinedPrompt(false);
const eventData = { prompt: finalPrompt, dryRun: dryRun };
await eventSource.emit(event_types.GENERATE_AFTER_COMBINE_PROMPTS, eventData);
finalPrompt = eventData.prompt;
let maxLength = Number(amount_gen); // how many tokens the AI will be requested to generate
let thisPromptBits = [];
let generate_data;
switch (main_api) {
case 'koboldhorde':
case 'kobold':
if (main_api == 'koboldhorde' && horde_settings.auto_adjust_response_length) {
maxLength = Math.min(maxLength, adjustedParams.maxLength);
maxLength = Math.max(maxLength, MIN_LENGTH); // prevent validation errors
}
generate_data = {
prompt: finalPrompt,
gui_settings: true,
max_length: maxLength,
max_context_length: max_context,
api_server,
};
if (preset_settings != 'gui') {
const isHorde = main_api == 'koboldhorde';
const presetSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
const maxContext = (adjustedParams && horde_settings.auto_adjust_context_length) ? adjustedParams.maxContextLength : max_context;
generate_data = getKoboldGenerationData(finalPrompt, presetSettings, maxLength, maxContext, isHorde, type);
}
break;
case 'textgenerationwebui': {
const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale, negativePrompt: getCombinedPrompt(true) } : null;
generate_data = getTextGenGenerationData(finalPrompt, maxLength, isImpersonate, isContinue, cfgValues, type);
break;
}
case 'novel': {
const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale } : null;
const presetSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
generate_data = getNovelGenerationData(finalPrompt, presetSettings, maxLength, isImpersonate, isContinue, cfgValues, type);
break;
}
case 'openai': {
let [prompt, counts] = await prepareOpenAIMessages({
name2: name2,
charDescription: description,
charPersonality: personality,
Scenario: scenario,
worldInfoBefore: worldInfoBefore,
worldInfoAfter: worldInfoAfter,
extensionPrompts: extension_prompts,
bias: promptBias,
type: type,
quietPrompt: quiet_prompt,
quietImage: quietImage,
cyclePrompt: cyclePrompt,
systemPromptOverride: system,
jailbreakPromptOverride: jailbreak,
personaDescription: persona,
messages: oaiMessages,
messageExamples: oaiMessageExamples,
}, dryRun);
generate_data = { prompt: prompt };
// TODO: move these side-effects somewhere else, so this switch-case solely sets generate_data
// counts will return false if the user has not enabled the token breakdown feature
if (counts) {
parseTokenCounts(counts, thisPromptBits);
}
if (!dryRun) {
setInContextMessages(openai_messages_count, type);
}
break;
}
}
await eventSource.emit(event_types.GENERATE_AFTER_DATA, generate_data);
if (dryRun) {
generatedPromptCache = '';
return Promise.resolve();
}
/**
 * Dispatches the prepared generate_data to the API and records prompt
 * itemization data for this message. For streaming generations it drives the
 * StreamingProcessor to completion and returns a String object tagged with
 * messageChunk/fromStream; otherwise it returns the raw request result.
 * Returns undefined when streaming was stopped or did not finish — the
 * onSuccess handler treats that as "nothing to do".
 * @returns {Promise<any>} Generation result (stream-tagged String, raw API data, or undefined)
 */
async function finishGenerating() {
if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}
console.debug('rungenerate calling API');
showStopButton();
//set array object for prompt token itemization of this message
let currentArrayEntry = Number(thisPromptBits.length - 1);
let additionalPromptStuff = {
...thisPromptBits[currentArrayEntry],
rawPrompt: generate_data.prompt || generate_data.input,
mesId: getNextMessageId(type),
allAnchors: getAllExtensionPrompts(),
chatInjects: injectedIndices?.map(index => arrMes[arrMes.length - index - 1])?.join('') || '',
summarizeString: (extension_prompts['1_memory']?.value || ''),
authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
smartContextString: (extension_prompts['chromadb']?.value || ''),
chatVectorsString: (extension_prompts['3_vectors']?.value || ''),
dataBankVectorsString: (extension_prompts['4_vectors_data_bank']?.value || ''),
worldInfoString: worldInfoString,
storyString: storyString,
beforeScenarioAnchor: beforeScenarioAnchor,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
mesSendString: mesSendString,
generatedPromptCache: generatedPromptCache,
promptBias: promptBias,
finalPrompt: finalPrompt,
charDescription: description,
charPersonality: personality,
scenarioText: scenario,
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
userPersona: (power_user.persona_description || ''),
};
//console.log(additionalPromptStuff);
// Replace an existing itemization entry for this message id, or append a new one
const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === additionalPromptStuff.mesId);
if (itemizedIndex !== -1) {
itemizedPrompts[itemizedIndex] = additionalPromptStuff;
}
else {
itemizedPrompts.push(additionalPromptStuff);
}
console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);
if (isStreamingEnabled() && type !== 'quiet') {
streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, continue_mag);
if (isContinue) {
// Save reply does add cycle text to the prompt, so it's not needed here
streamingProcessor.firstMessageText = '';
}
streamingProcessor.generator = await sendStreamingRequest(type, generate_data);
hideSwipeButtons();
let getMessage = await streamingProcessor.generate();
let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
if (isContinue) {
// Prepend the continued-from text so the full message is saved
getMessage = continue_mag + getMessage;
}
if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
streamingProcessor = null;
triggerAutoContinue(messageChunk, isImpersonate);
// Boxed String lets onSuccess detect stream results via the fromStream flag
return Object.defineProperties(new String(getMessage), {
'messageChunk': { value: messageChunk },
'fromStream': { value: true },
});
}
} else {
return await sendGenerationRequest(type, generate_data);
}
}
return finishGenerating().then(onSuccess, onError);
async function onSuccess(data) {
if (!data) return;
if (data?.fromStream) {
return data;
}
let messageChunk = '';
if (data.error) {
unblockGeneration(type);
generatedPromptCache = '';
if (data?.response) {
toastr.error(data.response, 'API Error');
}
throw data?.response;
}
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
kobold_horde_model = title;
const swipes = extractMultiSwipes(data, type);
messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
if (isContinue) {
getMessage = continue_mag + getMessage;
}
// Formatting
const displayIncomplete = type === 'quiet' && !quietToLoud;
getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);
if (getMessage.length > 0 || data.allowEmptyResponse) {
if (isImpersonate) {
$('#send_textarea').val(getMessage)[0].dispatchEvent(new Event('input', { bubbles: true }));
generatedPromptCache = '';
await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
}
else if (type == 'quiet') {
unblockGeneration(type);
return getMessage;
}
else {
// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
if (originalType !== 'continue') {
({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
}
else {
({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
}
// This relies on `saveReply` having been called to add the message to the chat, so it must be last.
parseAndSaveLogprobs(data, continue_mag);
}
if (type !== 'quiet') {
playMessageSound();
}
} else {
// If maxLoops is not passed in (e.g. first time generating), set it to MAX_GENERATION_LOOPS
maxLoops ??= MAX_GENERATION_LOOPS;
if (maxLoops === 0) {
if (type !== 'quiet') {
throwCircuitBreakerError();
}
throw new Error('Generate circuit breaker interruption');
}
// regenerate with character speech reenforced
// to make sure we leave on swipe type while also adding the name2 appendage
await delay(1000);
// A message was already deleted on regeneration, so instead treat it as a normal gen
if (type === 'regenerate') {
type = 'normal';
}
// The first await is for waiting for the generate to start. The second one is waiting for it to finish
const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, quietName, maxLoops: maxLoops - 1 });
return result;
}
if (power_user.auto_swipe) {
console.debug('checking for autoswipeblacklist on non-streaming message');
function containsBlacklistedWords(getMessage, blacklist, threshold) {
console.debug('checking blacklisted words');
const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
const matches = getMessage.match(regex) || [];
return matches.length >= threshold;
}
const generatedTextFiltered = (getMessage) => {
if (power_user.auto_swipe_blacklist_threshold) {
if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
console.debug('Generated text has blacklisted words');
return true;
}
}
return false;
};
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
// TODO: do we want to resolve after an auto-swipe?
return;
}
}
console.debug('/api/chats/save called by /Generate');
await saveChatConditional();
unblockGeneration(type);
streamingProcessor = null;
if (type !== 'quiet') {
triggerAutoContinue(messageChunk, isImpersonate);
}
// Don't break the API chain that expects a single string in return
return Object.defineProperty(new String(getMessage), 'messageChunk', { value: messageChunk });
}
function onError(exception) {
if (typeof exception?.error?.message === 'string') {
toastr.error(exception.error.message, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
}
generatedPromptCache = '';
unblockGeneration(type);
console.log(exception);
streamingProcessor = null;
throw exception;
}
}
/**
 * Stops the generation and any streaming if it is currently running.
 * @returns {boolean} Whether a generation or stream was actually stopped
 */
export function stopGeneration() {
    const hadStream = Boolean(streamingProcessor);
    if (hadStream) {
        streamingProcessor.onStopStreaming();
        streamingProcessor = null;
    }
    const hadAbortable = Boolean(abortController);
    if (hadAbortable) {
        abortController.abort('Clicked stop button');
        hideStopButton();
    }
    // Fire-and-forget: listeners are notified even when nothing was running.
    eventSource.emit(event_types.GENERATION_STOPPED);
    return hadStream || hadAbortable;
}
/**
 * Injects extension prompts (IN_CHAT position) into chat messages at their configured depths.
 *
 * Mutates `messages` in place: the array is reversed so that index 0 is the most
 * recent message, prompts are spliced in at their depth, then the array is
 * reversed back to chronological order before returning.
 * @param {object[]} messages Array of chat messages
 * @param {boolean} isContinue Whether the generation is a continuation. If true, the extension prompts of depth 0 are injected at position 1.
 * @returns {number[]} Array of indices (relative to the reversed, most-recent-first view) where the extension prompts were injected
 */
function doChatInject(messages, isContinue) {
const injectedIndices = [];
let totalInsertedMessages = 0;
// Reverse so that "depth N" maps directly to array index N.
messages.reverse();
for (let i = 0; i <= MAX_INJECTION_DEPTH; i++) {
// Order of priority (most important go lower)
const roles = [extension_prompt_roles.SYSTEM, extension_prompt_roles.USER, extension_prompt_roles.ASSISTANT];
// Speaker name per role; system prompts are rendered as unnamed narrator messages.
const names = {
[extension_prompt_roles.SYSTEM]: '',
[extension_prompt_roles.USER]: name1,
[extension_prompt_roles.ASSISTANT]: name2,
};
const roleMessages = [];
const separator = '\n';
const wrap = false;
for (const role of roles) {
const extensionPrompt = String(getExtensionPrompt(extension_prompt_types.IN_CHAT, i, separator, role, wrap)).trimStart();
const isNarrator = role === extension_prompt_roles.SYSTEM;
const isUser = role === extension_prompt_roles.USER;
const name = names[role];
// Only inject a message when the prompt for this depth/role is non-empty.
if (extensionPrompt) {
roleMessages.push({
name: name,
is_user: isUser,
mes: extensionPrompt,
extra: {
type: isNarrator ? system_message_types.NARRATOR : null,
},
});
}
}
if (roleMessages.length) {
// On a continuation, depth 0 is bumped to 1 — presumably so the injection
// lands before the message being continued rather than splitting it. TODO confirm.
const depth = isContinue && i === 0 ? 1 : i;
// Offset by messages already inserted at shallower depths so configured
// depths remain relative to the original (pre-injection) chat.
const injectIdx = depth + totalInsertedMessages;
messages.splice(injectIdx, 0, ...roleMessages);
totalInsertedMessages += roleMessages.length;
// Note: the callback's `i` shadows the loop counter; it indexes roleMessages.
injectedIndices.push(...Array.from({ length: roleMessages.length }, (_, i) => injectIdx + i));
}
}
// Restore chronological order.
messages.reverse();
return injectedIndices;
}
/**
 * Removes all custom-depth World Info entries from the extension prompts.
 * Their keys are uniquely generated per activation, so stale ones would
 * otherwise duplicate on every generation.
 */
function flushWIDepthInjections() {
    const staleKeys = Object.keys(extension_prompts).filter((key) => key.startsWith('customDepthWI'));
    for (const staleKey of staleKeys) {
        delete extension_prompts[staleKey];
    }
}
/**
 * Unblocks the UI after a generation is complete.
 * @param {string} [type] Generation type (optional)
 */
function unblockGeneration(type) {
    // A quiet generation must not unblock the UI while a parallel stream is still producing output.
    const parallelStreamRunning = streamingProcessor && !streamingProcessor.isFinished;
    if (type === 'quiet' && parallelStreamRunning) {
        return;
    }
    is_send_press = false;
    activateSendButtons();
    showSwipeButtons();
    setGenerationProgress(0);
    flushEphemeralStoppingStrings();
    flushWIDepthInjections();
}
/**
 * Gets the chat index that the next generated message will occupy.
 * A swipe replaces the last message instead of appending a new one.
 * @param {string} type Generation type
 * @returns {number} Index of the next message in the chat array
 */
export function getNextMessageId(type) {
    // Strict equality: `type` is always a plain string generation type.
    return type === 'swipe' ? chat.length - 1 : chat.length;
}
/**
 * Determines if the message should be auto-continued.
 * @param {string} messageChunk Current message chunk
 * @param {boolean} isImpersonate Is the user impersonation
 * @returns {boolean} Whether the message should be auto-continued
 */
export function shouldAutoContinue(messageChunk, isImpersonate) {
    // Feature and input preconditions, checked in order of cheapness.
    if (!power_user.auto_continue.enabled) {
        console.debug('Auto-continue is disabled by user.');
        return false;
    }
    if (typeof messageChunk !== 'string') {
        console.debug('Not triggering auto-continue because message chunk is not a string');
        return false;
    }
    if (isImpersonate) {
        console.log('Continue for impersonation is not implemented yet');
        return false;
    }
    if (is_send_press) {
        console.debug('Auto-continue is disabled because a message is currently being sent.');
        return false;
    }
    if (power_user.auto_continue.target_length <= 0) {
        console.log('Auto-continue target length is 0, not triggering auto-continue');
        return false;
    }
    if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
        console.log('Auto-continue for OpenAI is disabled by user.');
        return false;
    }
    // Never auto-continue over text the user has already typed.
    const pendingInput = String($('#send_textarea').val());
    if (pendingInput.length > 0) {
        console.log('Not triggering auto-continue because user input is not empty');
        return false;
    }
    // A chunk shorter than this is considered an empty/garbage generation.
    const USABLE_LENGTH = 5;
    const hasUsableChunk = messageChunk.trim().length > USABLE_LENGTH && Boolean(chat.length);
    if (!hasUsableChunk) {
        console.log('Last generated chunk was empty, not triggering auto-continue');
        return false;
    }
    const lastMessage = chat[chat.length - 1];
    const messageLength = getTokenCount(lastMessage.mes);
    const targetLength = power_user.auto_continue.target_length;
    if (messageLength < targetLength) {
        console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${targetLength}. Message chunk: ${messageChunk}`);
        return true;
    }
    console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${targetLength}`);
    return false;
}
/**
 * Triggers auto-continue if the message meets the criteria.
 * @param {string} messageChunk Current message chunk
 * @param {boolean} isImpersonate Is the user impersonation
 */
export function triggerAutoContinue(messageChunk, isImpersonate) {
    // Group chats manage their own turn flow; never auto-continue there.
    if (selected_group) {
        console.debug('Auto-continue is disabled for group chat');
        return;
    }
    const shouldContinue = shouldAutoContinue(messageChunk, isImpersonate);
    if (shouldContinue) {
        $('#option_continue').trigger('click');
    }
}
/**
 * Gets the message and prompt bias strings for the current generation.
 * @param {string} textareaText User input text
 * @param {string} type Generation type
 * @returns {{messageBias: string, promptBias: string, isUserPromptBias: boolean}} Resolved bias strings
 */
export function getBiasStrings(textareaText, type) {
    // Impersonation and continuation never apply biases.
    if (type == 'impersonate' || type == 'continue') {
        return { messageBias: '', promptBias: '', isUserPromptBias: false };
    }
    let messageBias = extractMessageBias(textareaText);
    let promptBias = '';
    // If user input is not provided, retrieve the bias of the most recent relevant message
    if (!textareaText) {
        for (let i = chat.length - 1; i >= 0; i--) {
            // A swipe replaces the last message, so its bias must not be reused.
            const isSwipeTarget = type === 'swipe' && chat.length - 1 === i;
            if (isSwipeTarget) {
                continue;
            }
            const mes = chat[i];
            const isRelevant = mes && (mes.is_user || mes.is_system || mes.extra?.type === system_message_types.NARRATOR);
            if (isRelevant) {
                if (mes.extra?.bias?.trim()?.length > 0) {
                    promptBias = mes.extra.bias;
                }
                break;
            }
        }
    }
    // Priority: explicit message bias > most recent message bias > persistent user bias.
    promptBias = messageBias || promptBias || power_user.user_prompt_bias || '';
    const isUserPromptBias = promptBias === power_user.user_prompt_bias;
    // Substitute macros in both strings before returning.
    messageBias = substituteParams(messageBias);
    promptBias = substituteParams(promptBias);
    return { messageBias, promptBias, isUserPromptBias };
}
/**
 * Formats a single chat history item into prompt text.
 * @param {Object} chatItem Message history item.
 * @param {boolean} isInstruct Whether instruct mode is enabled.
 * @param {boolean|number} forceOutputSequence Whether to force the first/last output sequence for instruct mode.
 * @returns {string} Formatted message text (newline-terminated in non-instruct mode).
 */
function formatMessageHistoryItem(chatItem, isInstruct, forceOutputSequence) {
    const isNarratorType = chatItem?.extra?.type === system_message_types.NARRATOR;
    // User messages keep their own name; others fall back to the character name.
    const itemName = chatItem.is_user ? chatItem.name : (chatItem?.name ? chatItem.name : name2);
    if (isInstruct) {
        return formatInstructModeChat(itemName, chatItem.mes, chatItem.is_user, isNarratorType, chatItem.force_avatar, name1, name2, forceOutputSequence);
    }
    // Don't include a name if it's empty, and never prepend one for narrator messages.
    if (chatItem?.name && !isNarratorType) {
        return `${itemName}: ${chatItem.mes}\n`;
    }
    return `${chatItem.mes}\n`;
}
/**
 * Removes all {{macros}} from a string.
 * @param {string} str String to remove macros from.
 * @returns {string} String with macros removed and surrounding whitespace trimmed.
 */
export function removeMacros(str) {
    // Non-greedy match so adjacent macros are removed individually.
    const macroPattern = /\{\{[\s\S]*?\}\}/gm;
    const source = str ?? '';
    return source.replace(macroPattern, '').trim();
}
/**
 * Inserts a user message into the chat history.
 * @param {string} messageText Message text.
 * @param {string} messageBias Message bias.
 * @param {number} [insertAt] Optional index to insert the message at.
 * @param {boolean} [compact] Send as a compact display message.
 * @param {string} [name] Name of the user sending the message. Defaults to name1.
 * @param {string} [avatar] Avatar of the user sending the message. Defaults to user_avatar.
 * @returns {Promise<void>} A promise that resolves when the message is inserted.
 */
export async function sendMessageAsUser(messageText, messageBias, insertAt = null, compact = false, name = name1, avatar = user_avatar) {
// Apply user-input regex scripts before anything else touches the text.
messageText = getRegexedString(messageText, regex_placement.USER_INPUT);
const message = {
name: name,
is_user: true,
is_system: false,
send_date: getMessageTimeStamp(),
mes: substituteParams(messageText),
extra: {
isSmallSys: compact,
},
};
if (power_user.message_token_count_enabled) {
message.extra.token_count = await getTokenCountAsync(message.mes, 0);
}
// Lock user avatar to a persona.
if (avatar in power_user.personas) {
message.force_avatar = getUserAvatar(avatar);
}
if (messageBias) {
message.extra.bias = messageBias;
// Strip the bias macro so it doesn't remain visible in the message text.
message.mes = removeMacros(message.mes);
}
await populateFileAttachment(message);
statMesProcess(message, 'user', characters, this_chid, '');
// Mid-chat insertion requires a full chat reload to render in position;
// appending to the end can render the single new message in place.
if (typeof insertAt === 'number' && insertAt >= 0 && insertAt <= chat.length) {
chat.splice(insertAt, 0, message);
await saveChatConditional();
await eventSource.emit(event_types.MESSAGE_SENT, insertAt);
await reloadCurrentChat();
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, insertAt);
} else {
chat.push(message);
const chat_id = (chat.length - 1);
// Event order matters: MESSAGE_SENT fires before rendering, RENDERED after,
// and the chat is saved last so listeners see the final state on disk.
await eventSource.emit(event_types.MESSAGE_SENT, chat_id);
addOneMessage(message);
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, chat_id);
await saveChatConditional();
}
}
/**
 * Gets the maximum usable context size for the current API.
 * @param {number|null} overrideResponseLength Optional override for the response length.
 * @returns {number} Maximum usable context size.
 */
export function getMaxContextSize(overrideResponseLength = null) {
    // Normalize invalid overrides (non-numbers, NaN, non-positive) to "no override",
    // without mutating the caller-visible parameter.
    const override = (typeof overrideResponseLength === 'number' && overrideResponseLength > 0 && !Number.isNaN(overrideResponseLength))
        ? overrideResponseLength
        : null;
    // Fallback context size when no API-specific rule applies.
    let this_max_context = 1487;
    if (main_api === 'kobold' || main_api === 'koboldhorde' || main_api === 'textgenerationwebui') {
        this_max_context = (max_context - (override || amount_gen));
    }
    if (main_api === 'novel') {
        this_max_context = Number(max_context);
        // Clio and Kayra models are hard-capped at 8192 tokens of context.
        if (nai_settings.model_novel.includes('clio')) {
            this_max_context = Math.min(max_context, 8192);
        }
        if (nai_settings.model_novel.includes('kayra')) {
            this_max_context = Math.min(max_context, 8192);
            // Kayra context may be further limited by the user's subscription tier.
            const subscriptionLimit = getKayraMaxContextTokens();
            if (typeof subscriptionLimit === 'number' && this_max_context > subscriptionLimit) {
                this_max_context = subscriptionLimit;
                console.log(`NovelAI subscription limit reached. Max context size is now ${this_max_context}`);
            }
        }
        this_max_context = this_max_context - (override || amount_gen);
    }
    if (main_api === 'openai') {
        this_max_context = oai_settings.openai_max_context - (override || oai_settings.openai_max_tokens);
    }
    return this_max_context;
}
/**
 * Parses per-section token counts from the chat completion prompt builder
 * and pushes an itemization entry onto the accumulator array.
 * @param {object} counts Map of prompt section names to token counts (entries may be undefined/NaN)
 * @param {object[]} thisPromptBits Accumulator array receiving the itemized token stats
 */
function parseTokenCounts(counts, thisPromptBits) {
    /**
     * Sums all arguments that are valid numbers, ignoring NaN/undefined entries.
     * @param {...any} numbers Values to sum
     * @returns {number} Sum of the numeric values
     */
    function getSum(...numbers) {
        return numbers.map(x => Number(x)).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0);
    }
    // BUG FIX: the values must be spread into the rest parameter. Passing the
    // array itself made getSum compute Number([v1, v2, ...]) === NaN, so the
    // total was silently 0 whenever counts had more than one entry.
    const total = getSum(...Object.values(counts));
    thisPromptBits.push({
        oaiStartTokens: (counts?.start + counts?.controlPrompts) || 0,
        oaiPromptTokens: getSum(counts?.prompt, counts?.charDescription, counts?.charPersonality, counts?.scenario) || 0,
        oaiBiasTokens: counts?.bias || 0,
        oaiNudgeTokens: counts?.nudge || 0,
        oaiJailbreakTokens: counts?.jailbreak || 0,
        oaiImpersonateTokens: counts?.impersonate || 0,
        oaiExamplesTokens: (counts?.dialogueExamples + counts?.examples) || 0,
        oaiConversationTokens: (counts?.conversation + counts?.chatHistory) || 0,
        oaiNsfwTokens: counts?.nsfw || 0,
        oaiMainTokens: counts?.main || 0,
        oaiTotalTokens: total,
    });
}
/**
 * Prepends the NovelAI preamble to the chat string when the Novel API is active.
 * @param {string} mesSendString Assembled chat history string
 * @returns {string} Chat string, with the preamble prepended for NovelAI
 */
function addChatsPreamble(mesSendString) {
    if (main_api !== 'novel') {
        return mesSendString;
    }
    return substituteParams(nai_settings.preamble) + '\n' + mesSendString;
}
/**
 * Prepends the configured chat-start separator to the chat string, if one is set.
 * @param {string} mesSendString Assembled chat history string
 * @returns {string} Chat string, optionally prefixed with the separator
 */
function addChatsSeparator(mesSendString) {
    const chatStart = power_user.context.chat_start;
    if (!chatStart) {
        return mesSendString;
    }
    return substituteParams(chatStart + '\n') + mesSendString;
}
/**
 * Duplicates the currently selected character after user confirmation.
 * @returns {Promise<string>} Always resolves to an empty string
 */
async function duplicateCharacter() {
    if (!this_chid) {
        toastr.warning('You must first select a character to duplicate!');
        return '';
    }
    // Ask the user before cloning anything.
    const confirmContent = $(await renderTemplateAsync('duplicateConfirm'));
    const confirmed = await callGenericPopup(confirmContent, POPUP_TYPE.CONFIRM);
    if (!confirmed) {
        console.log('User cancelled duplication');
        return '';
    }
    const requestBody = { avatar_url: characters[this_chid].avatar };
    const response = await fetch('/api/characters/duplicate', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify(requestBody),
    });
    if (response.ok) {
        toastr.success('Character Duplicated');
        const data = await response.json();
        // Notify listeners of the new avatar, then refresh the character list.
        await eventSource.emit(event_types.CHARACTER_DUPLICATED, { oldAvatar: requestBody.avatar_url, newAvatar: data.path });
        await getCharacters();
    }
    return '';
}
/**
 * Computes the token counts, percentages and metadata used by the prompt
 * itemization popup template for one stored prompt set.
 * @param {object[]} itemizedPrompts Stored prompt-bit entries
 * @param {number} thisPromptSet Index into itemizedPrompts to itemize
 * @returns {Promise<object>} Parameters object consumed by the itemization template
 */
export async function itemizedParams(itemizedPrompts, thisPromptSet) {
const params = {
charDescriptionTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].charDescription),
charPersonalityTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].charPersonality),
scenarioTextTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].scenarioText),
userPersonaStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].userPersona),
worldInfoStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].worldInfoString),
allAnchorsTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].allAnchors),
summarizeStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].summarizeString),
authorsNoteStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].authorsNoteString),
smartContextStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].smartContextString),
beforeScenarioAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].beforeScenarioAnchor),
afterScenarioAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].afterScenarioAnchor),
zeroDepthAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].zeroDepthAnchor), // TODO: unused
thisPrompt_padding: itemizedPrompts[thisPromptSet].padding,
this_main_api: itemizedPrompts[thisPromptSet].main_api,
chatInjects: await getTokenCountAsync(itemizedPrompts[thisPromptSet].chatInjects),
chatVectorsStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].chatVectorsString),
dataBankVectorsStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].dataBankVectorsString),
};
// FIXME(review): params.ActualChatHistoryTokens is still undefined at this point,
// so this subtraction yields NaN — and the value is unconditionally overwritten
// below in both API branches, making this block a no-op. Presumably the
// adjustment was meant to run after ActualChatHistoryTokens is assigned; confirm.
if (params.chatInjects) {
params.ActualChatHistoryTokens = params.ActualChatHistoryTokens - params.chatInjects;
}
if (params.this_main_api == 'openai') {
//for OAI API
//console.log('-- Counting OAI Tokens');
//params.finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
params.oaiMainTokens = itemizedPrompts[thisPromptSet].oaiMainTokens;
params.oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
params.ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
params.examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
// Scenario anchors are counted inside oaiPromptTokens upstream; remove them here
// and add the examples so the "prompt" figure matches the template's breakdown.
params.oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - (params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens) + params.examplesStringTokens;
params.oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
params.oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
params.oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
params.oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
params.oaiNsfwTokens = itemizedPrompts[thisPromptSet].oaiNsfwTokens;
// Sum of every displayed section; used as the denominator for all percentages below.
params.finalPromptTokens =
params.oaiStartTokens +
params.oaiPromptTokens +
params.oaiMainTokens +
params.oaiNsfwTokens +
params.oaiBiasTokens +
params.oaiImpersonateTokens +
params.oaiJailbreakTokens +
params.oaiNudgeTokens +
params.ActualChatHistoryTokens +
//charDescriptionTokens +
//charPersonalityTokens +
//allAnchorsTokens +
params.worldInfoStringTokens +
params.beforeScenarioAnchorTokens +
params.afterScenarioAnchorTokens;
// Max context size - max completion tokens
params.thisPrompt_max_context = (oai_settings.openai_max_context - oai_settings.openai_max_tokens);
//console.log('-- applying % on OAI tokens');
params.oaiStartTokensPercentage = ((params.oaiStartTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.storyStringTokensPercentage = (((params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens + params.oaiPromptTokens) / (params.finalPromptTokens)) * 100).toFixed(2);
params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.promptBiasTokensPercentage = ((params.oaiBiasTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.finalPromptTokens)) * 100).toFixed(2);
params.selectedTokenizer = getFriendlyTokenizerName(params.this_main_api).tokenizerName;
params.oaiSystemTokens = params.oaiImpersonateTokens + params.oaiJailbreakTokens + params.oaiNudgeTokens + params.oaiStartTokens + params.oaiNsfwTokens + params.oaiMainTokens;
params.oaiSystemTokensPercentage = ((params.oaiSystemTokens / (params.finalPromptTokens)) * 100).toFixed(2);
} else {
//for non-OAI APIs
//console.log('-- Counting non-OAI Tokens');
params.finalPromptTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].finalPrompt);
// World info is embedded in the story string upstream, so subtract it back out.
params.storyStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].storyString) - params.worldInfoStringTokens;
params.examplesStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].examplesString);
params.mesSendStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].mesSendString);
params.ActualChatHistoryTokens = params.mesSendStringTokens - (params.allAnchorsTokens - (params.beforeScenarioAnchorTokens + params.afterScenarioAnchorTokens)) + power_user.token_padding;
params.instructionTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].instruction);
params.promptBiasTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].promptBias);
params.totalTokensInPrompt =
params.storyStringTokens + //chardefs total
params.worldInfoStringTokens +
params.examplesStringTokens + // example messages
params.ActualChatHistoryTokens + //chat history
params.allAnchorsTokens + // AN and/or legacy anchors
//afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario'
//zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth
params.promptBiasTokens; //{{}}
//- thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPrompt'
params.thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context;
params.thisPrompt_actual = params.thisPrompt_max_context - params.thisPrompt_padding;
//console.log('-- applying % on non-OAI tokens');
params.storyStringTokensPercentage = ((params.storyStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.promptBiasTokensPercentage = ((params.promptBiasTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
params.selectedTokenizer = getFriendlyTokenizerName(params.this_main_api).tokenizerName;
}
return params;
}
/**
 * Finds the stored prompt set matching the given message ID.
 *
 * Side effects: on a match, updates the module-level
 * PromptArrayItemForRawPromptDisplay; for earlier entries that carry a raw
 * prompt, updates priorPromptArrayItemForRawPromptDisplay (used by the
 * itemization popup's diff view).
 * @param {object[]} itemizedPrompts Stored prompt-bit entries
 * @param {number} incomingMesId Message ID to search for
 * @returns {number|undefined} Index of the matching entry, or undefined if not found
 */
export function findItemizedPromptSet(itemizedPrompts, incomingMesId) {
    let thisPromptSet = undefined;
    for (let i = 0; i < itemizedPrompts.length; i++) {
        console.log(`looking for ${incomingMesId} vs ${itemizedPrompts[i].mesId}`);
        if (itemizedPrompts[i].mesId === incomingMesId) {
            console.log(`found matching mesID ${i}`);
            thisPromptSet = i;
            PromptArrayItemForRawPromptDisplay = i;
            console.log(`wanting to raw display of ArrayItem: ${PromptArrayItemForRawPromptDisplay} which is mesID ${incomingMesId}`);
            console.log(itemizedPrompts[thisPromptSet]);
            break;
        } else if (itemizedPrompts[i].rawPrompt) {
            // Remember the most recent earlier prompt that can be diffed against.
            priorPromptArrayItemForRawPromptDisplay = i;
        }
    }
    return thisPromptSet;
}
/**
 * Shows the prompt itemization popup for a given message.
 * @param {object[]} itemizedPrompts Stored prompt-bit entries
 * @param {number|string} requestedMesId Message ID whose prompt should be itemized
 * @returns {Promise<null|void>} Resolves to null when no matching prompt set exists
 */
async function promptItemize(itemizedPrompts, requestedMesId) {
console.log('PROMPT ITEMIZE ENTERED');
var incomingMesId = Number(requestedMesId);
console.debug(`looking for MesId ${incomingMesId}`);
var thisPromptSet = findItemizedPromptSet(itemizedPrompts, incomingMesId);
if (thisPromptSet === undefined) {
console.log(`couldnt find the right mesId. looked for ${incomingMesId}`);
console.log(itemizedPrompts);
return null;
}
const params = await itemizedParams(itemizedPrompts, thisPromptSet);
// Chat-completion prompts are arrays of {role, content} objects; join them for display/diffing.
const flatten = (rawPrompt) => Array.isArray(rawPrompt) ? rawPrompt.map(x => x.content).join('\n') : rawPrompt;
const template = params.this_main_api == 'openai'
? await renderTemplateAsync('itemizationChat', params)
: await renderTemplateAsync('itemizationText', params);
const popup = new Popup(template, POPUP_TYPE.TEXT);
/** @type {HTMLElement} */
const diffPrevPrompt = popup.dlg.querySelector('#diffPrevPrompt');
// NOTE(review): truthiness check hides the diff button when the prior prompt
// index is 0, even though 0 is a valid array index — confirm if intentional.
if (priorPromptArrayItemForRawPromptDisplay) {
diffPrevPrompt.style.display = '';
diffPrevPrompt.addEventListener('click', function () {
// Diff the previous raw prompt against the current one.
const dmp = new diff_match_patch();
const text1 = flatten(itemizedPrompts[priorPromptArrayItemForRawPromptDisplay].rawPrompt);
const text2 = flatten(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);
dmp.Diff_Timeout = 2.0;
const d = dmp.diff_main(text1, text2);
let ds = dmp.diff_prettyHtml(d);
// make it readable: recolor insert/delete highlights and strip pilcrows
ds = ds.replaceAll('background:#e6ffe6;', 'background:#b9f3b9; color:black;');
ds = ds.replaceAll('background:#ffe6e6;', 'background:#f5b4b4; color:black;');
ds = ds.replaceAll('¶', '');
const container = document.createElement('div');
// Sanitize the generated HTML before inserting it into the DOM.
container.innerHTML = DOMPurify.sanitize(ds);
const rawPromptWrapper = document.getElementById('rawPromptWrapper');
rawPromptWrapper.replaceChildren(container);
$('#rawPromptPopup').slideToggle();
});
} else {
diffPrevPrompt.style.display = 'none';
}
popup.dlg.querySelector('#copyPromptToClipboard').addEventListener('click', function () {
let rawPrompt = itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt;
let rawPromptValues = rawPrompt;
if (Array.isArray(rawPrompt)) {
rawPromptValues = rawPrompt.map(x => x.content).join('\n');
}
navigator.clipboard.writeText(rawPromptValues);
toastr.info('Copied!');
});
popup.dlg.querySelector('#showRawPrompt').addEventListener('click', function () {
//console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);
console.log(PromptArrayItemForRawPromptDisplay);
console.log(itemizedPrompts);
console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);
const rawPrompt = flatten(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);
//let DisplayStringifiedPrompt = JSON.stringify(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt).replace(/\n+/g, ' ');
const rawPromptWrapper = document.getElementById('rawPromptWrapper');
// innerText (not innerHTML): the raw prompt is untrusted text, shown verbatim.
rawPromptWrapper.innerText = rawPrompt;
$('#rawPromptPopup').slideToggle();
});
await popup.show();
}
/**
 * Marks the last in-context message in the chat DOM with the `lastInContext` class.
 * @param {number} lastmsg Number of messages that fit into the context
 * @param {string} type Generation type
 */
function setInContextMessages(lastmsg, type) {
    $('#chat .mes').removeClass('lastInContext');
    // Swipes/regenerates/continues replace the last message, so count one further back.
    // Use a local instead of mutating the parameter.
    const offset = (type === 'swipe' || type === 'regenerate' || type === 'continue')
        ? lastmsg + 1
        : lastmsg;
    const lastMessageBlock = $('#chat .mes:not([is_system="true"])').eq(-offset);
    lastMessageBlock.addClass('lastInContext');
    if (lastMessageBlock.length === 0) {
        const firstMessageId = getFirstDisplayedMessageId();
        // BUG FIX: the attribute selector was missing its closing bracket.
        $(`#chat .mes[mesid="${firstMessageId}"]`).addClass('lastInContext');
    }
}
/**
* Sends a non-streaming request to the API.
* @param {string} type Generation type
* @param {object} data Generation data
* @returns {Promise