import { Conversation } from '@/types/chat';
import { OpenAIModelID, OpenAIModels } from '@/types/openai';
import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from './const';

export const cleanSelectedConversation = (conversation: Conversation) => {
  // added model for each conversation (3/20/23)
  // added system prompt for each conversation (3/21/23)
  // added folders (3/23/23)
  // added prompts (3/26/23)
  // added messages (4/16/23)

  let updatedConversation = conversation;

  // check for model on each conversation
  if (!updatedConversation.model) {
    updatedConversation = {
      ...updatedConversation,
      model: updatedConversation.model || OpenAIModels[OpenAIModelID.GPT_3_5],
    };
  }

  // check for system prompt on each conversation
  if (!updatedConversation.prompt) {
    updatedConversation = {
      ...updatedConversation,
      prompt: updatedConversation.prompt || DEFAULT_SYSTEM_PROMPT,
    };
  }

  // check for temperature on each conversation
  if (!updatedConversation.temperature) {
    updatedConversation = {
      ...updatedConversation,
      temperature: updatedConversation.temperature || DEFAULT_TEMPERATURE,
    };
  }

  // check for folder on each conversation
  if (!updatedConversation.folderId) {
    updatedConversation = {
      ...updatedConversation,
      folderId: updatedConversation.folderId || null,
    };
  }

  // check for messages on each conversation
  if (!updatedConversation.messages) {
    updatedConversation = {
      ...updatedConversation,
      messages: updatedConversation.messages || [],
    };
  }

  return updatedConversation;
};
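
// Usage sketch (illustrative only, not part of the original module): run a
// persisted conversation through the cleaner so records saved by older
// versions pick up the newer default fields. The 'selectedConversation'
// localStorage key used here is an assumption for this example.
//
//   const raw = localStorage.getItem('selectedConversation');
//   if (raw) {
//     const cleaned = cleanSelectedConversation(JSON.parse(raw));
//     localStorage.setItem('selectedConversation', JSON.stringify(cleaned));
//   }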

export const cleanConversationHistory = (history: any[]): Conversation[] => {
  // added model for each conversation (3/20/23)
  // added system prompt for each conversation (3/21/23)
  // added folders (3/23/23)
  // added prompts (3/26/23)
  // added messages (4/16/23)

  if (!Array.isArray(history)) {
    console.warn('history is not an array. Returning an empty array.');
    return [];
  }

  return history.reduce((acc: any[], conversation) => {
    try {
      if (!conversation.model) {
        conversation.model = OpenAIModels[OpenAIModelID.GPT_3_5];
      }

      if (!conversation.prompt) {
        conversation.prompt = DEFAULT_SYSTEM_PROMPT;
      }

      if (!conversation.temperature) {
        conversation.temperature = DEFAULT_TEMPERATURE;
      }

      if (!conversation.folderId) {
        conversation.folderId = null;
      }

      if (!conversation.messages) {
        conversation.messages = [];
      }

      acc.push(conversation);
      return acc;
    } catch (error) {
      console.warn(
        `error while cleaning conversations' history. Removing culprit`,
        error,
      );
    }
    return acc;
  }, []);
};
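
// Usage sketch (illustrative only, not part of the original module): parse the
// stored history array, clean each entry, and fall back to an empty list when
// nothing is stored. The 'conversationHistory' localStorage key used here is
// an assumption for this example.
//
//   const rawHistory = localStorage.getItem('conversationHistory');
//   const history: Conversation[] = rawHistory
//     ? cleanConversationHistory(JSON.parse(rawHistory))
//     : [];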