Skip to content

Commit 8d1c51b

Browse files
committed
- Fixed (hopefully for real...) the tendency of certain models such as DeepSeek to overwrite character profiles. They should now correctly use a separate characterProgression section.
- Better error handling for summarization, ensuring that it still occurs even if it errors out.
- Slight changes to some character profiles.
1 parent 013e790 commit 8d1c51b

File tree

5 files changed

+304
-65
lines changed

5 files changed

+304
-65
lines changed

src/components/views/ChatInterface.vue

Lines changed: 171 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -378,6 +378,7 @@ import prompts from '@/utils/json/prompts.json'
378378
import { marked } from 'marked'
379379
import { animationMappings } from '@/utils/animationMappings'
380380
import { cleanWikiContent, sanitizeActions, splitNarration, parseFallback, parseAIResponse, isWholeWordPresent } from '@/utils/chatUtils'
381+
import { normalizeAiActionCharacterData } from '@/utils/aiActionNormalization'
381382
382383
// Helper to get honorific with fallback to "Commander"
383384
const getHonorific = (characterName: string): string => {
@@ -421,10 +422,46 @@ const showRetry = ref(false)
421422
const lastPrompt = ref('')
422423
const storySummary = ref('')
423424
const lastSummarizedIndex = ref(0)
425+
const summarizationRetryPending = ref(false)
426+
const summarizationAttemptCount = ref(0)
427+
const summarizationLastError = ref<string | null>(null)
424428
let nextActionResolver: (() => void) | null = null
425429
let yapTimeoutId: any = null
426430
const chatHistory = ref<{ role: string, content: string }[]>([])
427431
const characterProfiles = ref<Record<string, any>>({})
432+
const characterProgression = ref<Record<string, any>>({})
433+
434+
// Effective profiles = base profiles + progression overlays (personality + relationships only)
435+
const effectiveCharacterProfiles = computed<Record<string, any>>(() => {
436+
const base = characterProfiles.value || {}
437+
const progression = characterProgression.value || {}
438+
const merged: Record<string, any> = {}
439+
440+
const progressionKeys = Object.keys(progression)
441+
442+
for (const [name, profile] of Object.entries(base)) {
443+
const outProfile = profile && typeof profile === 'object' && !Array.isArray(profile) ? { ...(profile as any) } : profile
444+
const progKey = progressionKeys.find((k) => k.toLowerCase() === name.toLowerCase())
445+
const update = progKey ? (progression as any)[progKey] : undefined
446+
447+
if (outProfile && typeof outProfile === 'object' && !Array.isArray(outProfile) && update && typeof update === 'object') {
448+
if ((update as any).personality) {
449+
;(outProfile as any).personality = (update as any).personality
450+
}
451+
452+
if ((update as any).relationships && typeof (update as any).relationships === 'object') {
453+
;(outProfile as any).relationships = {
454+
...((outProfile as any).relationships || {}),
455+
...((update as any).relationships || {})
456+
}
457+
}
458+
}
459+
460+
merged[name] = outProfile
461+
}
462+
463+
return merged
464+
})
428465
const chatHistoryRef = ref<HTMLElement | null>(null)
429466
const fileInput = ref<HTMLInputElement | null>(null)
430467
const openRouterModels = ref<any[]>([])
@@ -713,6 +750,7 @@ const saveSession = () => {
713750
const sessionData = {
714751
chatHistory: chatHistory.value,
715752
characterProfiles: characterProfiles.value,
753+
characterProgression: characterProgression.value,
716754
storySummary: storySummary.value,
717755
lastSummarizedIndex: lastSummarizedIndex.value,
718756
mode: mode.value,
@@ -769,6 +807,9 @@ const handleFileUpload = (event: Event) => {
769807
if (data.characterProfiles) {
770808
characterProfiles.value = data.characterProfiles
771809
}
810+
if (data.characterProgression) {
811+
characterProgression.value = data.characterProgression
812+
}
772813
if (data.storySummary) {
773814
storySummary.value = data.storySummary
774815
}
@@ -1160,22 +1201,35 @@ const callAI = async (isRetry: boolean = false): Promise<string> => {
11601201
else if (tokenUsage.value === 'goddess') historyLimit = 99999 // Effectively infinite
11611202
11621203
if (tokenUsage.value !== 'goddess') {
1163-
let userMsgCount = 0
1164-
// Count user messages in the unsummarized portion, excluding the current pending prompt
1165-
for (let i = lastSummarizedIndex.value; i < chatHistory.value.length - 1; i++) {
1166-
if (chatHistory.value[i].role === 'user') {
1167-
userMsgCount++
1168-
}
1169-
}
1170-
1171-
if (userMsgCount >= historyLimit) {
1172-
const endIndex = chatHistory.value.length - 1
1173-
const chunkToSummarize = chatHistory.value.slice(lastSummarizedIndex.value, endIndex)
1174-
logDebug(`[callAI] Summarizing ${chunkToSummarize.length} messages (Tumbling Window)...`)
1175-
await summarizeChunk(chunkToSummarize)
1176-
setRandomLoadingMessage()
1177-
lastSummarizedIndex.value = endIndex
1178-
}
1204+
const endIndex = chatHistory.value.length - 1
1205+
1206+
let userMsgCount = 0
1207+
// Count user messages in the unsummarized portion, excluding the current pending prompt
1208+
for (let i = lastSummarizedIndex.value; i < endIndex; i++) {
1209+
if (chatHistory.value[i].role === 'user') {
1210+
userMsgCount++
1211+
}
1212+
}
1213+
1214+
const shouldRetryFailedSummarization = summarizationRetryPending.value && endIndex > lastSummarizedIndex.value
1215+
const shouldSummarizeByLimit = userMsgCount >= historyLimit
1216+
1217+
// If summarization previously failed, retry on next user prompt/retry.
1218+
if (shouldRetryFailedSummarization || shouldSummarizeByLimit) {
1219+
const chunkToSummarize = chatHistory.value.slice(lastSummarizedIndex.value, endIndex)
1220+
logDebug(`[callAI] Summarizing ${chunkToSummarize.length} messages (Tumbling Window)...`)
1221+
const ok = await summarizeChunk(chunkToSummarize)
1222+
setRandomLoadingMessage()
1223+
1224+
if (ok) {
1225+
lastSummarizedIndex.value = endIndex
1226+
summarizationRetryPending.value = false
1227+
summarizationAttemptCount.value = 0
1228+
} else {
1229+
summarizationRetryPending.value = true
1230+
summarizationAttemptCount.value++
1231+
}
1232+
}
11791233
}
11801234
11811235
if (storySummary.value && tokenUsage.value !== 'goddess') {
@@ -1332,22 +1386,35 @@ const callAIWithoutSearch = async (isRetry: boolean = false): Promise<string> =>
13321386
else if (tokenUsage.value === 'goddess') historyLimit = 99999 // Effectively infinite
13331387
13341388
if (tokenUsage.value !== 'goddess') {
1335-
let userMsgCount = 0
1336-
// Count user messages in the unsummarized portion, excluding the current pending prompt
1337-
for (let i = lastSummarizedIndex.value; i < chatHistory.value.length - 1; i++) {
1338-
if (chatHistory.value[i].role === 'user') {
1339-
userMsgCount++
1340-
}
1341-
}
1342-
1343-
if (userMsgCount >= historyLimit) {
1344-
const endIndex = chatHistory.value.length - 1
1345-
const chunkToSummarize = chatHistory.value.slice(lastSummarizedIndex.value, endIndex)
1346-
logDebug(`[callAIWithoutSearch] Summarizing ${chunkToSummarize.length} messages (Tumbling Window)...`)
1347-
await summarizeChunk(chunkToSummarize)
1348-
setRandomLoadingMessage()
1349-
lastSummarizedIndex.value = endIndex
1350-
}
1389+
const endIndex = chatHistory.value.length - 1
1390+
1391+
let userMsgCount = 0
1392+
// Count user messages in the unsummarized portion, excluding the current pending prompt
1393+
for (let i = lastSummarizedIndex.value; i < endIndex; i++) {
1394+
if (chatHistory.value[i].role === 'user') {
1395+
userMsgCount++
1396+
}
1397+
}
1398+
1399+
const shouldRetryFailedSummarization = summarizationRetryPending.value && endIndex > lastSummarizedIndex.value
1400+
const shouldSummarizeByLimit = userMsgCount >= historyLimit
1401+
1402+
// If summarization previously failed, retry on next user prompt/retry.
1403+
if (shouldRetryFailedSummarization || shouldSummarizeByLimit) {
1404+
const chunkToSummarize = chatHistory.value.slice(lastSummarizedIndex.value, endIndex)
1405+
logDebug(`[callAIWithoutSearch] Summarizing ${chunkToSummarize.length} messages (Tumbling Window)...`)
1406+
const ok = await summarizeChunk(chunkToSummarize)
1407+
setRandomLoadingMessage()
1408+
1409+
if (ok) {
1410+
lastSummarizedIndex.value = endIndex
1411+
summarizationRetryPending.value = false
1412+
summarizationAttemptCount.value = 0
1413+
} else {
1414+
summarizationRetryPending.value = true
1415+
summarizationAttemptCount.value++
1416+
}
1417+
}
13511418
}
13521419
13531420
if (storySummary.value && tokenUsage.value !== 'goddess') {
@@ -1755,7 +1822,7 @@ const searchForCharactersPerplexity = async (characterNames: string[]): Promise<
17551822
}
17561823
17571824
const generateSystemPrompt = (enableWebSearch: boolean) => {
1758-
const knownCharacterNames = Object.keys(characterProfiles.value)
1825+
const knownCharacterNames = Object.keys(effectiveCharacterProfiles.value)
17591826
17601827
// Build a minimal character ID lookup for characters mentioned in profiles or current character
17611828
// This prevents massive token usage from including all 200+ characters
@@ -1805,7 +1872,7 @@ const generateSystemPrompt = (enableWebSearch: boolean) => {
18051872
${prompts.systemPrompt.jsonStructure}
18061873
18071874
${prompts.systemPrompt.knownProfiles}
1808-
${knownCharacterNames.length > 0 ? JSON.stringify(characterProfiles.value, null, 2) : '(None yet - this is the first turn, use web search to gather information)'}
1875+
${knownCharacterNames.length > 0 ? JSON.stringify(effectiveCharacterProfiles.value, null, 2) : '(None yet - this is the first turn, use web search to gather information)'}
18091876
18101877
${prompts.systemPrompt.idReference}
18111878
${relevantCharacterIds.length > 0 ? relevantCharacterIds.join(', ') : 'No characters loaded yet. Use the character NAME and the system will resolve it.'}
@@ -2484,17 +2551,52 @@ const executeAction = async (data: any) => {
24842551
logDebug('[AI Debug Info]:', data.debug_info)
24852552
}
24862553
2554+
// Compatibility Fix: Map legacy/hallucinated 'characterProfile' or 'characterProfiles' to 'memory'
2555+
// This ensures they are treated as NEW character definitions and IGNORED if the character already exists.
2556+
if (data.characterProfile || data.characterProfiles) {
2557+
const legacyData = data.characterProfile || data.characterProfiles
2558+
logDebug('[AI Compatibility] Remapping characterProfile/s to memory (Read-Only for existing):', legacyData)
2559+
2560+
if (!data.memory) {
2561+
data.memory = legacyData
2562+
} else {
2563+
data.memory = { ...data.memory, ...legacyData }
2564+
}
2565+
}
2566+
2567+
// Normalize legacy/misused profile updates (e.g., DeepSeek using characterProfile/memory) so they
2568+
// cannot overwrite existing profiles and instead become characterProgression updates.
2569+
data = normalizeAiActionCharacterData(data, characterProfiles.value)
2570+
24872571
if (data.memory) {
24882572
logDebug('[AI Memory Update - New Characters Only]:', data.memory)
24892573
const newProfiles: Record<string, any> = {}
24902574
24912575
for (const [charName, profile] of Object.entries(data.memory)) {
2492-
// Skip if character already exists in profiles
2493-
if (characterProfiles.value[charName]) {
2494-
logDebug(`[AI Memory] Skipping existing character '${charName}' in memory block. Use characterProgression to update.`)
2576+
// 1. Check if already in active profiles (Case-Insensitive)
2577+
const existingKey = Object.keys(characterProfiles.value).find(k => k.toLowerCase() === charName.toLowerCase())
2578+
2579+
if (existingKey) {
2580+
logDebug(`[AI Memory] Skipping existing character '${charName}' (matched '${existingKey}') in memory block. Use characterProgression to update.`)
24952581
continue
24962582
}
24972583
2584+
// 2. Check if in LOCAL profiles (if enabled) - ENFORCE READ-ONLY FROM DB
2585+
if (useLocalProfiles.value) {
2586+
const localKey = Object.keys(localCharacterProfiles).find(k => k.toLowerCase() === charName.toLowerCase())
2587+
if (localKey) {
2588+
logDebug(`[AI Memory] Found local profile for '${charName}' (matched '${localKey}'). IGNORING AI memory and loading local profile instead.`)
2589+
2590+
const localProfile = (localCharacterProfiles as any)[localKey]
2591+
// Add the LOCAL profile to newProfiles, effectively overwriting the AI's suggestion with the correct data
2592+
newProfiles[charName] = {
2593+
...localProfile,
2594+
id: localProfile.id || l2d.find(c => c.name.toLowerCase() === charName.toLowerCase())?.id
2595+
}
2596+
continue
2597+
}
2598+
}
2599+
24982600
if (typeof profile === 'object' && profile !== null) {
24992601
const { honorific_for_commander, honorific_to_commander, honorific, relationships, ...rest } = profile as any
25002602
@@ -2522,44 +2624,45 @@ const executeAction = async (data: any) => {
25222624
// Handle 'characterProgression' - For EXISTING characters (Personality/Relationships ONLY)
25232625
if (data.characterProgression) {
25242626
logDebug('[AI Character Progression]:', data.characterProgression)
2525-
const updatedProfiles = { ...characterProfiles.value }
2627+
const updatedProgression = { ...characterProgression.value }
25262628
let hasUpdates = false
25272629
25282630
for (const [charName, progression] of Object.entries(data.characterProgression)) {
2529-
if (updatedProfiles[charName] && typeof progression === 'object' && progression !== null) {
2530-
const currentProfile = updatedProfiles[charName]
2631+
// Find target profile (Case-Insensitive) in BASE profiles.
2632+
const targetKey = Object.keys(characterProfiles.value).find(k => k.toLowerCase() === charName.toLowerCase())
2633+
const resolvedKey = targetKey || charName
2634+
2635+
if (typeof progression === 'object' && progression !== null) {
25312636
const updates = progression as any
2532-
2533-
// 1. Update Personality if provided
2637+
const current = (updatedProgression as any)[resolvedKey] && typeof (updatedProgression as any)[resolvedKey] === 'object'
2638+
? (updatedProgression as any)[resolvedKey]
2639+
: {}
2640+
25342641
if (updates.personality) {
2535-
currentProfile.personality = updates.personality
2642+
current.personality = updates.personality
25362643
hasUpdates = true
25372644
}
25382645
2539-
// 2. Update Relationships if provided
25402646
if (updates.relationships && typeof updates.relationships === 'object') {
2541-
// Filter Commander out
2542-
const { Commander, commander, ...otherRelationships } = updates.relationships
2543-
const validRelationships = Object.keys(otherRelationships).length > 0 ? otherRelationships : {}
2544-
2545-
currentProfile.relationships = {
2546-
...(currentProfile.relationships || {}),
2547-
...validRelationships
2647+
// NOTE: We allow "Commander" here as a dynamic relationship/attitude field.
2648+
current.relationships = {
2649+
...(current.relationships || {}),
2650+
...(updates.relationships || {})
25482651
}
25492652
hasUpdates = true
25502653
}
25512654
25522655
// CRITICAL: Explicitly IGNORE speech_style updates
25532656
if (updates.speech_style) {
2554-
logDebug(`[AI Character Progression] BLOCKED attempt to change speech_style for '${charName}'`)
2657+
logDebug(`[AI Character Progression] BLOCKED attempt to change speech_style for '${resolvedKey}'`)
25552658
}
2556-
2557-
updatedProfiles[charName] = currentProfile
2659+
2660+
;(updatedProgression as any)[resolvedKey] = current
25582661
}
25592662
}
25602663
25612664
if (hasUpdates) {
2562-
characterProfiles.value = updatedProfiles
2665+
characterProgression.value = updatedProgression
25632666
}
25642667
}
25652668
@@ -2783,15 +2886,19 @@ const resetSession = () => {
27832886
if (confirmed) {
27842887
chatHistory.value = []
27852888
characterProfiles.value = {}
2889+
characterProgression.value = {}
27862890
storySummary.value = ''
27872891
lastSummarizedIndex.value = 0
2892+
summarizationRetryPending.value = false
2893+
summarizationAttemptCount.value = 0
2894+
summarizationLastError.value = null
27882895
lastPrompt.value = ''
27892896
market.live2d.isVisible = false
27902897
}
27912898
}
27922899
2793-
const summarizeChunk = async (messages: { role: string, content: string }[]) => {
2794-
if (messages.length === 0) return
2900+
const summarizeChunk = async (messages: { role: string, content: string }[]): Promise<boolean> => {
2901+
if (messages.length === 0) return true
27952902
27962903
loadingStatus.value = "Summarizing story so far..."
27972904
const textToSummarize = messages.map(m => `${m.role}: ${m.content}`).join('\n\n')
@@ -2811,15 +2918,21 @@ const summarizeChunk = async (messages: { role: string, content: string }[]) =>
28112918
summary = await callOpenRouter(msgs, false)
28122919
}
28132920
2814-
if (summary) {
2921+
if (summary && summary.trim().length > 0) {
28152922
if (storySummary.value) {
28162923
storySummary.value += '\n\n' + summary
28172924
} else {
28182925
storySummary.value = summary
28192926
}
2927+
summarizationLastError.value = null
2928+
return true
28202929
}
2930+
summarizationLastError.value = 'Summarization returned empty output.'
2931+
return false
28212932
} catch (e) {
28222933
console.error('Failed to summarize chunk:', e)
2934+
summarizationLastError.value = e instanceof Error ? e.message : String(e)
2935+
return false
28232936
}
28242937
}
28252938
</script>

0 commit comments

Comments
 (0)