Skip to content

Commit 54e37c8

Browse files
committed
made streaming primary
1 parent 88ed93d commit 54e37c8

13 files changed

Lines changed: 419 additions & 478 deletions

File tree

application/single_app/config.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -94,7 +94,7 @@
9494
EXECUTOR_TYPE = 'thread'
9595
EXECUTOR_MAX_WORKERS = 30
9696
SESSION_TYPE = 'filesystem'
97-
VERSION = "0.239.125"
97+
VERSION = "0.239.127"
9898

9999
SECRET_KEY = os.getenv('SECRET_KEY', 'dev-secret-key-change-in-production')
100100

application/single_app/functions_settings.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -276,7 +276,7 @@ def get_settings(use_cosmos=False):
276276
'enable_external_healthcheck': False,
277277

278278
# Streaming settings
279-
'streamingEnabled': False,
279+
'streamingEnabled': True,
280280

281281
# Reasoning effort settings (per-model)
282282
'reasoningEffortSettings': {},

application/single_app/route_backend_chats.py

Lines changed: 90 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -2904,7 +2904,8 @@ def result_requires_message_reload(result: Any) -> bool:
29042904
'conversation_id': conversation_id,
29052905
'conversation_title': conversation_item['title'],
29062906
'model_deployment_name': image_gen_model,
2907-
'message_id': image_message_id
2907+
'message_id': image_message_id,
2908+
'user_message_id': user_message_id
29082909
}), 200
29092910
except Exception as e:
29102911
debug_print(f"Image generation error: {str(e)}")
@@ -4361,6 +4362,94 @@ def chat_stream_api():
43614362
request_start_time = time.time()
43624363
except Exception as e:
43634364
return jsonify({'error': f'Failed to parse request: {str(e)}'}), 400
4365+
4366+
compatibility_mode = bool(data.get('image_generation')) or bool(
4367+
data.get('retry_user_message_id') or data.get('edited_user_message_id')
4368+
)
4369+
4370+
def normalize_legacy_chat_payload(payload):
4371+
"""Convert the legacy JSON response shape into the streaming terminal payload."""
4372+
return {
4373+
'done': True,
4374+
'conversation_id': payload.get('conversation_id'),
4375+
'conversation_title': payload.get('conversation_title'),
4376+
'classification': payload.get('classification', []),
4377+
'model_deployment_name': payload.get('model_deployment_name'),
4378+
'message_id': payload.get('message_id'),
4379+
'user_message_id': payload.get('user_message_id'),
4380+
'augmented': payload.get('augmented', False),
4381+
'hybrid_citations': payload.get('hybrid_citations', []),
4382+
'web_search_citations': payload.get('web_search_citations', []),
4383+
'agent_citations': payload.get('agent_citations', []),
4384+
'agent_display_name': payload.get('agent_display_name'),
4385+
'agent_name': payload.get('agent_name'),
4386+
'full_content': payload.get('reply', ''),
4387+
'image_url': payload.get('image_url'),
4388+
'reload_messages': payload.get('reload_messages', False),
4389+
'kernel_fallback_notice': payload.get('kernel_fallback_notice'),
4390+
'thoughts_enabled': payload.get('thoughts_enabled', False),
4391+
'blocked': payload.get('blocked', False),
4392+
}
4393+
4394+
def generate_compatibility_response():
4395+
"""Bridge legacy JSON chat handling into a terminal SSE event for parity cases."""
4396+
try:
4397+
if data.get('image_generation'):
4398+
prompt_text = (data.get('message') or '').strip()
4399+
prompt_preview = prompt_text[:120] + '...' if len(prompt_text) > 120 else prompt_text
4400+
4401+
yield f"data: {json.dumps({
4402+
'type': 'thought',
4403+
'step_type': 'generation',
4404+
'content': f'Generating image based on \"{prompt_preview}\"' if prompt_preview else 'Generating image from your prompt'
4405+
})}\n\n"
4406+
4407+
yield f"data: {json.dumps({
4408+
'type': 'thought',
4409+
'step_type': 'generation',
4410+
'content': 'Preparing image model request'
4411+
})}\n\n"
4412+
4413+
legacy_result = chat_api()
4414+
legacy_response = legacy_result
4415+
status_code = 200
4416+
4417+
if isinstance(legacy_result, tuple):
4418+
legacy_response = legacy_result[0]
4419+
if len(legacy_result) > 1 and isinstance(legacy_result[1], int):
4420+
status_code = legacy_result[1]
4421+
4422+
if hasattr(legacy_response, 'get_json'):
4423+
payload = legacy_response.get_json(silent=True) or {}
4424+
else:
4425+
payload = {}
4426+
4427+
if status_code >= 400:
4428+
error_message = payload.get('error') or f'Compatibility chat request failed ({status_code})'
4429+
yield f"data: {json.dumps({'error': error_message})}\n\n"
4430+
return
4431+
4432+
if payload.get('image_url'):
4433+
yield f"data: {json.dumps({
4434+
'type': 'thought',
4435+
'step_type': 'generation',
4436+
'content': 'Image generated and ready to display'
4437+
})}\n\n"
4438+
4439+
yield f"data: {json.dumps(normalize_legacy_chat_payload(payload))}\n\n"
4440+
except Exception as compatibility_error:
4441+
yield f"data: {json.dumps({'error': str(compatibility_error)})}\n\n"
4442+
4443+
if compatibility_mode:
4444+
return Response(
4445+
stream_with_context(generate_compatibility_response()),
4446+
mimetype='text/event-stream',
4447+
headers={
4448+
'Cache-Control': 'no-cache',
4449+
'X-Accel-Buffering': 'no',
4450+
'Connection': 'keep-alive'
4451+
}
4452+
)
43644453

43654454
def generate():
43664455
try:

application/single_app/static/js/chat/chat-edit.js

Lines changed: 33 additions & 54 deletions
Original file line number · Diff line number · Diff line change
@@ -3,6 +3,7 @@
33

44
import { showToast } from './chat-toast.js';
55
import { showLoadingIndicatorInChatbox, hideLoadingIndicatorInChatbox } from './chat-loading-indicator.js';
6+
import { sendMessageWithStreaming } from './chat-streaming.js';
67

78
/**
89
* Handle edit button click - opens edit modal
@@ -146,70 +147,48 @@ window.executeMessageEdit = function() {
146147
console.log(' retry_thread_id:', data.chat_request.retry_thread_id);
147148
console.log(' retry_thread_attempt:', data.chat_request.retry_thread_attempt);
148149
console.log(' Full chat_request:', data.chat_request);
149-
150-
// Call chat API with the edit parameters
151-
return fetch('/api/chat', {
152-
method: 'POST',
153-
headers: {
154-
'Content-Type': 'application/json',
155-
},
156-
credentials: 'same-origin',
157-
body: JSON.stringify(data.chat_request)
158-
});
150+
151+
const streamStarted = sendMessageWithStreaming(
152+
data.chat_request,
153+
null,
154+
data.chat_request.conversation_id,
155+
{
156+
onDone: () => {
157+
const conversationId = window.chatConversations?.getCurrentConversationId() || data.chat_request.conversation_id;
158+
if (conversationId) {
159+
import('./chat-messages.js').then(module => {
160+
module.loadMessages(conversationId);
161+
}).catch(err => {
162+
console.error('❌ Error loading chat-messages module:', err);
163+
showToast('Failed to reload messages', 'error');
164+
});
165+
}
166+
},
167+
onError: (errorMessage) => {
168+
showToast(`Edit failed: ${errorMessage}`, 'error');
169+
},
170+
onFinally: () => {
171+
hideLoadingIndicatorInChatbox();
172+
}
173+
}
174+
);
175+
176+
if (!streamStarted) {
177+
throw new Error('Failed to start streaming edit response');
178+
}
179+
180+
return null;
159181
} else {
160182
throw new Error('Edit response missing chat_request');
161183
}
162184
})
163-
.then(response => {
164-
if (!response.ok) {
165-
return response.json().then(data => {
166-
throw new Error(data.error || 'Chat API failed');
167-
});
168-
}
169-
return response.json();
170-
})
171-
.then(chatData => {
172-
console.log('✅ Chat API response:', chatData);
173-
174-
// Hide typing indicator
175-
hideLoadingIndicatorInChatbox();
176-
console.log('🧹 Typing indicator removed');
177-
178-
// Get current conversation ID using the proper API
179-
const conversationId = window.chatConversations?.getCurrentConversationId();
180-
181-
console.log(`🔍 Current conversation ID: ${conversationId}`);
182-
183-
// Reload messages to show edited message and new response
184-
if (conversationId) {
185-
console.log('🔄 Reloading messages for conversation:', conversationId);
186-
187-
// Import loadMessages dynamically
188-
import('./chat-messages.js').then(module => {
189-
console.log('📦 chat-messages.js module loaded, calling loadMessages...');
190-
module.loadMessages(conversationId);
191-
// No toast - the reloaded messages are enough feedback
192-
}).catch(err => {
193-
console.error('❌ Error loading chat-messages module:', err);
194-
showToast('error', 'Failed to reload messages');
195-
});
196-
} else {
197-
console.error('❌ No currentConversationId found!');
198-
199-
// Try to force a page refresh as fallback
200-
console.log('🔄 Attempting page refresh as fallback...');
201-
setTimeout(() => {
202-
window.location.reload();
203-
}, 1000);
204-
}
205-
})
206185
.catch(error => {
207186
console.error('❌ Edit error:', error);
208187

209188
// Hide typing indicator on error
210189
hideLoadingIndicatorInChatbox();
211190

212-
showToast('error', `Edit failed: ${error.message}`);
191+
showToast(`Edit failed: ${error.message}`, 'error');
213192
})
214193
.finally(() => {
215194
// Clean up pending edit

application/single_app/static/js/chat/chat-input-actions.js

Lines changed: 0 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -319,7 +319,6 @@ if (imageGenBtn) {
319319
const docBtn = document.getElementById("search-documents-btn");
320320
const webBtn = document.getElementById("search-web-btn");
321321
const fileBtn = document.getElementById("choose-file-btn");
322-
const streamingBtn = document.getElementById("streaming-toggle-btn");
323322
const modelSelectContainer = document.getElementById("model-select-container");
324323

325324
if (isImageGenEnabled) {
@@ -335,21 +334,13 @@ if (imageGenBtn) {
335334
fileBtn.disabled = true;
336335
fileBtn.classList.remove("active");
337336
}
338-
// Hide streaming toggle and model selector for image generation
339-
if (streamingBtn) {
340-
streamingBtn.style.display = "none";
341-
}
342337
if (modelSelectContainer) {
343338
modelSelectContainer.style.display = "none";
344339
}
345340
} else {
346341
if (docBtn) docBtn.disabled = false;
347342
if (webBtn) webBtn.disabled = false;
348343
if (fileBtn) fileBtn.disabled = false;
349-
// Show streaming toggle and model selector when not in image generation mode
350-
if (streamingBtn) {
351-
streamingBtn.style.display = "flex";
352-
}
353344
if (modelSelectContainer) {
354345
modelSelectContainer.style.display = "block";
355346
}

0 commit comments

Comments (0)