webui: add OAI-Compat Harmony tool-call streaming visualization and persistence in chat UI (#16618)

* webui: add OAI-Compat Harmony tool-call live streaming visualization and persistence in chat UI

- Purely visual and diagnostic change, no effect on model context, prompt
  construction, or inference behavior

- Captured assistant tool call payloads during streaming and non-streaming
  completions, and persisted them in chat state and storage for downstream use

- Exposed parsed tool call labels beneath the assistant's model info line
  with graceful fallback when parsing fails

- Added tool call badges beneath assistant responses that expose JSON tooltips
  and copy their payloads when clicked, matching the existing model badge styling

- Added a user-facing setting to the Developer settings section, directly
  under the model selector option, to toggle tool call visibility

* webui: remove scroll listener causing unnecessary layout updates (model selector)

* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <aleksander.grygier@gmail.com>

* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <aleksander.grygier@gmail.com>

* chore: npm run format & update webui build output

* chore: update webui build output

---------

Co-authored-by: Aleksander Grygier <aleksander.grygier@gmail.com>
This commit is contained in:
Pascal
2025-11-15 21:09:32 +01:00
committed by GitHub
parent 662192e1dc
commit 1411d9275a
12 changed files with 376 additions and 19 deletions
@@ -205,6 +205,7 @@ class ChatStore {
type,
timestamp: Date.now(),
thinking: '',
toolCalls: '',
children: [],
extra: extras
},
@@ -360,6 +361,7 @@ class ChatStore {
): Promise<void> {
let streamedContent = '';
let streamedReasoningContent = '';
let streamedToolCallContent = '';
let resolvedModel: string | null = null;
let modelPersisted = false;
@@ -468,6 +470,20 @@ class ChatStore {
this.updateMessageAtIndex(messageIndex, { thinking: streamedReasoningContent });
},
onToolCallChunk: (toolCallChunk: string) => {
const chunk = toolCallChunk.trim();
if (!chunk) {
return;
}
streamedToolCallContent = chunk;
const messageIndex = this.findMessageIndex(assistantMessage.id);
this.updateMessageAtIndex(messageIndex, { toolCalls: streamedToolCallContent });
},
onModel: (modelName: string) => {
recordModel(modelName);
},
@@ -475,18 +491,21 @@ class ChatStore {
onComplete: async (
finalContent?: string,
reasoningContent?: string,
timings?: ChatMessageTimings
timings?: ChatMessageTimings,
toolCallContent?: string
) => {
slotsService.stopStreaming();
const updateData: {
content: string;
thinking: string;
toolCalls: string;
timings?: ChatMessageTimings;
model?: string;
} = {
content: finalContent || streamedContent,
thinking: reasoningContent || streamedReasoningContent,
toolCalls: toolCallContent || streamedToolCallContent,
timings: timings
};
@@ -499,7 +518,11 @@ class ChatStore {
const messageIndex = this.findMessageIndex(assistantMessage.id);
const localUpdateData: { timings?: ChatMessageTimings; model?: string } = {
const localUpdateData: {
timings?: ChatMessageTimings;
model?: string;
toolCalls?: string;
} = {
timings: timings
};
@@ -507,6 +530,10 @@ class ChatStore {
localUpdateData.model = updateData.model;
}
if (updateData.toolCalls !== undefined) {
localUpdateData.toolCalls = updateData.toolCalls;
}
this.updateMessageAtIndex(messageIndex, localUpdateData);
await DatabaseStore.updateCurrentNode(assistantMessage.convId, assistantMessage.id);
@@ -620,6 +647,7 @@ class ChatStore {
content: '',
timestamp: Date.now(),
thinking: '',
toolCalls: '',
children: [],
model: null
},
@@ -1443,6 +1471,7 @@ class ChatStore {
role: messageToEdit.role,
content: newContent,
thinking: messageToEdit.thinking || '',
toolCalls: messageToEdit.toolCalls || '',
children: [],
model: messageToEdit.model // Preserve original model info when branching
},
@@ -1518,6 +1547,7 @@ class ChatStore {
role: messageToEdit.role,
content: newContent,
thinking: messageToEdit.thinking || '',
toolCalls: messageToEdit.toolCalls || '',
children: [],
extra: messageToEdit.extra ? JSON.parse(JSON.stringify(messageToEdit.extra)) : undefined,
model: messageToEdit.model // Preserve original model info when branching
@@ -1589,6 +1619,7 @@ class ChatStore {
role: 'assistant',
content: '',
thinking: '',
toolCalls: '',
children: [],
model: null
},
@@ -1647,6 +1678,7 @@ class ChatStore {
role: 'assistant',
content: '',
thinking: '',
toolCalls: '',
children: [],
model: null
},