server : (webui) revamp the input area, plus many small UI improvements (#13365)

* rework the input area

* process selected file

* change all icons to heroicons

* fix thought process collapse

* move conversation more menu to sidebar

* sun icon --> moon icon

* rm default system message

* stricter upload file check, only allow image if server has mtmd

* build it

* add renaming

* better autoscroll

* build

* add conversation group

* fix scroll

* extra context first, then user input in the end

* fix <hr> tag

* clean up a bit

* build

* add mb-3 for <pre>

* throttle adjustTextareaHeight to make it less laggy (see the sketch after this list)

* (nits) missing padding in sidebar

* rm stray console log
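
As a rough illustration of the throttling change mentioned in the list above: a minimal sketch, assuming adjustTextareaHeight() is the resize handler wired to the textarea's input event. The throttle() helper and the 100 ms interval are illustrative, not taken from this commit.

// Minimal throttle sketch (illustrative; not the exact code from this commit).
// It runs fn at most once per delayMs, dropping calls that arrive in between.
function throttle<A extends unknown[]>(
  fn: (...args: A) => void,
  delayMs: number
): (...args: A) => void {
  let lastCall = 0;
  return (...args: A) => {
    const now = Date.now();
    if (now - lastCall >= delayMs) {
      lastCall = now;
      fn(...args);
    }
  };
}

// Hypothetical wiring: resize the textarea at most every 100 ms while typing.
// const onInput = throttle(adjustTextareaHeight, 100);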
Author: Xuan-Son Nguyen
Date: 2025-05-08 15:37:29 +02:00
Committed by: GitHub
Parent commit: 1a844be132
Commit: 8c83449cb7
23 changed files with 1142 additions and 320 deletions


@@ -1,6 +1,11 @@
 // @ts-expect-error this package does not have typing
 import TextLineStream from 'textlinestream';
-import { APIMessage, Message } from './types';
+import {
+  APIMessage,
+  APIMessageContentPart,
+  LlamaCppServerProps,
+  Message,
+} from './types';
 // ponyfill for missing ReadableStream asyncIterator on Safari
 import { asyncIterator } from '@sec-ant/readable-stream/ponyfill/asyncIterator';
@@ -57,19 +62,47 @@ export const copyStr = (textToCopy: string) => {
  */
 export function normalizeMsgsForAPI(messages: Readonly<Message[]>) {
   return messages.map((msg) => {
-    let newContent = '';
+    if (msg.role !== 'user' || !msg.extra) {
+      return {
+        role: msg.role,
+        content: msg.content,
+      } as APIMessage;
+    }
+    // extra content first, then user text message in the end
+    // this allow re-using the same cache prefix for long context
+    const contentArr: APIMessageContentPart[] = [];
     for (const extra of msg.extra ?? []) {
       if (extra.type === 'context') {
-        newContent += `${extra.content}\n\n`;
+        contentArr.push({
+          type: 'text',
+          text: extra.content,
+        });
+      } else if (extra.type === 'textFile') {
+        contentArr.push({
+          type: 'text',
+          text: `File: ${extra.name}\nContent:\n\n${extra.content}`,
+        });
+      } else if (extra.type === 'imageFile') {
+        contentArr.push({
+          type: 'image_url',
+          image_url: { url: extra.base64Url },
+        });
+      } else {
+        throw new Error('Unknown extra type');
       }
     }
-    newContent += msg.content;
+    // add user message to the end
+    contentArr.push({
+      type: 'text',
+      text: msg.content,
+    });
     return {
       role: msg.role,
-      content: newContent,
+      content: contentArr,
     };
   }) as APIMessage[];
 }
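
For readers skimming the hunk above, here is a rough sketch of the shape the reworked normalizeMsgsForAPI produces. The Message and extra fields are simplified from './types', and the literal strings are made up.

// Simplified input: one pasted-context extra plus one image attachment.
const userMsg = {
  role: 'user' as const,
  content: 'What does this diagram show?',
  extra: [
    { type: 'context', content: 'Notes pasted alongside the question' },
    { type: 'imageFile', name: 'diagram.png', base64Url: 'data:image/png;base64,...' },
  ],
};

// Expected output shape: extra content first, the user's typed text at the end.
const apiMsg = {
  role: 'user' as const,
  content: [
    { type: 'text', text: 'Notes pasted alongside the question' },
    { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
    { type: 'text', text: 'What does this diagram show?' },
  ],
};

Putting the long, stable extras first keeps the tokenized prefix identical across turns, which is what lets the server reuse its prompt cache.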
@@ -78,13 +111,19 @@ export function normalizeMsgsForAPI(messages: Readonly<Message[]>) {
  * recommended for DeepsSeek-R1, filter out content between <think> and </think> tags
  */
 export function filterThoughtFromMsgs(messages: APIMessage[]) {
+  console.debug({ messages });
   return messages.map((msg) => {
+    if (msg.role !== 'assistant') {
+      return msg;
+    }
+    // assistant message is always a string
+    const contentStr = msg.content as string;
     return {
       role: msg.role,
       content:
         msg.role === 'assistant'
-          ? msg.content.split('</think>').at(-1)!.trim()
-          : msg.content,
+          ? contentStr.split('</think>').at(-1)!.trim()
+          : contentStr,
     } as APIMessage;
   });
 }
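
As a quick sanity check on the filtering above, a made-up example; the strings are illustrative only.

// Everything up to the last '</think>' in an assistant message is dropped.
const msgs = [
  { role: 'user' as const, content: 'Say hello.' },
  {
    role: 'assistant' as const,
    content: '<think>The user wants a short greeting.</think>Hello!',
  },
];

// filterThoughtFromMsgs(msgs) would leave the user message untouched and
// reduce the assistant content to 'Hello!'.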
@@ -126,3 +165,25 @@ export const cleanCurrentUrl = (removeQueryParams: string[]) => {
   });
   window.history.replaceState({}, '', url.toString());
 };
+export const getServerProps = async (
+  baseUrl: string,
+  apiKey?: string
+): Promise<LlamaCppServerProps> => {
+  try {
+    const response = await fetch(`${baseUrl}/props`, {
+      headers: {
+        'Content-Type': 'application/json',
+        ...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {}),
+      },
+    });
+    if (!response.ok) {
+      throw new Error('Failed to fetch server props');
+    }
+    const data = await response.json();
+    return data as LlamaCppServerProps;
+  } catch (error) {
+    console.error('Error fetching server props:', error);
+    throw error;
+  }
+};
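
A usage sketch for the new getServerProps helper, assuming it runs once at app startup and that the function is imported from the utils module shown above. The multimodal flag in the comment is a hypothetical field name; the real LlamaCppServerProps shape is defined in './types' and not shown in this hunk.

// Usage sketch; http://localhost:8080 is the default llama-server address.
async function loadServerProps(): Promise<void> {
  try {
    const props = await getServerProps('http://localhost:8080');
    console.log('server props:', props);
    // The webui could gate the image-upload button on a multimodal flag here,
    // matching the "only allow image if server has mtmd" rule from the commit
    // message; `hasMultimodal` is a hypothetical field name.
    // const allowImages = Boolean((props as { hasMultimodal?: boolean }).hasMultimodal);
  } catch {
    console.error('could not reach the server /props endpoint');
  }
}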