Skip to content

Added queue for the user message with limit of 5 #1

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
86 changes: 81 additions & 5 deletions apps/studio/src/lib/editor/engine/chat/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,16 @@ import {
ChatMessageRole,
StreamRequestType,
type AssistantChatMessage,
type ChatMessageContext,
type CompletedStreamResponse,
type ErrorStreamResponse,
type QueuedMessage,
type RateLimitedStreamResponse,
} from '@onlook/models/chat';
import { MainChannels } from '@onlook/models/constants';
import type { ParsedError } from '@onlook/utility';
import type { CoreMessage } from 'ai';
import { makeAutoObservable } from 'mobx';
import { makeAutoObservable, runInAction } from 'mobx';
import { nanoid } from 'nanoid/non-secure';
import type { EditorEngine } from '..';
import { ChatCodeManager } from './code';
Expand All @@ -31,13 +33,17 @@ export class ChatManager {
context: ChatContext;
stream: StreamResolver;
suggestions: SuggestionManager;
messageQueue: QueuedMessage[] = [];
private maxQueueSize = 5;

constructor(
private editorEngine: EditorEngine,
private projectsManager: ProjectsManager,
private userManager: UserManager,
) {
makeAutoObservable(this);
makeAutoObservable(this, {
messageQueue: true,
});
this.context = new ChatContext(this.editorEngine, this.projectsManager);
this.conversation = new ConversationManager(this.editorEngine, this.projectsManager);
this.stream = new StreamResolver();
Expand All @@ -49,14 +55,41 @@ export class ChatManager {
window.dispatchEvent(new Event(FOCUS_CHAT_INPUT_EVENT));
}

async sendNewMessage(content: string): Promise<void> {
/** Number of user messages currently waiting in the send queue (max 5). */
get queueSize(): number {
    return this.messageQueue.length;
}

async processMessageQueue() {
if (this.messageQueue.length === 0 || this.isWaiting) {
return;
}
const nextMessage = this.messageQueue.shift()!;
await this.processMessage(nextMessage.content, nextMessage.context);

if (this.messageQueue.length > 0) {
await this.processMessageQueue();
}
}

private async processMessage(content: string, context?: ChatMessageContext[]) {
if (!this.conversation.current) {
console.error('No conversation found');
return;
}

const context = await this.context.getChatContext();
const userMessage = this.conversation.addUserMessage(content, context);
if (this.isWaiting) {
if (this.messageQueue.length >= this.maxQueueSize) {
console.warn('Message queue is full');
return;
}
runInAction(() => {
this.messageQueue.push({ content, context });
});
return;
}

const messageContext = context ?? (await this.context.getChatContext());
const userMessage = this.conversation.addUserMessage(content, messageContext);
this.conversation.current.updateName(content);
if (!userMessage) {
console.error('Failed to add user message');
Expand All @@ -68,6 +101,27 @@ export class ChatManager {
await this.sendChatToAi(StreamRequestType.CHAT, content);
}

async sendNewMessage(content: string): Promise<void> {
if (!this.conversation.current) {
console.error('No conversation found');
return;
}

if (this.isWaiting) {
if (this.messageQueue.length >= this.maxQueueSize) {
console.warn('Message queue is full');
return;
}
runInAction(() => {
this.messageQueue.push({ content });
});
return;
}

await this.processMessage(content);
this.processMessageQueue();
}

async sendFixErrorToAi(errors: ParsedError[]): Promise<boolean> {
if (!this.conversation.current) {
console.error('No conversation found');
Expand All @@ -77,6 +131,22 @@ export class ChatManager {
const prompt = `How can I resolve these errors? If you propose a fix, please make it concise.`;
const errorContexts = this.context.getMessageContext(errors);
const projectContexts = this.context.getProjectContext();

if (this.isWaiting) {
if (this.messageQueue.length >= this.maxQueueSize) {
console.warn('Message queue is full');
return false;
}
runInAction(() => {
this.messageQueue.push({
content: prompt,
context: [...errorContexts, ...projectContexts],
});
});
console.log(`Error-fix message queued. Queue size: ${this.messageQueue.length}`);
return true;
}

const userMessage = this.conversation.addUserMessage(prompt, [
...errorContexts,
...projectContexts,
Expand Down Expand Up @@ -139,6 +209,10 @@ export class ChatManager {
invokeMainChannel(MainChannels.SEND_STOP_STREAM_REQUEST, {
requestId,
});

runInAction(() => {
this.messageQueue = [];
});
sendAnalytics('stop chat stream');
}

Expand Down Expand Up @@ -204,6 +278,8 @@ export class ChatManager {
}

this.context.clearAttachments();

await this.processMessageQueue();
}

handleNewCoreMessages(messages: CoreMessage[]) {
Expand Down
13 changes: 12 additions & 1 deletion apps/studio/src/routes/editor/EditPanel/ChatTab/ChatInput.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ export const ChatInput = observer(() => {
const [actionTooltipOpen, setActionTooltipOpen] = useState(false);
const [isDragging, setIsDragging] = useState(false);

const queueSize = editorEngine.chat.queueSize;
const MAX_QUEUE_SIZE = 5;

const focusInput = () => {
requestAnimationFrame(() => {
textareaRef.current?.focus();
Expand Down Expand Up @@ -428,6 +431,11 @@ export const ChatInput = observer(() => {
<span className="text-smallPlus">File Reference</span>
</Button>
</div>
{queueSize > 0 && (
<span className="text-micro text-foreground-secondary">
{queueSize} message{queueSize > 1 ? 's' : ''} queued
</span>
)}
{editorEngine.chat.isWaiting ? (
<Tooltip open={actionTooltipOpen} onOpenChange={setActionTooltipOpen}>
<TooltipTrigger asChild>
Expand All @@ -450,7 +458,10 @@ export const ChatInput = observer(() => {
size={'icon'}
variant={'secondary'}
className="text-smallPlus w-fit h-full py-0.5 px-2.5 text-primary"
disabled={inputEmpty || editorEngine.chat.isWaiting}
disabled={
inputEmpty ||
(editorEngine.chat.isWaiting && queueSize >= MAX_QUEUE_SIZE)
}
onClick={sendMessage}
>
<Icons.ArrowRight />
Expand Down
12 changes: 11 additions & 1 deletion packages/models/src/chat/conversation/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import type { AssistantChatMessage, ChatMessage, TokenUsage } from '../message/index.ts';
import type {
AssistantChatMessage,
ChatMessage,
ChatMessageContext,
TokenUsage,
} from '../message/index.ts';

export type ChatConversation = {
id: string;
Expand All @@ -10,3 +15,8 @@ export type ChatConversation = {
summaryMessage?: AssistantChatMessage | null;
tokenUsage?: TokenUsage;
};

/**
 * A chat message waiting to be sent while a stream request is in flight.
 */
export type QueuedMessage = {
    // Raw user message text.
    content: string;
    // Optional pre-captured message context; when omitted, the consumer
    // resolves context at send time instead of enqueue time.
    context?: ChatMessageContext[];
};