Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: centralize LLM communication tracking in chat model #15146

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions packages/ai-chat-ui/src/browser/chat-view-widget.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { CommandService, deepClone, Emitter, Event, MessageService } from '@theia/core';
import { ChatRequest, ChatRequestModel, ChatService, ChatSession } from '@theia/ai-chat';
import { ChatRequest, ChatRequestModel, ChatService, ChatSession, isActiveSessionChangedEvent } from '@theia/ai-chat';
import { BaseWidget, codicon, ExtractableWidget, Message, PanelLayout, PreferenceService, StatefulWidget } from '@theia/core/lib/browser';
import { nls } from '@theia/core/lib/common/nls';
import { inject, injectable, postConstruct } from '@theia/core/shared/inversify';
Expand Down Expand Up @@ -114,7 +114,10 @@ export class ChatViewWidget extends BaseWidget implements ExtractableWidget, Sta

protected initListeners(): void {
this.toDispose.push(
this.chatService.onActiveSessionChanged(event => {
this.chatService.onSessionEvent(event => {
if (!isActiveSessionChangedEvent(event)) {
return;
}
const session = event.sessionId ? this.chatService.getSession(event.sessionId) : this.chatService.createSession();
if (session) {
this.chatSession = session;
Expand Down
8 changes: 8 additions & 0 deletions packages/ai-chat/src/browser/ai-chat-frontend-module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ import { FileChatVariableContribution } from './file-chat-variable-contribution'
import { ContextSummaryVariableContribution } from '../common/context-summary-variable';
import { ContextDetailsVariableContribution } from '../common/context-details-variable';
import { ChangeSetVariableContribution } from './change-set-variable';
import { ChatLanguageModelService, ChatLanguageModelServiceImpl } from '../common/chat-language-model-service';
import { ChatCommunicationRecorder } from '../common/chat-communication-recorder';

export default new ContainerModule(bind => {
bindContributionProvider(bind, Agent);
Expand All @@ -56,6 +58,9 @@ export default new ContainerModule(bind => {
bind(ChatAgentService).toService(ChatAgentServiceImpl);
bind(PinChatAgent).toConstantValue(true);

bind(ChatCommunicationRecorder).toSelf().inSingletonScope();
bind(FrontendApplicationContribution).toService(ChatCommunicationRecorder);

bindContributionProvider(bind, ResponseContentMatcherProvider);
bind(DefaultResponseContentMatcherProvider).toSelf().inSingletonScope();
bind(ResponseContentMatcherProvider).toService(DefaultResponseContentMatcherProvider);
Expand All @@ -69,6 +74,9 @@ export default new ContainerModule(bind => {
bind(FrontendChatServiceImpl).toSelf().inSingletonScope();
bind(ChatService).toService(FrontendChatServiceImpl);

bind(ChatLanguageModelServiceImpl).toSelf().inSingletonScope();
bind(ChatLanguageModelService).toService(ChatLanguageModelServiceImpl);

bind(PreferenceContribution).toConstantValue({ schema: aiChatPreferences });

bind(CustomChatAgent).toSelf();
Expand Down
47 changes: 13 additions & 34 deletions packages/ai-chat/src/common/chat-agents.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
import {
AgentSpecificVariables,
AIVariableContext,
CommunicationRecordingService,
getTextOfResponse,
LanguageModel,
LanguageModelRequirement,
Expand All @@ -42,7 +41,7 @@ import {
LanguageModelStreamResponsePart,
MessageActor,
} from '@theia/ai-core/lib/common';
import { CancellationToken, ContributionProvider, ILogger, isArray } from '@theia/core';
import { ContributionProvider, ILogger, isArray } from '@theia/core';
import { inject, injectable, named, postConstruct } from '@theia/core/shared/inversify';
import { ChatAgentService } from './chat-agent-service';
import {
Expand All @@ -56,8 +55,8 @@ import {
} from './chat-model';
import { findFirstMatch, parseContents } from './parse-contents';
import { DefaultResponseContentFactory, ResponseContentMatcher, ResponseContentMatcherProvider } from './response-content-matcher';
import { ChatHistoryEntry } from './chat-history-entry';
import { ChatToolRequestService } from './chat-tool-request-service';
import { ChatToolRequest, ChatToolRequestService } from './chat-tool-request-service';
import { ChatLanguageModelService } from './chat-language-model-service';

/**
* A conversation consists of a sequence of ChatMessages.
Expand Down Expand Up @@ -138,8 +137,8 @@ export interface ChatAgent extends Agent {
export abstract class AbstractChatAgent implements ChatAgent {
@inject(LanguageModelRegistry) protected languageModelRegistry: LanguageModelRegistry;
@inject(ILogger) protected logger: ILogger;
@inject(CommunicationRecordingService) protected recordingService: CommunicationRecordingService;
@inject(ChatToolRequestService) protected chatToolRequestService: ChatToolRequestService;
@inject(ChatLanguageModelService) protected chatLanguageModelService: ChatLanguageModelService;
@inject(PromptService) protected promptService: PromptService;

@inject(ContributionProvider) @named(ResponseContentMatcherProvider)
Expand All @@ -160,7 +159,6 @@ export abstract class AbstractChatAgent implements ChatAgent {
agentSpecificVariables: AgentSpecificVariables[] = [];
functions: string[] = [];
protected readonly abstract defaultLanguageModelPurpose: string;
protected defaultLogging: boolean = true;
protected systemPromptId: string | undefined = undefined;
protected additionalToolRequests: ToolRequest[] = [];
protected contentMatchers: ResponseContentMatcher[] = [];
Expand All @@ -183,15 +181,6 @@ export abstract class AbstractChatAgent implements ChatAgent {
}
const systemMessageDescription = await this.getSystemMessageDescription({ model: request.session, request } satisfies ChatSessionContext);
const messages = await this.getMessages(request.session);
if (this.defaultLogging) {
this.recordingService.recordRequest(
ChatHistoryEntry.fromRequest(
this.id, request, {
messages,
systemMessage: systemMessageDescription?.text
})
);
}

if (systemMessageDescription) {
const systemMsg: ChatMessage = {
Expand All @@ -210,17 +199,11 @@ export abstract class AbstractChatAgent implements ChatAgent {
...this.chatToolRequestService.toChatToolRequests(this.additionalToolRequests, request)
];

const languageModelResponse = await this.callLlm(
languageModel,
messages,
tools.length > 0 ? tools : undefined,
request.response.cancellationToken
);
const languageModelResponse = await this.sendLlmRequest(request, messages, tools, languageModel);

await this.addContentsToResponse(languageModelResponse, request);
await this.onResponseComplete(request);
if (this.defaultLogging) {
this.recordingService.recordResponse(ChatHistoryEntry.fromResponse(this.id, request));
}

} catch (e) {
this.handleError(request, e);
}
Expand Down Expand Up @@ -288,19 +271,15 @@ export abstract class AbstractChatAgent implements ChatAgent {
return requestMessages;
}

protected async callLlm(
languageModel: LanguageModel,
protected async sendLlmRequest(
request: MutableChatRequestModel,
messages: ChatMessage[],
tools: ToolRequest[] | undefined,
token: CancellationToken
toolRequests: ChatToolRequest[],
languageModel: LanguageModel
): Promise<LanguageModelResponse> {
const settings = this.getLlmSettings();
const languageModelResponse = languageModel.request({
messages,
tools,
settings,
}, token);
return languageModelResponse;
const tools = toolRequests.length > 0 ? toolRequests : undefined;
return this.chatLanguageModelService.sendRequest({ messages, tools, settings }, request, languageModel);
}

/**
Expand Down
89 changes: 89 additions & 0 deletions packages/ai-chat/src/common/chat-communication-recorder.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

import { CommunicationRecordingService } from '@theia/ai-core';
import { DisposableCollection } from '@theia/core';
import { inject, injectable, postConstruct } from '@theia/core/shared/inversify';
import { ChatHistoryEntry } from './chat-history-entry';
import {
ActiveSessionChangedEvent,
ChatService,
isSessionCreatedEvent,
isSessionDeletedEvent,
SessionCreatedEvent,
SessionDeletedEvent
} from './chat-service';

@injectable()
export class ChatCommunicationRecorder {
    /** Per-session disposables (model + response listeners), keyed by session id. */
    private readonly chatModelListeners = new Map<string, DisposableCollection>();
    /** Session ids already being listened to, to avoid double registration. */
    private readonly trackedSessions = new Set<string>();
    /** Request ids currently guarded against recording their response twice. */
    private readonly recordedResponses = new Set<string>();

    @inject(ChatService)
    private readonly chatService: ChatService;

    @inject(CommunicationRecordingService)
    protected recordingService: CommunicationRecordingService;

    @postConstruct()
    protected initialize(): void {
        // NOTE(review): this subscription is intentionally never disposed —
        // presumably this contribution lives for the whole application; confirm.
        this.chatService.onSessionEvent(this.handleSessionEvent.bind(this));
    }

    /** Routes session lifecycle events to setup/teardown of per-session tracking. */
    private handleSessionEvent(event: ActiveSessionChangedEvent | SessionCreatedEvent | SessionDeletedEvent): void {
        if (isSessionCreatedEvent(event)) {
            this.setupSessionListener(event.sessionId);
        } else if (isSessionDeletedEvent(event)) {
            this.cleanupSessionListener(event.sessionId);
        }
    }

    /**
     * Starts recording every request added to the session's model and, once
     * complete, its response. No-op when the session cannot be resolved or is
     * already tracked.
     */
    private setupSessionListener(sessionId: string): void {
        const session = this.chatService.getSession(sessionId);
        if (!session || this.trackedSessions.has(sessionId)) { return; }

        const toDispose = new DisposableCollection();
        this.trackedSessions.add(sessionId);
        // Key both structures by the SAME id so cleanupSessionListener always
        // finds the collection (the original mixed `sessionId` and `session.id`;
        // they should be equal, but using one key removes the ambiguity).
        this.chatModelListeners.set(sessionId, toDispose);

        toDispose.push(
            session.model.onDidChange(modelChangeEvent => {
                if (modelChangeEvent.kind !== 'addRequest') { return; }

                const { request } = modelChangeEvent;
                const agentId = request.agentId || 'unknown';

                this.recordingService.recordRequest(ChatHistoryEntry.fromRequest(agentId, request));

                // Record the response exactly once, when complete, then dispose
                // this listener so it stops firing for the rest of the session
                // (the original leaked one live listener per request).
                const responseListener = request.response.onDidChange(() => {
                    if (request.response.isComplete && !this.recordedResponses.has(request.id)) {
                        // Add the guard BEFORE recording to protect against
                        // reentrant change events fired during recordResponse.
                        this.recordedResponses.add(request.id);
                        this.recordingService.recordResponse(ChatHistoryEntry.fromResponse(agentId, request));
                        responseListener.dispose();
                        // The listener is gone, so no further events can arrive
                        // for this request; release the guard entry to keep the
                        // set from growing unboundedly (the original never did).
                        this.recordedResponses.delete(request.id);
                    }
                });
                toDispose.push(responseListener);
            })
        );
    }

    /** Stops tracking the session and disposes all listeners registered for it. */
    private cleanupSessionListener(sessionId: string): void {
        this.trackedSessions.delete(sessionId);
        this.chatModelListeners.get(sessionId)?.dispose();
        this.chatModelListeners.delete(sessionId);
    }
}
1 change: 1 addition & 0 deletions packages/ai-chat/src/common/chat-history-entry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ export namespace ChatHistoryEntry {
sessionId: request.session.id,
requestId: request.id,
request: request.request.text,
llmRequests: request.llmRequests,
...args,
};
}
Expand Down
Loading
Loading