feat(context-agent): tool status callbacks and process support (#6451)
Closes https://linear.app/sourcegraph/issue/CODY-4280 and
https://linear.app/sourcegraph/issue/CODY-4282

![Screenshot 2024-12-27 at 12:27:50 AM](https://github.com/user-attachments/assets/a5d385c1-d932-40f8-914b-99d56cfb5270)

This change introduces the following improvements to the Cody tool
system:

1. Added support for displaying a loading indicator while context is being
loaded, as shown in
[Figma](https://www.figma.com/design/f078wFMKsIOaEwj7Iwj5xy/Unified-Cody?node-id=7560-18281&t=Un5MlNVhg9RBCImr-0).
2. Updated the step message item to display the step ID and content in a
more compact and readable format.
3. Added support for displaying an error icon when a step fails.
4. Adjusted the styling and layout of the step message items to improve
the overall visual appearance.
5. Added a 30-second timeout for tool execution to prevent indefinite
blocking.
6. Implemented a `Promise.race` between the tool execution and the
timeout promise to ensure timely completion.
7. Updated the `DeepCodyAgent` to handle tool execution errors and
return an empty result set instead of propagating the error (see the sketch
after this list).
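
As a reference for items 5–7, here is a minimal sketch of the timeout pattern,
assuming a generic tool-execution function; the names (`TOOL_TIMEOUT_MS`,
`executeWithTimeout`) are illustrative and not the exact identifiers used in
`DeepCodyAgent`:

```ts
// Sketch only: wrap a tool execution in a 30-second timeout via Promise.race.
const TOOL_TIMEOUT_MS = 30_000

async function executeWithTimeout<T>(execute: () => Promise<T[]>): Promise<T[]> {
    const timeout = new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error('Tool execution timed out')), TOOL_TIMEOUT_MS)
    )
    try {
        // Whichever settles first wins: the tool result or the timeout rejection.
        return await Promise.race([execute(), timeout])
    } catch {
        // On timeout or tool failure, return an empty result set instead of
        // propagating the error, so the chat flow can continue.
        return []
    }
}
```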


## Test plan


See demo:


https://github.com/user-attachments/assets/57ac1f7d-a596-4671-bfa1-3ffd1611e327

Updated Storybook:


![image](https://github.com/user-attachments/assets/506d0766-de88-4455-b7cd-485278b31c19)

## Changelog


---------

Co-authored-by: Valery Bugakov <[email protected]>
abeatrix and valerybugakov authored Dec 27, 2024
1 parent 0c1699a commit 30145b9
Showing 19 changed files with 746 additions and 137 deletions.
@@ -13,6 +13,7 @@ data class SerializedChatMessage(
val intent: IntentEnum? = null, // Oneof: search, chat, edit, insert
val manuallySelectedIntent: ManuallySelectedIntentEnum? = null, // Oneof: search, chat, edit, insert
val search: Any? = null,
val processes: List<ProcessingStep>? = null,
) {

enum class SpeakerEnum {
1 change: 1 addition & 0 deletions lib/shared/src/chat/transcript/index.ts
@@ -34,5 +34,6 @@ export function serializeChatMessage(chatMessage: ChatMessage): SerializedChatMe
intent: chatMessage.intent,
manuallySelectedIntent: chatMessage.manuallySelectedIntent,
search: chatMessage.search,
processes: chatMessage.processes,
}
}
46 changes: 46 additions & 0 deletions lib/shared/src/chat/transcript/messages.ts
@@ -39,6 +39,42 @@ export interface ChatMessage extends Message {
intent?: 'search' | 'chat' | 'edit' | 'insert' | undefined | null
manuallySelectedIntent?: 'search' | 'chat' | 'edit' | 'insert' | undefined | null
search?: ChatMessageSearch | undefined | null
processes?: ProcessingStep[] | undefined | null
}

/**
* Represents an individual step in a chat message processing pipeline, typically used
* to track and display the progress of context fetching and analysis operations.
*/
export interface ProcessingStep {
/**
* Unique identifier or name for the processing step
*/
id: string

/**
* Description of what the step is doing or has completed
*/
content: string

/**
* Current state of the step
* - 'pending': Step is currently in progress
* - 'success': Step completed successfully
* - 'error': Step failed to complete
*/
status: 'pending' | 'success' | 'error'

/**
* Optional numerical order of the step in the sequence.
* Used to display the steps in the correct order.
*/
step?: number

/**
* Error information if the step failed
*/
error?: ChatError
}

export type ChatMessageWithSearch = ChatMessage & { search: ChatMessageSearch }
@@ -65,6 +101,7 @@ export interface SerializedChatMessage {
intent?: ChatMessage['intent']
manuallySelectedIntent?: ChatMessage['manuallySelectedIntent']
search?: ChatMessage['search']
processes?: ChatMessage['processes']
}

export interface ChatError {
@@ -155,3 +192,12 @@ export function errorToChatError(error: Error): ChatError {
name: error.name,
}
}

export function createProcessingStep(data: Partial<ProcessingStep>): ProcessingStep {
return {
id: data.id ?? '',
content: data.content ?? '',
status: data.status ?? 'pending',
step: data.step ?? 0,
}
}
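
For context (not part of the diff), a hypothetical usage of the helper above,
assuming `createProcessingStep` and the `ProcessingStep` type are re-exported
from the shared library entry point:

```ts
import { createProcessingStep, type ProcessingStep } from '@sourcegraph/cody-shared'

// A step starts out as 'pending' by default...
const step = createProcessingStep({ id: 'search', content: 'Searching the codebase', step: 1 })

// ...and is later replaced with a 'success' or 'error' copy once the tool finishes.
const finished: ProcessingStep = { ...step, status: 'success' }
```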
1 change: 1 addition & 0 deletions lib/shared/src/index.ts
@@ -76,6 +76,7 @@ export type {
RankedContext,
ChatMessageWithSearch,
ChatMessageSearch,
ProcessingStep,
} from './chat/transcript/messages'
export {
CODY_PASSTHROUGH_VSCODE_OPEN_COMMAND_ID,
20 changes: 20 additions & 0 deletions lib/shared/src/prompt/templates.ts
@@ -1,5 +1,6 @@
import type { URI } from 'vscode-uri'

import { CodyIDE } from '..'
import type { RangeData } from '../common/range'
import type { ActiveTextEditorDiagnostic } from '../editor'
import { displayPath } from '../editor/displayPath'
@@ -110,3 +111,22 @@ export function populateCodeGenerationContextTemplate(
.replaceAll('{filePath}', PromptString.fromDisplayPath(fileUri))
.replaceAll('{outputTag}', tag)
}

export function getClientPromptString(client: CodyIDE): PromptString {
switch (client) {
case CodyIDE.Web:
return ps`Sourcegraph Web`
case CodyIDE.VisualStudio:
return ps`Visual Studio`
case CodyIDE.JetBrains:
return ps`JetBrains`
case CodyIDE.Eclipse:
return ps`Eclipse`
case CodyIDE.Emacs:
return ps`Emacs`
case CodyIDE.Neovim:
return ps`Neovim`
default:
return ps`VS Code`
}
}
77 changes: 62 additions & 15 deletions vscode/src/chat/agentic/CodyChatAgent.ts
@@ -11,18 +11,23 @@ import { getCategorizedMentions } from '../../prompt-builder/utils'
import type { ChatBuilder } from '../chat-view/ChatBuilder'
import { DefaultPrompter } from '../chat-view/prompt'
import type { CodyTool } from './CodyTool'
import type { ToolStatusCallback } from './CodyToolProvider'
import { ProcessManager } from './ProcessManager'

export abstract class CodyChatAgent {
protected readonly multiplexer = new BotResponseMultiplexer()
protected readonly promptMixins: PromptMixin[] = []
protected readonly toolHandlers: Map<string, CodyTool>
protected statusCallback?: ToolStatusCallback
protected postMessageCallback?: (model: string) => void

constructor(
protected readonly chatBuilder: ChatBuilder,
protected readonly chatClient: Pick<ChatClient, 'chat'>,
protected readonly tools: CodyTool[],
protected context: ContextItem[] = []
) {
// Initialize handlers and mixins in constructor
this.toolHandlers = new Map(tools.map(tool => [tool.config.tags.tag.toString(), tool]))
this.initializeMultiplexer()
this.promptMixins.push(newPromptMixin(this.buildPrompt()))
@@ -32,7 +37,7 @@ export abstract class CodyChatAgent {
for (const [tag, tool] of this.toolHandlers) {
this.multiplexer.sub(tag, {
onResponse: async (content: string) => tool.stream(content),
onTurnComplete: async () => tool.stream(''),
onTurnComplete: async () => {},
})
}
}
Expand All @@ -49,27 +54,30 @@ export abstract class CodyChatAgent {
): Promise<string> {
const stream = await this.chatClient.chat(
message,
{ model: model, maxTokensToSample: 4000 },
{ model, maxTokensToSample: 4000 },
new AbortController().signal,
requestID
)
const accumulated = new StringBuilder()
try {
for await (const msg of stream) {
if (signal?.aborted) break
if (msg.type === 'change') {
const newText = msg.text.slice(accumulated.length)
accumulated.append(newText)
await this.processResponseText(newText)
}

let accumulated = ''
for await (const msg of stream) {
if (signal?.aborted) break

if (msg.type === 'change') {
const newText = msg.text.slice(accumulated.length)
accumulated += newText
await this.processResponseText(newText)
} else if (msg.type === 'complete' || msg.type === 'error') {
await this.multiplexer.notifyTurnComplete()
if (msg.type === 'error') throw new Error('Error while streaming')
break
if (msg.type === 'complete' || msg.type === 'error') {
if (msg.type === 'error') throw new Error('Error while streaming')
break
}
}
} finally {
await this.multiplexer.notifyTurnComplete()
}

return accumulated
return accumulated.toString()
}

protected getPrompter(items: ContextItem[]): DefaultPrompter {
@@ -78,6 +86,45 @@ export abstract class CodyChatAgent {
return new DefaultPrompter(explicitMentions, implicitMentions.slice(-MAX_SEARCH_ITEMS))
}

public setStatusCallback(postMessage: (model: string) => void): void {
this.postMessageCallback = postMessage
const model = this.chatBuilder.selectedModel ?? ''

// Create a steps manager to handle state updates efficiently
const stepsManager = new ProcessManager(steps => {
this.chatBuilder.setLastMessageProcesses(steps)
this.postMessageCallback?.(model)
})

this.statusCallback = {
onStart: () => {
stepsManager.initializeStep()
},
onStream: (toolName, content) => {
stepsManager.addStep(toolName, content)
},
onComplete: (toolName, error) => {
stepsManager.completeStep(toolName, error)
},
}
}

// Abstract methods that must be implemented by derived classes
protected abstract buildPrompt(): PromptString
}

class StringBuilder {
private parts: string[] = []

append(str: string): void {
this.parts.push(str)
}

toString(): string {
return this.parts.join('')
}

get length(): number {
return this.parts.reduce((acc, part) => acc + part.length, 0)
}
}
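
The `ProcessManager` and `ToolStatusCallback` used above come from files that
are not expanded in this view. Below is a minimal sketch of how the pieces
might fit together, inferred from the call sites in `setStatusCallback`; the
method names match the diff, but the bodies, type signatures, and the import
path are assumptions, not the actual implementation:

```ts
import { createProcessingStep, type ProcessingStep } from '@sourcegraph/cody-shared'

// Shape inferred from how statusCallback is invoked above; the real interface
// is declared in CodyToolProvider.
interface ToolStatusCallback {
    onStart(): void
    onStream(toolName: string, content: string): void
    onComplete(toolName?: string, error?: Error): void
}

// Sketch only: the real ProcessManager (vscode/src/chat/agentic/ProcessManager.ts)
// may track and deduplicate steps differently.
class ProcessManagerSketch {
    private steps: ProcessingStep[] = []

    constructor(private readonly onChange: (steps: ProcessingStep[]) => void) {}

    // onStart: seed the list with an initial pending step.
    initializeStep(): void {
        this.steps = [createProcessingStep({ step: 0 })]
        this.onChange(this.steps)
    }

    // onStream: record a pending step for the tool that just produced output.
    addStep(toolName: string, content: string): void {
        this.steps.push(createProcessingStep({ id: toolName, content, step: this.steps.length }))
        this.onChange(this.steps)
    }

    // onComplete: mark the matching step (or every step) as finished.
    completeStep(toolName?: string, error?: Error): void {
        this.steps = this.steps.map((s): ProcessingStep =>
            !toolName || s.id === toolName ? { ...s, status: error ? 'error' : 'success' } : s
        )
        this.onChange(this.steps)
    }
}

// Wiring, mirroring setStatusCallback above.
function makeCallback(manager: ProcessManagerSketch): ToolStatusCallback {
    return {
        onStart: () => manager.initializeStep(),
        onStream: (toolName, content) => manager.addStep(toolName, content),
        onComplete: (toolName, error) => manager.completeStep(toolName, error),
    }
}
```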
