chore(autoedit): consistent use of the output channel logger (#6472)
- No functional changes.
- Renames `autoeditsLogger` to `autoeditsOutputChannelLogger` to avoid
confusion with the analytics logger we plan to integrate soon.
- Ensures consistent use of the `autoeditsOutputChannelLogger`. There's
no need to prefix log messages with `Autoedits` since the logger is already
scoped to that output channel (see the sketch below).
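
The call sites in the diff below follow one convention: the first argument is the name of the calling function, and the old `Autoedits`/`AutoEdits` feature prefix is dropped. As a rough illustration only, here is a minimal sketch of an output-channel-scoped logger with that interface; the class body, channel name, and message formatting are assumptions, not the contents of `output-channel-logger.ts`:

```ts
import * as vscode from 'vscode'

// Minimal sketch, assuming the logger wraps a dedicated "Autoedits" output channel.
// Because the channel name already identifies the feature, call sites pass only the
// calling function's name plus the message.
class AutoeditsOutputChannelLogger {
    private readonly channel = vscode.window.createOutputChannel('Autoedits')

    logDebug(caller: string, ...args: unknown[]): void {
        this.channel.appendLine(`DEBUG [${caller}] ${args.map(String).join(' ')}`)
    }

    logError(caller: string, ...args: unknown[]): void {
        this.channel.appendLine(`ERROR [${caller}] ${args.map(String).join(' ')}`)
    }
}

export const autoeditsOutputChannelLogger = new AutoeditsOutputChannelLogger()

// Usage, matching the call sites in this commit:
// autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling Cody Gateway:', error)
```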
valerybugakov authored Dec 27, 2024
1 parent d0cbb2d commit d6919f2
Showing 22 changed files with 196 additions and 147 deletions.
10 changes: 7 additions & 3 deletions vscode/src/autoedits/adapters/cody-gateway.ts
@@ -1,6 +1,6 @@
import { currentResolvedConfig, dotcomTokenToGatewayToken } from '@sourcegraph/cody-shared'

import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import {
@@ -24,7 +24,11 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
}
return response.choices[0].text
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Cody Gateway:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Cody Gateway:',
error
)
throw error
}
}
@@ -33,7 +37,7 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
const resolvedConfig = await currentResolvedConfig()
const fastPathAccessToken = dotcomTokenToGatewayToken(resolvedConfig.auth.accessToken)
if (!fastPathAccessToken) {
autoeditsLogger.logError('Autoedits', 'FastPath access token is not available')
autoeditsOutputChannelLogger.logError('getApiKey', 'FastPath access token is not available')
throw new Error('FastPath access token is not available')
}
return fastPathAccessToken
7 changes: 5 additions & 2 deletions vscode/src/autoedits/adapters/create-adapter.ts
@@ -1,6 +1,6 @@
import type { AutoEditsModelConfig, ChatClient } from '@sourcegraph/cody-shared'

import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditsModelAdapter } from './base'
import { CodyGatewayAdapter } from './cody-gateway'
@@ -30,7 +30,10 @@ export function createAutoeditsModelAdapter({
? new SourcegraphChatAdapter(chatClient)
: new SourcegraphCompletionsAdapter()
default:
autoeditsLogger.logDebug('Config', `Provider ${providerName} not supported`)
autoeditsOutputChannelLogger.logDebug(
'createAutoeditsModelAdapter',
`Provider ${providerName} not supported`
)
throw new Error(`Provider ${providerName} not supported`)
}
}
13 changes: 10 additions & 3 deletions vscode/src/autoedits/adapters/fireworks.ts
@@ -1,5 +1,5 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import {
@@ -16,7 +16,10 @@ export class FireworksAdapter implements AutoeditsModelAdapter {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey

if (!apiKey) {
autoeditsLogger.logError('Autoedits', 'No api key provided in the config override')
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'No api key provided in the config override'
)
throw new Error('No api key provided in the config override')
}
const response = await getModelResponse(option.url, body, apiKey)
@@ -25,7 +28,11 @@ }
}
return response.choices[0].text
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Fireworks API:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Fireworks API:',
error
)
throw error
}
}
9 changes: 6 additions & 3 deletions vscode/src/autoedits/adapters/openai.ts
@@ -1,5 +1,5 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getModelResponse, getOpenaiCompatibleChatPrompt } from './utils'
@@ -10,7 +10,10 @@ export class OpenAIAdapter implements AutoeditsModelAdapter {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey

if (!apiKey) {
autoeditsLogger.logError('Autoedits', 'No api key provided in the config override')
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'No api key provided in the config override'
)
throw new Error('No api key provided in the config override')
}

@@ -32,7 +35,7 @@ )
)
return response.choices[0].message.content
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling OpenAI API:', error)
autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling OpenAI API:', error)
throw error
}
}
8 changes: 6 additions & 2 deletions vscode/src/autoedits/adapters/sourcegraph-chat.ts
@@ -1,5 +1,5 @@
import type { ChatClient, Message } from '@sourcegraph/cody-shared'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'
import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getMaxOutputTokensForAutoedits, getSourcegraphCompatibleChatPrompt } from './utils'

@@ -38,7 +38,11 @@ export class SourcegraphChatAdapter implements AutoeditsModelAdapter {
}
return accumulated
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Sourcegraph Chat:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Sourcegraph Chat:',
error
)
throw error
}
}
8 changes: 6 additions & 2 deletions vscode/src/autoedits/adapters/sourcegraph-completions.ts
@@ -5,7 +5,7 @@ import type {
ModelRefStr,
} from '@sourcegraph/cody-shared'
import { defaultCodeCompletionsClient } from '../../completions/default-client'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'
import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getMaxOutputTokensForAutoedits, getSourcegraphCompatibleChatPrompt } from './utils'

@@ -48,7 +48,11 @@ export class SourcegraphCompletionsAdapter implements AutoeditsModelAdapter {
}
return accumulated
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Sourcegraph Completions:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Sourcegraph Completions:',
error
)
throw error
}
}
149 changes: 74 additions & 75 deletions vscode/src/autoedits/autoedits-provider.ts
@@ -14,11 +14,11 @@ import { ContextMixer } from '../completions/context/context-mixer'
import { DefaultContextStrategyFactory } from '../completions/context/context-strategy'
import { getCurrentDocContext } from '../completions/get-current-doc-context'

import type { AutoeditsModelAdapter } from './adapters/base'
import type { AutoeditsModelAdapter, AutoeditsPrompt } from './adapters/base'
import { createAutoeditsModelAdapter } from './adapters/create-adapter'
import { autoeditsProviderConfig } from './autoedits-config'
import { FilterPredictionBasedOnRecentEdits } from './filter-prediction-edits'
import { autoeditsLogger } from './logger'
import { autoeditsOutputChannelLogger } from './output-channel-logger'
import type { CodeToReplaceData } from './prompt/prompt-utils'
import { ShortTermPromptStrategy } from './prompt/short-term-diff-prompt-strategy'
import type { DecorationInfo } from './renderer/decorators/base'
@@ -135,9 +135,10 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
public async provideInlineCompletionItems(
document: vscode.TextDocument,
position: vscode.Position,
context: vscode.InlineCompletionContext,
inlineCompletionContext: vscode.InlineCompletionContext,
token?: vscode.CancellationToken
): Promise<vscode.InlineCompletionItem[] | vscode.InlineCompletionList | null> {
const start = Date.now()
const controller = new AbortController()
const abortSignal = controller.signal
token?.onCancellationRequested(() => controller.abort())
@@ -154,39 +155,63 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
maxSuffixLength: tokensToChars(autoeditsProviderConfig.tokenLimit.suffixTokens),
})

const autoeditResponse = await this.inferEdit({
const { context } = await this.contextMixer.getContext({
document,
position,
docContext,
abortSignal,
maxChars: 32_000,
})
if (abortSignal.aborted) {
return null
}

if (abortSignal.aborted || !autoeditResponse) {
const { codeToReplaceData, prompt } = this.promptStrategy.getPromptForModelType({
document,
position,
docContext,
context,
tokenBudget: autoeditsProviderConfig.tokenLimit,
isChatModel: autoeditsProviderConfig.isChatModel,
})

const initialPrediction = await this.getPrediction({
document,
position,
prompt,
codeToReplaceData,
})
if (abortSignal?.aborted || !initialPrediction) {
return null
}

let { prediction, codeToReplaceData } = autoeditResponse
autoeditsOutputChannelLogger.logDebug(
'provideInlineCompletionItems',
`========================== Response:\n${initialPrediction}\n` +
`========================== Time Taken: ${Date.now() - start}ms`
)

const prediction = shrinkPredictionUntilSuffix({
prediction: initialPrediction,
codeToReplaceData,
})

const { codeToRewrite } = codeToReplaceData
if (prediction === codeToRewrite) {
autoeditsOutputChannelLogger.logDebug('skip', 'prediction equals to code to rewrite')
return null
}

const shouldFilterPredictionBasedRecentEdits = this.filterPrediction.shouldFilterPrediction(
document.uri,
const shouldFilterPredictionBasedRecentEdits = this.filterPrediction.shouldFilterPrediction({
uri: document.uri,
prediction,
codeToRewrite
)
codeToRewrite,
})

if (shouldFilterPredictionBasedRecentEdits) {
autoeditsLogger.logDebug('Autoedits', 'Skipping autoedit - based on recent edits')
autoeditsOutputChannelLogger.logDebug('skip', 'based on recent edits')
return null
}

prediction = shrinkPredictionUntilSuffix(prediction, codeToReplaceData)
if (prediction === codeToRewrite) {
autoeditsLogger.logDebug(
'Autoedits',
'Skipping autoedit - prediction equals to code to rewrite'
)
return null
}
const decorationInfo = getDecorationInfoFromPrediction(document, prediction, codeToReplaceData)

if (
@@ -196,10 +221,7 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
suffix: codeToReplaceData.suffixInArea + codeToReplaceData.suffixAfterArea,
})
) {
autoeditsLogger.logDebug(
'Autoedits',
'Skipping autoedit - predicted text already exists in suffix'
)
autoeditsOutputChannelLogger.logDebug('skip', 'prediction equals to code to rewrite')
return null
}

@@ -212,70 +234,47 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
docContext,
decorationInfo,
})

return inlineCompletions
}

private async inferEdit(options: AutoEditsProviderOptions): Promise<AutoeditsPrediction | null> {
const start = Date.now()
const { document, position, docContext, abortSignal } = options

const { context } = await this.contextMixer.getContext({
document,
position,
docContext,
maxChars: 32_000,
})

const { codeToReplace, prompt } = this.promptStrategy.getPromptForModelType({
document,
position,
docContext,
context,
tokenBudget: autoeditsProviderConfig.tokenLimit,
isChatModel: autoeditsProviderConfig.isChatModel,
})

let response: string | undefined = undefined
private async getPrediction({
document,
position,
codeToReplaceData,
prompt,
}: {
document: vscode.TextDocument
position: vscode.Position
codeToReplaceData: CodeToReplaceData
prompt: AutoeditsPrompt
}): Promise<string | undefined> {
if (autoeditsProviderConfig.isMockResponseFromCurrentDocumentTemplateEnabled) {
const responseMetadata = extractAutoEditResponseFromCurrentDocumentCommentTemplate(
document,
position
)

if (responseMetadata) {
response = shrinkReplacerTextToCodeToReplaceRange(responseMetadata, codeToReplace)
const prediction = shrinkReplacerTextToCodeToReplaceRange(
responseMetadata,
codeToReplaceData
)

if (prediction) {
return prediction
}
}
}

if (response === undefined) {
response = await this.modelAdapter.getModelResponse({
url: autoeditsProviderConfig.url,
model: autoeditsProviderConfig.model,
prompt,
codeToRewrite: codeToReplace.codeToRewrite,
userId: (await currentResolvedConfig()).clientState.anonymousUserID,
isChatModel: autoeditsProviderConfig.isChatModel,
})
}

if (abortSignal?.aborted || !response) {
return null
}

autoeditsLogger.logDebug(
'Autoedits',
'========================== Response:\n',
response,
'\n',
'========================== Time Taken For LLM (Msec): ',
(Date.now() - start).toString(),
'\n'
)

return {
codeToReplaceData: codeToReplace,
prediction: response,
}
return this.modelAdapter.getModelResponse({
url: autoeditsProviderConfig.url,
model: autoeditsProviderConfig.model,
prompt,
codeToRewrite: codeToReplaceData.codeToRewrite,
userId: (await currentResolvedConfig()).clientState.anonymousUserID,
isChatModel: autoeditsProviderConfig.isChatModel,
})
}

public dispose(): void {
(Diff truncated: the remaining changed files are not shown.)