-
Notifications
You must be signed in to change notification settings - Fork 298
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
## Context The PR enables a basic setup for `auto-edits`. The goal of the PR is for us to check the model quality by manually triggering the `auto-edits` and have a vibe check. The actual integration of next edits should be done with the existing autocomplete infra. So, this PR is supposed to live behind a feature flag just for the internal testing and model iterations. 1. The PR takes 4 lines surrounding the cursor and then proposes a diff for those 4 lines. 2. The current implementation uses a fine-tuned gpt-4o-mini model. The name of the model is: `ft:gpt-4o-mini-2024-07-18:sourcegraph-production::AFXNjNiC` ## Steps to run the autoedits (in debug mode): 1. Set the following setting in the vscode config ``` "cody.experimental.autoedits": { "provider": "openai", "model": "ft:gpt-4o-mini-2024-07-18:sourcegraph-production::AGgXey7l", "apiKey": "<openai_token>", "tokenLimit": { "prefixTokens": 2500, "suffixTokens": 2500, "maxPrefixLinesInArea": 12, "maxSuffixLinesInArea": 5, "codeToRewritePrefixLines": 2, "codeToRewriteSuffixLines": 3, "contextSpecificTokenLimit": { "recent-view-port": 1000, "diagnostics": 1000, "recent-copy": 1000, "jaccard-similarity": 1000, "recent-edits": 1000 } } }, ``` 2. Press `ctrl+shift+enter` to trigger the autoedit. 3. The debug console `Cody by Sourcegraph` will show the diff in the console. 4. The suggestion will also be shown on the UI as a `ghost text` and we can press `tab` to apply the changes and `escape` to reject the changes. ## Test plan Updated CI checks and manual testing. Please see a demo below https://github.com/user-attachments/assets/7c1cf50d-c1fa-48bc-a71e-364e184404ce --------- Co-authored-by: Beatrix <[email protected]>
- Loading branch information
1 parent
f84a318
commit 9b9f64c
Showing
26 changed files
with
1,289 additions
and
85 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,110 @@ | ||
import { | ||
type AutoEditsTokenLimit, | ||
type DocumentContext, | ||
logDebug, | ||
tokensToChars, | ||
} from '@sourcegraph/cody-shared' | ||
import { Observable } from 'observable-fns' | ||
import * as vscode from 'vscode' | ||
import { ContextMixer } from '../completions/context/context-mixer' | ||
import { DefaultContextStrategyFactory } from '../completions/context/context-strategy' | ||
import { getCurrentDocContext } from '../completions/get-current-doc-context' | ||
import { getConfiguration } from '../configuration' | ||
import type { PromptProvider } from './prompt-provider' | ||
import { DeepSeekPromptProvider } from './providers/deepseek' | ||
import { OpenAIPromptProvider } from './providers/openai' | ||
import { AutoEditsRenderer } from './renderer' | ||
|
||
// Context-retrieval strategy name handed to the ContextMixer's strategy factory
// so context sources are configured specifically for auto-edits.
const AUTOEDITS_CONTEXT_STRATEGY = 'auto-edits'
|
||
/**
 * Location at which an auto-edit prediction is requested: a document and the
 * cursor position within it.
 */
export interface AutoEditsProviderOptions {
    document: vscode.TextDocument
    position: vscode.Position
}
|
||
export class AutoeditsProvider implements vscode.Disposable { | ||
private disposables: vscode.Disposable[] = [] | ||
private contextMixer: ContextMixer = new ContextMixer({ | ||
strategyFactory: new DefaultContextStrategyFactory(Observable.of(AUTOEDITS_CONTEXT_STRATEGY)), | ||
dataCollectionEnabled: false, | ||
}) | ||
private autoEditsTokenLimit: AutoEditsTokenLimit | undefined | ||
private provider: PromptProvider | undefined | ||
private model: string | undefined | ||
private apiKey: string | undefined | ||
private renderer: AutoEditsRenderer = new AutoEditsRenderer() | ||
|
||
constructor() { | ||
const config = getConfiguration().experimentalAutoedits | ||
if (config === undefined) { | ||
logDebug('AutoEdits', 'No Configuration found in the settings') | ||
return | ||
} | ||
this.initizlizePromptProvider(config.provider) | ||
this.autoEditsTokenLimit = config.tokenLimit as AutoEditsTokenLimit | ||
this.model = config.model | ||
this.apiKey = config.apiKey | ||
this.disposables.push( | ||
this.contextMixer, | ||
this.renderer, | ||
vscode.commands.registerCommand('cody.experimental.suggest', () => this.getAutoedit()) | ||
) | ||
} | ||
|
||
private initizlizePromptProvider(provider: string) { | ||
if (provider === 'openai') { | ||
this.provider = new OpenAIPromptProvider() | ||
} else if (provider === 'deepseek') { | ||
this.provider = new DeepSeekPromptProvider() | ||
} else { | ||
logDebug('AutoEdits', `provider ${provider} not supported`) | ||
} | ||
} | ||
|
||
public getAutoedit() { | ||
this.predictAutoeditAtDocAndPosition({ | ||
document: vscode.window.activeTextEditor!.document, | ||
position: vscode.window.activeTextEditor!.selection.active, | ||
}) | ||
} | ||
|
||
public async predictAutoeditAtDocAndPosition(options: AutoEditsProviderOptions) { | ||
if (!this.provider || !this.autoEditsTokenLimit || !this.model || !this.apiKey) { | ||
logDebug('AutoEdits', 'No Provider or Token Limit found in the settings') | ||
return | ||
} | ||
const start = Date.now() | ||
const docContext = this.getDocContext(options.document, options.position) | ||
const { context } = await this.contextMixer.getContext({ | ||
document: options.document, | ||
position: options.position, | ||
docContext: docContext, | ||
maxChars: 100000, | ||
}) | ||
const { codeToReplace, promptResponse: prompt } = this.provider.getPrompt( | ||
docContext, | ||
options.document, | ||
context, | ||
this.autoEditsTokenLimit | ||
) | ||
const response = await this.provider.getModelResponse(this.model, this.apiKey, prompt) | ||
const timeToResponse = Date.now() - start | ||
logDebug('AutoEdits: (Time LLM Query):', timeToResponse.toString()) | ||
await this.renderer.render(options, codeToReplace, response) | ||
} | ||
|
||
private getDocContext(document: vscode.TextDocument, position: vscode.Position): DocumentContext { | ||
return getCurrentDocContext({ | ||
document, | ||
position, | ||
maxPrefixLength: tokensToChars(this.autoEditsTokenLimit?.prefixTokens ?? 0), | ||
maxSuffixLength: tokensToChars(this.autoEditsTokenLimit?.suffixTokens ?? 0), | ||
}) | ||
} | ||
|
||
public dispose() { | ||
for (const disposable of this.disposables) { | ||
disposable.dispose() | ||
} | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
import type { AutoEditsTokenLimit, PromptString } from '@sourcegraph/cody-shared' | ||
import type * as vscode from 'vscode' | ||
import type { | ||
AutocompleteContextSnippet, | ||
DocumentContext, | ||
} from '../../../lib/shared/src/completions/types' | ||
import type * as utils from './prompt-utils' | ||
/** Completion-style prompt: a single prompt string. */
export type CompletionsPrompt = PromptString
/** Chat-style prompt: an ordered list of role-tagged messages. */
export type ChatPrompt = {
    role: 'system' | 'user' | 'assistant'
    content: PromptString
}[]
/** Either prompt flavor a provider may produce, depending on the model's API. */
export type PromptProviderResponse = CompletionsPrompt | ChatPrompt
|
||
/**
 * Result of building a prompt: the code range the model is asked to rewrite,
 * plus the prompt to send to the model.
 */
export interface PromptResponseData {
    // Range/content of the code that the model's response will replace.
    codeToReplace: utils.CodeToReplaceData
    promptResponse: PromptProviderResponse
}
|
||
/**
 * Contract for a model-specific auto-edits backend (e.g. OpenAI, DeepSeek):
 * builds the prompt, queries the model, and post-processes its output.
 */
export interface PromptProvider {
    // Builds the prompt from the document context, retrieved context snippets,
    // and the configured token budget.
    getPrompt(
        docContext: DocumentContext,
        document: vscode.TextDocument,
        context: AutocompleteContextSnippet[],
        tokenBudget: AutoEditsTokenLimit
    ): PromptResponseData

    // Cleans up the raw model completion (null-safe) into usable text.
    postProcessResponse(completion: string | null): string

    // Sends the prompt to the provider's endpoint and resolves with the raw
    // completion text.
    getModelResponse(model: string, apiKey: string, prompt: PromptProviderResponse): Promise<string>
}
|
||
export async function getModelResponse(url: string, body: string, apiKey: string): Promise<any> { | ||
const response = await fetch(url, { | ||
method: 'POST', | ||
headers: { | ||
'Content-Type': 'application/json', | ||
Authorization: `Bearer ${apiKey}`, | ||
}, | ||
body: body, | ||
}) | ||
if (response.status !== 200) { | ||
const errorText = await response.text() | ||
throw new Error(`HTTP error! status: ${response.status}, message: ${errorText}`) | ||
} | ||
const data = await response.json() | ||
return data | ||
} | ||
|
||
// ################################################################################################################ |
Oops, something went wrong.