Skip to content

Commit

Permalink
Autocomplete: Mark Ollama support experimental (#3077)
Browse files Browse the repository at this point in the history
## Test plan

- Try with both `"cody.autocomplete.advanced.provider":
"unstable-ollama"` and `"cody.autocomplete.advanced.provider":
"experimental-ollama"`` in the settings
- Check that it resolves to the correct provider by logging: 
    ```
█ CodyCompletionProvider:initialized:
experimental-ollama/deepseek-coder:6.7b-base-q4_K_M
    ```

<img width="808" alt="Screenshot 2024-02-07 at 20 21 37"
src="https://github.com/sourcegraph/cody/assets/458591/ab7dbf4f-8e8e-4371-aef1-be9d7afc4b49">
  • Loading branch information
philipp-spiess authored Feb 7, 2024
1 parent 20e1a9e commit 726828c
Show file tree
Hide file tree
Showing 9 changed files with 31 additions and 17 deletions.
7 changes: 6 additions & 1 deletion lib/shared/src/configuration.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,12 @@ export interface Configuration {
*/
autocomplete: boolean
autocompleteLanguages: Record<string, boolean>
autocompleteAdvancedProvider: 'anthropic' | 'fireworks' | 'unstable-openai' | null
autocompleteAdvancedProvider:
| 'anthropic'
| 'fireworks'
| 'unstable-openai'
| 'experimental-ollama'
| null
autocompleteAdvancedModel: string | null
autocompleteCompleteSuggestWidgetSelection?: boolean
autocompleteFormatOnAccept?: boolean
Expand Down
1 change: 1 addition & 0 deletions vscode/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ This is a log of all notable changes to Cody for VS Code. [Unreleased] changes a
- Chat: Edit buttons are disabled on messages generated by the default commands. [pull/3005](https://github.com/sourcegraph/cody/pull/3005)
- Autocomplete: Expanded the configuration list to include `astro`, `rust`, `svelte`, and `elixir` for enhanced detection of multiline triggers. [pulls/3044](https://github.com/sourcegraph/cody/pull/3044)
- Autocomplete: Multiline completions are now enabled only for languages from a predefined list. [pulls/3044](https://github.com/sourcegraph/cody/pull/3044)
- Autocomplete: Rename `unstable-ollama` option to `experimental-ollama` to better communicate the current state. We still support `unstable-ollama` in the config for backward compatibility. [pull/3077](https://github.com/sourcegraph/cody/pull/3077)

## [1.2.3]

Expand Down
2 changes: 1 addition & 1 deletion vscode/doc/ollama.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,5 @@
2. Download one of the supported local models:
- `ollama pull deepseek-coder:6.7b-base-q4_K_M` for [deepseek-coder](https://ollama.ai/library/deepseek-coder)
- `ollama pull codellama:7b-code` for [codellama](https://ollama.ai/library/codellama)
3. Update Cody's VS Code settings to use the `unstable-ollama` autocomplete provider.
3. Update Cody's VS Code settings to use the `experimental-ollama` autocomplete provider.
4. Confirm Cody uses Ollama by looking at the Cody output channel or the autocomplete trace view (in the command palette).
2 changes: 1 addition & 1 deletion vscode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -877,7 +877,7 @@
"cody.autocomplete.advanced.provider": {
"type": "string",
"default": null,
"enum": [null, "anthropic", "fireworks", "unstable-openai", "unstable-ollama"],
"enum": [null, "anthropic", "fireworks", "unstable-openai", "experimental-ollama"],
"markdownDescription": "The provider used for code autocomplete. Most providers other than `anthropic` require the `cody.autocomplete.advanced.serverEndpoint` and `cody.autocomplete.advanced.accessToken` settings to also be set. Check the Cody output channel for error messages if autocomplete is not working as expected."
},
"cody.autocomplete.advanced.serverEndpoint": {
Expand Down
2 changes: 1 addition & 1 deletion vscode/src/completions/inline-completion-item-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ import {
type AutocompleteItem,
} from './suggested-autocomplete-items-cache'
import type { ProvideInlineCompletionItemsTracer, ProvideInlineCompletionsItemTraceData } from './tracer'
import { isLocalCompletionsProvider } from './providers/unstable-ollama'
import { isLocalCompletionsProvider } from './providers/experimental-ollama'
import { completionProviderConfig } from './completion-provider-config'

interface AutocompleteResult extends vscode.InlineCompletionList {
Expand Down
7 changes: 5 additions & 2 deletions vscode/src/completions/providers/create-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import {
type FireworksOptions,
} from './fireworks'
import type { ProviderConfig } from './provider'
import { createProviderConfig as createUnstableOllamaProviderConfig } from './unstable-ollama'
import { createProviderConfig as createExperimentalOllamaProviderConfig } from './experimental-ollama'
import { createProviderConfig as createUnstableOpenAIProviderConfig } from './unstable-openai'
import type { AuthStatus } from '../../chat/protocol'

Expand Down Expand Up @@ -49,8 +49,11 @@ export async function createProviderConfig(
case 'anthropic': {
return createAnthropicProviderConfig({ client })
}
case 'experimental-ollama':
case 'unstable-ollama': {
return createUnstableOllamaProviderConfig(config.autocompleteExperimentalOllamaOptions)
return createExperimentalOllamaProviderConfig(
config.autocompleteExperimentalOllamaOptions
)
}
default:
logError(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ function fileNameLine(uri: vscode.Uri, commentStart: string): string {
* The provider communicates with an Ollama server's [REST
* API](https://github.com/jmorganca/ollama#rest-api).
*/
class UnstableOllamaProvider extends Provider {
class ExperimentalOllamaProvider extends Provider {
constructor(
options: ProviderOptions,
private readonly ollamaOptions: OllamaOptions
Expand Down Expand Up @@ -167,7 +167,7 @@ class UnstableOllamaProvider extends Provider {
}
}

const PROVIDER_IDENTIFIER = 'unstable-ollama'
const PROVIDER_IDENTIFIER = 'experimental-ollama'

export function isLocalCompletionsProvider(providerId: string): boolean {
return providerId === PROVIDER_IDENTIFIER
Expand All @@ -176,7 +176,7 @@ export function isLocalCompletionsProvider(providerId: string): boolean {
export function createProviderConfig(ollamaOptions: OllamaOptions): ProviderConfig {
return {
create(options: Omit<ProviderOptions, 'id'>) {
return new UnstableOllamaProvider(
return new ExperimentalOllamaProvider(
{
...options,
// Always generate just one completion for a better perf.
Expand Down
2 changes: 1 addition & 1 deletion vscode/src/completions/request-manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import {
import type { ContextSnippet } from './types'
import { lines, removeIndentation } from './text-processing'
import { logDebug } from '../log'
import { isLocalCompletionsProvider } from './providers/unstable-ollama'
import { isLocalCompletionsProvider } from './providers/experimental-ollama'

export interface RequestParams {
/** The request's document */
Expand Down
19 changes: 12 additions & 7 deletions vscode/src/configuration.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,13 +50,18 @@ export function getConfiguration(
debugRegex = /.*/
}

let autocompleteAdvancedProvider: Configuration['autocompleteAdvancedProvider'] = config.get(
CONFIG_KEY.autocompleteAdvancedProvider,
null
)
// Handle the old `unstable-fireworks` option
if (autocompleteAdvancedProvider === 'unstable-fireworks') {
autocompleteAdvancedProvider = 'fireworks'
let autocompleteAdvancedProvider = config.get<
Configuration['autocompleteAdvancedProvider'] | 'unstable-ollama' | 'unstable-fireworks'
>(CONFIG_KEY.autocompleteAdvancedProvider, null)

// Handle deprecated provider identifiers
switch (autocompleteAdvancedProvider) {
case 'unstable-fireworks':
autocompleteAdvancedProvider = 'fireworks'
break
case 'unstable-ollama':
autocompleteAdvancedProvider = 'experimental-ollama'
break
}

// check if the configured enum values are valid
Expand Down

0 comments on commit 726828c

Please sign in to comment.