wip: Vim script entrypoint
kyoh86 committed Dec 28, 2023
1 parent f1cedcf commit 1e1e129
Showing 4 changed files with 42 additions and 32 deletions.
4 changes: 4 additions & 0 deletions autoload/ollama.vim
@@ -0,0 +1,4 @@
function ollama#generate_completion() abort
  let l:prompt = input("Prompt:")
  call denops#notify("ollama", "generate_completion", ["codellama", l:prompt, {}])
endfunction
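
For context, the notify call above is asynchronous: it hands the model name, the prompt, and an (empty) options dictionary to the denops plugin named "ollama" and returns immediately. Below is a minimal sketch of the dispatcher entry such a call expects on the TypeScript side, assuming the usual denops-std shape; the parameter names (uModel, uPrompt, uContext) and the surrounding main() skeleton are illustrative, not taken verbatim from this commit, and the import version specifiers are left elided as in the diff further down.

// Hypothetical dispatcher sketch; names and structure are illustrative.
import type { Denops } from "https://deno.land/x/denops_std@…/mod.ts";
import { ensure, is } from "https://deno.land/x/unknownutil@…/mod.ts";

export function main(denops: Denops) {
  denops.dispatcher = {
    generate_completion(uModel: unknown, uPrompt: unknown, uContext: unknown) {
      // Arguments arrive untyped over the Vim/Deno channel, so validate them first.
      const model = ensure(uModel, is.String);
      const prompt = ensure(uPrompt, is.String);
      const context = ensure(uContext, is.Record);
      console.debug(`model: ${model}, prompt: ${prompt}, context: ${context}`);
      // ...hand off to the API client (see generate_completion.ts below).
    },
  };
}
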
20 changes: 20 additions & 0 deletions denops/ollama/api/base.ts
@@ -76,3 +76,23 @@ export async function parseJSONList<T>(
  );
  return { response, body };
}

export function parseJSONStream<T>(
  response: Response,
  predicate: Predicate<T>,
) {
  return {
    response,
    body: response.body
      ?.pipeThrough(new TextDecoderStream())
      .pipeThrough(new JSONLinesParseStream())
      .pipeThrough(
        new TransformStream<JSONValue, T>({
          transform: (value, controller) => {
            const item = ensure(value, predicate);
            controller.enqueue(item);
          },
        }),
      ),
  };
}
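
A hedged usage sketch of the helper above: parseJSONStream pipes the raw HTTP body through a TextDecoderStream, splits it into JSON Lines, and validates every parsed value against the given predicate, so consumers receive an already-typed ReadableStream. The fetch call, the isItem predicate, and the item shape below are illustrative assumptions, not part of this commit.

import { parseJSONStream } from "./base.ts";

// Hypothetical NDJSON item shape, used only for this example.
const isItem = (x: unknown): x is { response: string } =>
  typeof x === "object" && x !== null &&
  typeof (x as { response?: unknown }).response === "string";

const res = await fetch("http://localhost:11434/api/generate", {
  method: "POST",
  body: JSON.stringify({ model: "codellama", prompt: "hello" }),
});
const { body } = parseJSONStream(res, isItem);
// Every chunk has already passed the predicate, so it is typed as the item shape.
for await (const item of body ?? []) {
  console.log(item.response);
}
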
27 changes: 6 additions & 21 deletions denops/ollama/api/generate_completion.ts
@@ -1,11 +1,10 @@
import {
  ensure,
  is,
  ObjectOf as O,
  Predicate as P,
} from "https://deno.land/x/unknownutil@…/mod.ts";
import { isFormat, RequestOptions, Result } from "./types.ts";
import { parseJSONList } from "./base.ts";
import { parseJSONStream } from "./base.ts";
import { doPost } from "./base.ts";

// Definitions for the endpoint to "Generate a completion"
@@ -77,30 +76,16 @@ export const isGenerateCompletionResponse: P<GenerateCompletionResponse> = is
generateCompletionResponseFields,
);

export async function generateCompletion(
  param: GenerateCompletionParam & { stream?: true },
  options?: RequestOptions,
): Promise<Result<GenerateCompletionResponse[]>>;

export async function generateCompletion(
  param: GenerateCompletionParam & { stream: false },
  options?: RequestOptions,
): Promise<Result<GenerateCompletionResponse>>;

/** Generate a response for a given prompt with a provided model.
 * This is a streaming endpoint, so there will be a series of responses.
 * The final response object will include statistics and additional data from the request.
 */
export async function generateCompletion(
  param: GenerateCompletionParam,
  options?: RequestOptions,
): Promise<Result<GenerateCompletionResponse[] | GenerateCompletionResponse>> {
  const response = await doPost("/api/generate", param, options);
  if (param.stream === undefined || param.stream) {
    return await parseJSONList(response, isGenerateCompletionResponse);
  }
  return {
    response,
    body: ensure(await response.json(), isGenerateCompletionResponse),
  };
) {
  return parseJSONStream(
    await doPost("/api/generate", param, options),
    isGenerateCompletionResponse,
  );
}
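
With this change the overloads and the buffered (non-stream) branch are dropped: generateCompletion now always returns the parseJSONStream result, i.e. the Response plus a typed ReadableStream of GenerateCompletionResponse chunks. Below is a minimal hedged sketch of a caller under the new signature; the model name, prompt, and output handling are illustrative, not part of this commit.

import { generateCompletion } from "./generate_completion.ts";

const result = await generateCompletion({
  model: "codellama",
  prompt: "Write a haiku about Vim.",
});
// Each chunk is one validated GenerateCompletionResponse from the NDJSON stream;
// the final chunk carries the statistics mentioned in the doc comment above.
for await (const chunk of result.body ?? []) {
  await Deno.stdout.write(new TextEncoder().encode(chunk.response));
}
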
23 changes: 12 additions & 11 deletions denops/ollama/main.ts
@@ -1,11 +1,9 @@
import { Denops } from "https://deno.land/x/denops_std@…/mod.ts";
import { batch } from "https://deno.land/x/denops_std@…/batch/mod.ts";
import xdg from "https://deno.land/x/xdg@…/src/mod.deno.ts";
import { join } from "https://deno.land/std@…/path/mod.ts";
import { ensureFile } from "https://deno.land/std@…/fs/mod.ts";
import { ensure, is } from "https://deno.land/x/unknownutil@…/mod.ts";
import * as fn from "https://deno.land/x/denops_std@…/function/mod.ts";
import * as v from "https://deno.land/x/denops_std@…/variable/mod.ts";
import {
  getLogger,
  handlers,
@@ -49,17 +47,20 @@ export async function main(denops: Denops) {
    const prompt = ensure(uPrompt, is.String);
    const context = ensure(uContext, is.Record);
    console.debug(`reserved context: ${context}`);
    const result = await generateCompletion({
      model,
      prompt,
    });
    const result = await generateCompletion({ model, prompt });
    const bufnr = await fn.bufadd(denops, "ollama://outputs");
    await fn.bufload(denops, bufnr);
    await fn.appendbufline(
      denops,
      bufnr,
      "$",
      result.body.map((entry) => entry.response).join(""),
    await result.body?.pipeTo(
      new WritableStream({
        write: async (chunk) => {
          const lastLine = await fn.getbufline(denops, bufnr, "$");
          const newLines = chunk.response.split(/\r?\n/);
          await fn.setbufline(denops, bufnr, "$", lastLine + newLines[0]);
          if (newLines.length > 0) {
            await fn.appendbufline(denops, bufnr, "$", newLines.slice(1));
          }
        },
      }),
    );
  } catch (err) {
    getLogger("denops-ollama").error(err);
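
As a note on the sink above: each streamed chunk.response is split on newlines, the first fragment extends the buffer's current last line, and any remaining fragments are appended as fresh lines. Below is a standalone sketch of that rule with a hypothetical helper (not in the commit), assuming the buffer always has at least one line, as a Vim buffer does.

// Hypothetical helper illustrating the line-splitting rule; not part of main.ts.
function applyChunk(bufferLines: string[], chunkText: string): string[] {
  const pieces = chunkText.split(/\r?\n/);
  const merged = [...bufferLines];
  // The first fragment continues the existing last line...
  merged[merged.length - 1] += pieces[0];
  // ...and every newline in the chunk starts a new buffer line.
  merged.push(...pieces.slice(1));
  return merged;
}

// applyChunk(["He"], "llo\nwor")        → ["Hello", "wor"]
// applyChunk(["Hello", "wor"], "ld\n")  → ["Hello", "world", ""]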