From 1e1e129003d6299e4dc224d48c6c32ce2c450118 Mon Sep 17 00:00:00 2001
From: kyoh86
Date: Thu, 28 Dec 2023 17:49:41 +0900
Subject: [PATCH] wip: Vim script entrypoint

---
 autoload/ollama.vim                      |  4 ++++
 denops/ollama/api/base.ts                | 20 ++++++++++++++++++
 denops/ollama/api/generate_completion.ts | 27 ++++++------------------
 denops/ollama/main.ts                    | 23 ++++++++++----------
 4 files changed, 42 insertions(+), 32 deletions(-)
 create mode 100644 autoload/ollama.vim

diff --git a/autoload/ollama.vim b/autoload/ollama.vim
new file mode 100644
index 0000000..cc84cde
--- /dev/null
+++ b/autoload/ollama.vim
@@ -0,0 +1,4 @@
+function ollama#generate_completion() abort
+  let l:prompt = input("Prompt:")
+  call denops#notify("ollama", "generate_completion", ["codellama", l:prompt, {}])
+endfunction
diff --git a/denops/ollama/api/base.ts b/denops/ollama/api/base.ts
index d603762..32864a2 100644
--- a/denops/ollama/api/base.ts
+++ b/denops/ollama/api/base.ts
@@ -76,3 +76,23 @@ export async function parseJSONList(
   );
   return { response, body };
 }
+
+export function parseJSONStream<T>(
+  response: Response,
+  predicate: Predicate<T>,
+) {
+  return {
+    response,
+    body: response.body
+      ?.pipeThrough(new TextDecoderStream())
+      .pipeThrough(new JSONLinesParseStream())
+      .pipeThrough(
+        new TransformStream({
+          transform: (value, controller) => {
+            const item = ensure(value, predicate);
+            controller.enqueue(item);
+          },
+        }),
+      ),
+  };
+}
diff --git a/denops/ollama/api/generate_completion.ts b/denops/ollama/api/generate_completion.ts
index 9266f31..98fe326 100644
--- a/denops/ollama/api/generate_completion.ts
+++ b/denops/ollama/api/generate_completion.ts
@@ -1,11 +1,10 @@
 import {
-  ensure,
   is,
   ObjectOf as O,
   Predicate as P,
 } from "https://deno.land/x/unknownutil@v3.11.0/mod.ts";
 import { isFormat, RequestOptions, Result } from "./types.ts";
-import { parseJSONList } from "./base.ts";
+import { parseJSONStream } from "./base.ts";
 import { doPost } from "./base.ts";
 
 // Definitions for the endpoint to "Generate a completion"
@@ -77,16 +76,6 @@ export const isGenerateCompletionResponse: P<GenerateCompletionResponse> = is
     generateCompletionResponseFields,
   );
 
-export async function generateCompletion(
-  param: GenerateCompletionParam & { stream?: true },
-  options?: RequestOptions,
-): Promise<Result<GenerateCompletionResponse[]>>;
-
-export async function generateCompletion(
-  param: GenerateCompletionParam & { stream: false },
-  options?: RequestOptions,
-): Promise<Result<GenerateCompletionResponse>>;
-
 /** Generate a response for a given prompt with a provided model.
  * This is a streaming endpoint, so there will be a series of responses.
  * The final response object will include statistics and additional data from the request.
@@ -94,13 +83,9 @@ export async function generateCompletion(
 export async function generateCompletion(
   param: GenerateCompletionParam,
   options?: RequestOptions,
-): Promise<Result<GenerateCompletionResponse[] | GenerateCompletionResponse>> {
-  const response = await doPost("/api/generate", param, options);
-  if (param.stream === undefined || param.stream) {
-    return await parseJSONList(response, isGenerateCompletionResponse);
-  }
-  return {
-    response,
-    body: ensure(await response.json(), isGenerateCompletionResponse),
-  };
+) {
+  return parseJSONStream(
+    await doPost("/api/generate", param, options),
+    isGenerateCompletionResponse,
+  );
 }
diff --git a/denops/ollama/main.ts b/denops/ollama/main.ts
index 7b1cc88..f378bdd 100644
--- a/denops/ollama/main.ts
+++ b/denops/ollama/main.ts
@@ -1,11 +1,9 @@
 import { Denops } from "https://deno.land/x/denops_std@v5.2.0/mod.ts";
-import { batch } from "https://deno.land/x/denops_std@v5.2.0/batch/mod.ts";
 import xdg from "https://deno.land/x/xdg@v10.6.0/src/mod.deno.ts";
 import { join } from "https://deno.land/std@0.210.0/path/mod.ts";
 import { ensureFile } from "https://deno.land/std@0.210.0/fs/mod.ts";
 import { ensure, is } from "https://deno.land/x/unknownutil@v3.11.0/mod.ts";
 import * as fn from "https://deno.land/x/denops_std@v5.2.0/function/mod.ts";
-import * as v from "https://deno.land/x/denops_std@v5.2.0/variable/mod.ts";
 import {
   getLogger,
   handlers,
@@ -49,17 +47,20 @@ export async function main(denops: Denops) {
         const prompt = ensure(uPrompt, is.String);
         const context = ensure(uContext, is.Record);
         console.debug(`reserved context: ${context}`);
-        const result = await generateCompletion({
-          model,
-          prompt,
-        });
+        const result = await generateCompletion({ model, prompt });
        const bufnr = await fn.bufadd(denops, "ollama://outputs");
         await fn.bufload(denops, bufnr);
-        await fn.appendbufline(
-          denops,
-          bufnr,
-          "$",
-          result.body.map((entry) => entry.response).join(""),
+        await result.body?.pipeTo(
+          new WritableStream({
+            write: async (chunk) => {
+              const lastLine = await fn.getbufline(denops, bufnr, "$");
+              const newLines = chunk.response.split(/\r?\n/);
+              await fn.setbufline(denops, bufnr, "$", lastLine + newLines[0]);
+              if (newLines.length > 0) {
+                await fn.appendbufline(denops, bufnr, "$", newLines.slice(1));
+              }
+            },
+          }),
         );
       } catch (err) {
         getLogger("denops-ollama").error(err);