From 2b85fd5efe9880c101f0818c35b9e56cdda17f06 Mon Sep 17 00:00:00 2001
From: Phodal Huang
Date: Thu, 30 Nov 2023 09:54:50 +0800
Subject: [PATCH] fix: fix compile issue for sample

---
 .../editor/action/command-functions.ts | 152 +++++++++---------
 src/prompts/prebuild-prompts.ts         |   4 +-
 src/types/llm-model.type.ts             |   6 +
 3 files changed, 83 insertions(+), 79 deletions(-)

diff --git a/src/components/editor/action/command-functions.ts b/src/components/editor/action/command-functions.ts
index 9d6feb8..6a6fada 100644
--- a/src/components/editor/action/command-functions.ts
+++ b/src/components/editor/action/command-functions.ts
@@ -1,95 +1,93 @@
-import {Commands, Extension} from "@tiptap/react";
-import {Editor} from "@tiptap/core";
-import {Transaction} from "prosemirror-state";
+import { Commands, Extension } from "@tiptap/react";
+import { Editor } from "@tiptap/core";
+import { Transaction } from "prosemirror-state";
 import {
-    ChangeForm,
-    FacetType,
-    PromptAction,
+  FacetType,
+  PromptAction,
 } from "@/types/custom-action.type";
-import {PromptsManager} from "@/prompts/prompts-manager";
-import {Range} from "@tiptap/core";
-import {ActionExecutor} from "@/components/editor/action/ActionExecutor";
+import { PromptsManager } from "@/prompts/prompts-manager";
+import { ActionExecutor } from "@/components/editor/action/ActionExecutor";
 
 declare module "@tiptap/core" {
-    interface Commands {
-        variable: {
-            variable: () => ReturnType;
-        };
+  interface Commands {
+    variable: {
+      variable: () => ReturnType;
+    };
 
-        getSelectedText: {
-            getSelectedText: () => string;
-        };
+    getSelectedText: {
+      getSelectedText: () => string;
+    };
 
-        callLlm: {
-            callLlm: (action: PromptAction) => void;
-        }
+    callLlm: {
+      callLlm: (action: PromptAction) => void;
+    }
 
-        getAiActions: {
-            getAiActions: (facet: FacetType) => PromptAction[];
-        };
+    getAiActions: {
+      getAiActions: (facet: FacetType) => PromptAction[];
+    };
 
-        runAiAction: {
-            runAiAction: (action: PromptAction) => ReturnType;
-        };
-        setBackgroundContext: () => ReturnType,
-    }
+    runAiAction: {
+      runAiAction: (action: PromptAction) => ReturnType;
+    };
+    setBackgroundContext: () => ReturnType,
+  }
 }
 
 export const CommandFunctions = Extension.create({
-    name: "commandFunctions",
-    // @ts-ignore
-    addCommands: () => {
-        return {
-            // for examples: $selection, $beforeCursor
-            variable: (variableName: string, variableValue: string) => () => {
-                console.log("variable", variableName, variableValue);
-            },
-            getSelectedText:
-                () =>
-                    ({editor}: { editor: Editor }) => {
-                        if (!editor.state) return null;
+  name: "commandFunctions",
+  // @ts-ignore
+  addCommands: () => {
+    return {
+      // for examples: $selection, $beforeCursor
+      variable: (variableName: string, variableValue: string) => () => {
+        console.log("variable", variableName, variableValue);
+      },
+      getSelectedText:
+        () =>
+          ({ editor }: { editor: Editor }) => {
+            if (!editor.state) return null;
 
-                        const {from, to, empty} = editor.state.selection;
+            const { from, to, empty } = editor.state.selection;
 
-                        if (empty) return null;
+            if (empty) return null;
 
-                        return editor.state.doc.textBetween(from, to, " ");
-                    },
-            callLlm:
-                (action: PromptAction) =>
-                    async ({tr, commands, editor}: { tr: Transaction; commands: Commands, editor: Editor }) => {
-                        // do execute action
-                        const actionExecutor = new ActionExecutor(action, editor);
-                        actionExecutor.compile();
-                        if (action.compiledTemplate == null) {
-                            throw Error("template is not been compiled yet! compile it first");
-                        }
+            return editor.state.doc.textBetween(from, to, " ");
+          },
+      callLlm:
+        (action: PromptAction) =>
+          async ({ tr, commands, editor }: { tr: Transaction; commands: Commands, editor: Editor }) => {
+            // do execute action
+            const actionExecutor = new ActionExecutor(action, editor);
+            actionExecutor.compile();
+            if (action.compiledTemplate == null) {
+              throw Error("template is not been compiled yet! compile it first");
+            }
+            console.info("compiledTemplate: \n\n", action.compiledTemplate);
 
-                        const msg = await fetch("/api/completion", {
-                            method: "POST",
-                            body: JSON.stringify({prompt: action.compiledTemplate}),
-                        }).then(it => it.text());
+            const msg = await fetch("/api/completion/openai", {
+              method: "POST",
+              body: JSON.stringify({ prompt: action.compiledTemplate }),
+            }).then(it => it.text());
 
-                        const posInfo = actionExecutor.position(editor.state.selection);
-                        editor.chain().focus().insertContentAt(posInfo, msg).run();
+            const posInfo = actionExecutor.position(editor.state.selection);
+            editor.chain().focus().insertContentAt(posInfo, msg).run();
 
-                    },
-            getAiActions:
-                (facet: FacetType) =>
-                    ({editor}: { editor: Editor }) => {
-                        return PromptsManager.getInstance().get(facet);
-                    },
-            runAiAction:
-                (action: PromptAction) =>
-                    ({editor}: { editor: Editor }) => {
-                        // call LLM
-                        console.log("executeAction", action);
-                    },
-            setBackgroundContext:
-                (context: string) =>
-                    ({editor}: { editor: Editor }) => {
-                        PromptsManager.getInstance().saveBackgroundContext(context);
-                    },
-        };
-    },
+          },
+      getAiActions:
+        (facet: FacetType) =>
+          ({ editor }: { editor: Editor }) => {
+            return PromptsManager.getInstance().get(facet);
+          },
+      runAiAction:
+        (action: PromptAction) =>
+          ({ editor }: { editor: Editor }) => {
+            editor.commands.callLlm(action);
+          },
+      setBackgroundContext:
+        (context: string) =>
+          ({ editor }: { editor: Editor }) => {
+            PromptsManager.getInstance().saveBackgroundContext(context);
+          },
+    };
+  },
 });
diff --git a/src/prompts/prebuild-prompts.ts b/src/prompts/prebuild-prompts.ts
index 9aa332c..b025c7b 100644
--- a/src/prompts/prebuild-prompts.ts
+++ b/src/prompts/prebuild-prompts.ts
@@ -74,10 +74,10 @@ const BubbleMenu: PromptAction[] = [
 
 const SlashCommands: PromptAction[] = [
   {
-    sourceType: SourceType.SELECTION,
+    sourceType: SourceType.BEFORE_CURSOR,
     name: 'Summarize',
     i18Name: true,
-    template: `You are an assistant helping to summarize a document. Output in markdown format. \n ###{{${DefinedVariable.SELECTION}}}###`,
+    template: `You are an assistant helping to summarize a document. Output in markdown format. \n ###{{${DefinedVariable.BEFORE_CURSOR}}}###`,
     facetType: FacetType.SLASH_COMMAND,
     outputForm: OutputForm.STREAMING
   }
diff --git a/src/types/llm-model.type.ts b/src/types/llm-model.type.ts
index 33e1253..8afef7f 100644
--- a/src/types/llm-model.type.ts
+++ b/src/types/llm-model.type.ts
@@ -4,6 +4,12 @@ export enum OPENAI_MODEL {
   CHATGPT_4 = 'chatgpt-4',
 }
 
+// aka Yiyan
+export enum ERNIEBOT {
+  ERNIEBOT = 'ernie-bot',
+}
+
 export const LlmModelType = {
   OPENAI: OPENAI_MODEL,
+  ERNIEBOT: ERNIEBOT,
 }