diff --git a/api/package.json b/api/package.json index 506e35e1b..0f9378c81 100644 --- a/api/package.json +++ b/api/package.json @@ -39,6 +39,9 @@ "fpx": "bin/cli.js" }, "dependencies": { + "@ai-sdk/anthropic": "^0.0.51", + "@ai-sdk/mistral": "^0.0.42", + "@ai-sdk/openai": "^0.0.66", "@anthropic-ai/sdk": "^0.24.3", "@fiberplane/fpx-types": "workspace:*", "@hono/node-server": "^1.11.1", @@ -48,6 +51,7 @@ "@libsql/client": "^0.6.2", "acorn": "^8.11.3", "acorn-walk": "^8.3.2", + "ai": "^3.4.10", "chalk": "^5.3.0", "dotenv": "^16.4.5", "drizzle-kit": "^0.24.2", diff --git a/api/src/lib/ai/anthropic.ts b/api/src/lib/ai/anthropic.ts deleted file mode 100644 index 24fdc6afb..000000000 --- a/api/src/lib/ai/anthropic.ts +++ /dev/null @@ -1,118 +0,0 @@ -import Anthropic from "@anthropic-ai/sdk"; -import logger from "../../logger.js"; -import { getSystemPrompt, invokeRequestGenerationPrompt } from "./prompts.js"; -import { makeRequestTool as makeRequestToolBase } from "./tools.js"; - -// Convert the tool call into the format that Anthropic suggests (different than openai's api) -const makeRequestTool = { - name: makeRequestToolBase.function.name, - description: makeRequestToolBase.function.description, - input_schema: makeRequestToolBase.function.parameters, -}; - -type GenerateRequestOptions = { - apiKey: string; - model: string; - persona: string; - method: string; - path: string; - handler: string; - baseUrl?: string; - history?: Array; - handlerContext?: string; - openApiSpec?: string; - middleware?: { - handler: string; - method: string; - path: string; - }[]; - middlewareContext?: string; -}; - -/** - * Generates request data for a route handler - * - uses Anthropic's tool-calling feature. - * - returns the request data as JSON. - * - * See the JSON Schema definition for the request data in the `make_request` tool. - */ -export async function generateRequestWithAnthropic({ - apiKey, - baseUrl, - model, - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, -}: GenerateRequestOptions) { - logger.debug( - "Generating request data with Anthropic", - `model: ${model}`, - `baseUrl: ${baseUrl}`, - `persona: ${persona}`, - `method: ${method}`, - `path: ${path}`, - `handler: ${handler}`, - // `handlerContext: ${handlerContext}`, - // `openApiSpec: ${openApiSpec}`, - // `middleware: ${middleware}`, - // `middlewareContext: ${middlewareContext}`, - ); - const anthropicClient = new Anthropic({ apiKey, baseURL: baseUrl }); - const userPrompt = await invokeRequestGenerationPrompt({ - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, - }); - - const toolChoice: Anthropic.Messages.MessageCreateParams.ToolChoiceTool = { - type: "tool", - name: makeRequestTool.name, - }; - - const response = await anthropicClient.messages.create({ - model, - tool_choice: toolChoice, - tools: [makeRequestTool], - system: getSystemPrompt(persona), - messages: [ - { - role: "user", - content: userPrompt, - }, - ], - temperature: 0.06, - max_tokens: 2048, - }); - - const { content } = response; - - let toolArgs: Anthropic.Messages.ToolUseBlock["input"]; - for (const message of content) { - if (message.type === "tool_use") { - logger.debug( - "Anthropic tool use response:", - JSON.stringify(message, null, 2), - ); - toolArgs = message.input; - return toolArgs; - } - } - - logger.error( - "Parsing tool-call response from Anthropic failed. 
Response content:", - JSON.stringify(content, null, 2), - ); - throw new Error("Could not parse response from Anthropic"); -} diff --git a/api/src/lib/ai/index.ts b/api/src/lib/ai/index.ts index 04a4bfbc7..43073dc57 100644 --- a/api/src/lib/ai/index.ts +++ b/api/src/lib/ai/index.ts @@ -1,6 +1,45 @@ +import { createAnthropic } from "@ai-sdk/anthropic"; +import { createMistral } from "@ai-sdk/mistral"; +import { createOpenAI } from "@ai-sdk/openai"; import type { Settings } from "@fiberplane/fpx-types"; -import { generateRequestWithAnthropic } from "./anthropic.js"; -import { generateRequestWithOpenAI } from "./openai.js"; +import { generateObject } from "ai"; +import logger from "../../logger.js"; +import { invokeRequestGenerationPrompt } from "./prompts.js"; +import { requestSchema } from "./tools.js"; + +function configureProvider( + aiProvider: string, + providerConfig: { + apiKey: string; + baseUrl?: string | undefined; + model: string; + }, +) { + if (aiProvider === "openai") { + const openai = createOpenAI({ + apiKey: providerConfig.apiKey, + baseURL: providerConfig.baseUrl ?? undefined, + }); + return openai(providerConfig.model, { structuredOutputs: true }); + } + if (aiProvider === "anthropic") { + const anthropic = createAnthropic({ + apiKey: providerConfig.apiKey, + baseURL: providerConfig.baseUrl ?? undefined, + }); + return anthropic(providerConfig.model); + } + + if (aiProvider === "mistral") { + const mistral = createMistral({ + apiKey: providerConfig.apiKey, + baseURL: providerConfig.baseUrl ?? undefined, + }); + return mistral(providerConfig.model); + } + + throw new Error("Unknown AI provider"); +} export async function generateRequestWithAiProvider({ inferenceConfig, @@ -29,67 +68,78 @@ export async function generateRequestWithAiProvider({ }[]; middlewareContext?: string; }) { - const { - openaiApiKey, - openaiModel, - openaiBaseUrl, - anthropicApiKey, - anthropicModel, - anthropicBaseUrl, - aiProviderType, - } = inferenceConfig; - if (aiProviderType === "openai") { - return generateRequestWithOpenAI({ - apiKey: openaiApiKey ?? "", - model: openaiModel ?? "", - baseUrl: openaiBaseUrl, - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, - }).then( - (parsedArgs) => { - return { data: parsedArgs, error: null }; - }, - (error) => { - if (error instanceof Error) { - return { data: null, error: { message: error.message } }; - } - return { data: null, error: { message: "Unknown error" } }; - }, - ); + const { aiEnabled, aiProviderConfigurations, aiProvider } = inferenceConfig; + if (!aiEnabled) { + return { data: null, error: { message: "AI is not enabled" } }; } - if (aiProviderType === "anthropic") { - return generateRequestWithAnthropic({ - apiKey: anthropicApiKey ?? "", - baseUrl: anthropicBaseUrl, - model: anthropicModel ?? 
"", - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, - }).then( - (parsedArgs) => { - return { data: parsedArgs, error: null }; - }, - (error) => { - if (error instanceof Error) { - return { data: null, error: { message: error.message } }; - } - return { data: null, error: { message: "Unknown error" } }; - }, - ); + + if (!aiProvider) { + return { data: null, error: { message: "AI provider is not set" } }; } - return { data: null, error: { message: "Unknown AI provider" } }; + if (!aiProviderConfigurations || !aiProviderConfigurations[aiProvider]) { + return { + data: null, + error: { message: "AI provider is not configured properly" }, + }; + } + + const providerConfig = aiProviderConfigurations[aiProvider]; + + const provider = configureProvider(aiProvider, providerConfig); + + logger.debug("Generating request with AI provider", { + aiProvider, + providerConfig, + }); + + try { + const { + object: generatedObject, + warnings, + usage, + } = await generateObject({ + model: provider, + schema: requestSchema, + prompt: await invokeRequestGenerationPrompt({ + handler, + handlerContext, + history, + openApiSpec, + middleware, + middlewareContext, + persona, + method, + path, + }), + }); + + logger.debug("Generated object, warnings, usage", { + generatedObject, + warnings, + usage, + }); + + // Remove x-fpx-trace-id header from the generated object + const filteredHeaders = generatedObject?.headers?.filter( + (header) => header.key.toLowerCase() !== "x-fpx-trace-id", + ); + + return { + data: { ...generatedObject, headers: filteredHeaders }, + error: null, + }; + } catch (error) { + logger.error("Error generating request with AI provider", { + error, + }); + const errorMessage = + error instanceof Error + ? error.message + : "Error generating request with AI provider"; + return { + data: null, + error: { message: errorMessage }, + }; + } } diff --git a/api/src/lib/ai/openai.ts b/api/src/lib/ai/openai.ts deleted file mode 100644 index ddfca3bf4..000000000 --- a/api/src/lib/ai/openai.ts +++ /dev/null @@ -1,110 +0,0 @@ -import OpenAI from "openai"; -import logger from "../../logger.js"; -import { getSystemPrompt, invokeRequestGenerationPrompt } from "./prompts.js"; -import { makeRequestTool } from "./tools.js"; - -type GenerateRequestOptions = { - apiKey: string; - baseUrl?: string; - model: string; - persona: string; - method: string; - path: string; - handler: string; - handlerContext?: string; - history?: Array; - openApiSpec?: string; - middleware?: { - handler: string; - method: string; - path: string; - }[]; - middlewareContext?: string; -}; - -/** - * Generates request data for a route handler - * - uses OpenAI's tool-calling feature. - * - returns the request data as JSON. - * - * See the JSON Schema definition for the request data in the `make_request` tool. 
- */ -export async function generateRequestWithOpenAI({ - apiKey, - baseUrl, - model, - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, -}: GenerateRequestOptions) { - logger.debug( - "Generating request data with OpenAI", - `model: ${model}`, - `baseUrl: ${baseUrl}`, - `persona: ${persona}`, - `method: ${method}`, - `path: ${path}`, - // `handler: ${handler}`, - // `handlerContext: ${handlerContext}`, - // `openApiSpec: ${openApiSpec}`, - // `middleware: ${middleware}`, - // `middlewareContext: ${middlewareContext}`, - ); - const openaiClient = new OpenAI({ apiKey, baseURL: baseUrl }); - const userPrompt = await invokeRequestGenerationPrompt({ - persona, - method, - path, - handler, - handlerContext, - history, - openApiSpec, - middleware, - middlewareContext, - }); - - const response = await openaiClient.chat.completions.create({ - // NOTE - Later models (gpt-4o, gpt-4-turbo) should guarantee function calling to have json output - model, - // NOTE - We can restrict the response to be from this single tool call - tool_choice: { - type: "function", - function: { name: makeRequestTool.function.name }, - }, - // Define the make_request tool - tools: [makeRequestTool], - messages: [ - { - role: "system", - content: getSystemPrompt(persona), - }, - { - role: "user", - content: userPrompt, - }, - ], - temperature: 0.12, - max_tokens: 2048, - }); - - const { - choices: [{ message }], - } = response; - - const makeRequestCall = message.tool_calls?.[0]; - const toolArgs = makeRequestCall?.function?.arguments; - - try { - const parsedArgs = toolArgs ? JSON.parse(toolArgs) : null; - return parsedArgs; - } catch (error) { - logger.error("Parsing tool-call response from OpenAI failed:", error); - throw new Error("Could not parse response from OpenAI"); - } -} diff --git a/api/src/lib/ai/prompts.ts b/api/src/lib/ai/prompts.ts index 2bd07c1e6..375826b69 100644 --- a/api/src/lib/ai/prompts.ts +++ b/api/src/lib/ai/prompts.ts @@ -208,7 +208,7 @@ If it appears that more fields are coming alongside a file, return a body type o For form data, you can return a body type of "form-data". You can still return a JSON object like above, I will handle converting it to form data. -Never add the x-fpx-trace-id header to the request. +Even if you might see it in history - never add the x-fpx-trace-id header to the request. === @@ -296,7 +296,7 @@ Try strategies like specifying invalid data, missing data, or invalid data types Try to break the system. But do not break yourself! Keep your responses to a reasonable length. Including your random data. -Never add the x-fpx-trace-id header to the request. +Even if you might see it in history - never add the x-fpx-trace-id header to the request. Use the tool "make_request". Always respond in valid JSON. 
***Don't make your responses too long, otherwise we cannot parse your JSON response.*** diff --git a/api/src/lib/ai/tools.ts b/api/src/lib/ai/tools.ts index e05063150..9b410df06 100644 --- a/api/src/lib/ai/tools.ts +++ b/api/src/lib/ai/tools.ts @@ -1,76 +1,47 @@ +import { z } from "zod"; + +// NOTE - We cannot use `.optional` from zod because it does not play nicely with structured output +export const requestSchema = z.object({ + path: z.string(), + pathParams: z + .array( + z.object({ + key: z.string(), + value: z.string(), + }), + ) + .nullable(), + queryParams: z + .array( + z.object({ + key: z.string(), + value: z.string(), + }), + ) + .nullable(), + body: z.string().nullable(), + bodyType: z + .object({ + type: z.enum(["json", "text", "form-data", "file"]), + isMultipart: z.boolean(), + }) + .nullable(), + headers: z + .array( + z.object({ + key: z.string(), + value: z.string(), + }), + ) + .nullable(), +}); + export const makeRequestTool = { type: "function", function: { name: "make_request", description: "Generates some random data for an http request to an api backend", - // Describe parameters as json schema https://json-schema.org/understanding-json-schema/ - parameters: { - type: "object", - properties: { - path: { - type: "string", - }, - pathParams: { - type: "array", - items: { - type: "object", - properties: { - key: { - type: "string", - }, - value: { - type: "string", - }, - }, - }, - }, - queryParams: { - type: "array", - items: { - type: "object", - properties: { - key: { - type: "string", - }, - value: { - type: "string", - }, - }, - }, - }, - body: { - type: "string", - }, - bodyType: { - type: "object" as const, - properties: { - type: { - type: "string" as const, - enum: ["json", "text", "form-data", "file"], - }, - isMultipart: { - type: "boolean" as const, - }, - }, - }, - headers: { - type: "array", - items: { - type: "object", - properties: { - key: { - type: "string", - }, - value: { - type: "string", - }, - }, - }, - }, - }, - // TODO - Mark fields like `pathParams` as required based on the route definition? 
- required: ["path"], - }, + parameters: requestSchema, }, -} as const; +}; diff --git a/api/src/routes/inference/inference.ts b/api/src/routes/inference/inference.ts index f81519ac8..139572558 100644 --- a/api/src/routes/inference/inference.ts +++ b/api/src/routes/inference/inference.ts @@ -1,11 +1,9 @@ import { zValidator } from "@hono/zod-validator"; import { Hono } from "hono"; import { cors } from "hono/cors"; -import OpenAI from "openai"; import { z } from "zod"; import { USER_PROJECT_ROOT_DIR } from "../../constants.js"; import { generateRequestWithAiProvider } from "../../lib/ai/index.js"; -import { cleanPrompt } from "../../lib/ai/prompts.js"; import { expandFunction } from "../../lib/expand-function/index.js"; import { getInferenceConfig } from "../../lib/settings/index.js"; import type { Bindings, Variables } from "../../lib/types.js"; @@ -101,64 +99,4 @@ app.post( }, ); -app.post( - "/v0/analyze-error", - cors(), - zValidator( - "json", - z.object({ errorMessage: z.string(), handlerSourceCode: z.string() }), - ), - async (ctx) => { - const { handlerSourceCode, errorMessage } = ctx.req.valid("json"); - - const db = ctx.get("db"); - const inferenceConfig = await getInferenceConfig(db); - if (!inferenceConfig) { - return ctx.json( - { - error: "No OpenAI configuration found", - }, - 403, - ); - } - const { openaiApiKey, openaiModel } = inferenceConfig; - const openaiClient = new OpenAI({ - apiKey: openaiApiKey, - }); - const response = await openaiClient.chat.completions.create({ - model: openaiModel ?? "gpt-4o", // TODO - Update this to use correct model and provider (later problem) - messages: [ - { - role: "system", - content: cleanPrompt(` - You are a code debugging assistant for apps that use Hono (web framework), - Neon (serverless postgres), Drizzle (ORM), and run on Cloudflare workers. - You are given a function and an error message. - Provide a succinct suggestion to fix the error, or say "I need more context to help fix this". 
- `), - }, - { - role: "user", - content: cleanPrompt(` - I hit the following error: - ${errorMessage} - This error originated in the following route handler for my Hono application: - ${handlerSourceCode} - `), - }, - ], - temperature: 0, - max_tokens: 2048, - }); - - const { - choices: [{ message }], - } = response; - - return ctx.json({ - suggestion: message.content, - }); - }, -); - export default app; diff --git a/api/src/routes/settings.ts b/api/src/routes/settings.ts index 05b46dd1e..001d5155f 100644 --- a/api/src/routes/settings.ts +++ b/api/src/routes/settings.ts @@ -31,10 +31,10 @@ app.post("/v0/settings", cors(), async (ctx) => { const parsedContent = SettingsSchema.parse(content); // Remove the stored api key if the feature is disabled - if (!parsedContent.aiEnabled) { - parsedContent.openaiApiKey = undefined; - parsedContent.anthropicApiKey = undefined; - } + // if (!parsedContent.aiEnabled) { + // parsedContent.aiProviderConfigurations?.openai?.apiKey = ""; + // parsedContent.aiProviderConfigurations?.anthropic?.apiKey = ""; + // } logger.debug("Updating settings", { content }); diff --git a/packages/types/package.json b/packages/types/package.json index e811dc113..d98efabc7 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -16,5 +16,10 @@ }, "devDependencies": { "zod": "^3.23.8" + }, + "dependencies": { + "@ai-sdk/anthropic": "^0.0.51", + "@ai-sdk/mistral": "^0.0.42", + "@ai-sdk/openai": "^0.0.66" } } diff --git a/packages/types/src/settings.ts b/packages/types/src/settings.ts index d3d089c28..a8f5b21f3 100644 --- a/packages/types/src/settings.ts +++ b/packages/types/src/settings.ts @@ -1,62 +1,117 @@ +import type { AnthropicProvider } from "@ai-sdk/anthropic"; +import type { MistralProvider } from "@ai-sdk/mistral"; +import type { OpenAIProvider } from "@ai-sdk/openai"; import { z } from "zod"; -export const CLAUDE_3_5_SONNET = "claude-3-5-sonnet-20240620"; -export const CLAUDE_3_OPUS = "claude-3-opus-20240229"; -export const CLAUDE_3_SONNET = "claude-3-sonnet-20240229"; -export const CLAUDE_3_HAIKU = "claude-3-haiku-20240307"; +export type MistralModelOptionsType = MistralProvider extends ( + modelId: infer T, + ...args: unknown[] +) => unknown + ? T + : never; -export const AnthropicModelOptions = { - [CLAUDE_3_5_SONNET]: "Claude 3.5 Sonnet", - [CLAUDE_3_OPUS]: "Claude 3 Opus", - [CLAUDE_3_SONNET]: "Claude 3 Sonnet", - [CLAUDE_3_HAIKU]: "Claude 3 Haiku", -} as const; +export const MistralModelOptions: Partial< + Record +> = { + "open-mistral-7b": "Open Mistral 7B", + "open-mixtral-8x7b": "Open Mixtral 8x7B", + "open-mixtral-8x22b": "Open Mixtral 8x22B", + "open-mistral-nemo": "Open Mistral Nemo", + "mistral-small-latest": "Mistral Small (Latest)", + "mistral-large-latest": "Mistral Large (Latest)", +}; + +export const MistralModelSchema = z.union([ + z.literal("open-mistral-7b"), + z.literal("open-mixtral-8x7b"), + z.literal("open-mixtral-8x22b"), + z.literal("open-mistral-nemo"), + z.literal("mistral-small-latest"), + z.literal("mistral-large-latest"), +]); + +export type AnthropicModelOptionsType = AnthropicProvider extends ( + modelId: infer T, + ...args: unknown[] +) => unknown + ? 
T + : never; + +export const AnthropicModelOptions: Partial< + Record +> = { + "claude-3-opus-20240229": "Claude 3 Opus", + "claude-3-sonnet-20240229": "Claude 3 Sonnet", + "claude-3-haiku-20240307": "Claude 3 Haiku", + "claude-3-5-sonnet-20240620": "Claude 3.5 Sonnet", +}; export const AnthropicModelSchema = z.union([ - z.literal(CLAUDE_3_5_SONNET), - z.literal(CLAUDE_3_OPUS), - z.literal(CLAUDE_3_SONNET), - z.literal(CLAUDE_3_HAIKU), + z.literal("claude-3-opus-20240229"), + z.literal("claude-3-sonnet-20240229"), + z.literal("claude-3-haiku-20240307"), + z.literal("claude-3-5-sonnet-20240620"), ]); export type AnthropicModel = z.infer; -export const GPT_4o = "gpt-4o"; -export const GPT_4o_MINI = "gpt-4o-mini"; -export const GPT_4_TURBO = "gpt-4-turbo"; +export type OpenAIModelOptionsType = OpenAIProvider extends ( + modelId: infer T, + ...args: unknown[] +) => unknown + ? T + : never; -export const OpenAiModelOptions = { - [GPT_4o]: "GPT-4o", - [GPT_4o_MINI]: "GPT-4o Mini", - [GPT_4_TURBO]: "GPT-4 Turbo", -} as const; +export const OpenAIModelOptions: Partial< + Record +> = { + "gpt-4": "GPT-4", + "gpt-3.5-turbo": "GPT-3.5 Turbo", + "gpt-4o": "GPT-4o", + "gpt-4o-mini": "GPT-4o Mini", + "gpt-4-turbo": "GPT-4 Turbo", +}; -export const OpenAiModelSchema = z.union([ - z.literal(GPT_4o), - z.literal(GPT_4o_MINI), - z.literal(GPT_4_TURBO), +export const OpenAIModelSchema = z.union([ + z.literal("gpt-4"), + z.literal("gpt-3.5-turbo"), + z.literal("gpt-4o"), + z.literal("gpt-4o-mini"), + z.literal("gpt-4-turbo"), ]); -export type OpenAiModel = z.infer; +export type OpenAIModel = z.infer; export const ProviderOptions = { openai: "OpenAI", anthropic: "Anthropic", + mistral: "Mistral", } as const; export const AiProviderTypeSchema = z.union([ z.literal("openai"), z.literal("anthropic"), + z.literal("mistral"), ]); export type AiProviderType = z.infer; export const SettingsSchema = z.object({ aiEnabled: z.boolean().optional(), - aiProviderType: AiProviderTypeSchema.optional(), - anthropicApiKey: z.string().optional(), - anthropicBaseUrl: z.string().optional(), - anthropicModel: AnthropicModelSchema.optional(), + aiProvider: AiProviderTypeSchema.optional(), + aiProviderConfigurations: z + .record( + AiProviderTypeSchema, + z.object({ + apiKey: z.string(), + baseUrl: z.string().optional(), + model: z.string(), + }), + ) + .optional(), + proxyBaseUrl: z.string().optional(), + proxyRequestsEnabled: z.boolean().optional(), + webhoncConnectionId: z.string().optional(), fpxWorkerProxy: z .object({ enabled: z.boolean().optional(), @@ -66,12 +121,6 @@ export const SettingsSchema = z.object({ baseUrl: z.union([z.literal(""), z.string().trim().url()]).optional(), }) .optional(), - openaiApiKey: z.string().optional(), - openaiBaseUrl: z.string().optional(), - openaiModel: OpenAiModelSchema.optional(), - proxyBaseUrl: z.string().optional(), - proxyRequestsEnabled: z.boolean().optional(), - webhoncConnectionId: z.string().optional(), }); export type Settings = z.infer; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5cb992bd8..5988ff861 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -26,6 +26,15 @@ importers: api: dependencies: + '@ai-sdk/anthropic': + specifier: ^0.0.51 + version: 0.0.51(zod@3.23.8) + '@ai-sdk/mistral': + specifier: ^0.0.42 + version: 0.0.42(zod@3.23.8) + '@ai-sdk/openai': + specifier: ^0.0.66 + version: 0.0.66(zod@3.23.8) '@anthropic-ai/sdk': specifier: ^0.24.3 version: 0.24.3(encoding@0.1.13) @@ -53,6 +62,9 @@ importers: acorn-walk: specifier: ^8.3.2 version: 8.3.3 + ai: + specifier: 
^3.4.10 + version: 3.4.10(openai@4.55.4(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@4.2.19))(svelte@4.2.19)(vue@3.5.12(typescript@5.6.2))(zod@3.23.8) chalk: specifier: ^5.3.0 version: 5.3.0 @@ -305,6 +317,16 @@ importers: version: 1.6.0(@types/node@20.14.15) packages/types: + dependencies: + '@ai-sdk/anthropic': + specifier: ^0.0.51 + version: 0.0.51(zod@3.23.8) + '@ai-sdk/mistral': + specifier: ^0.0.42 + version: 0.0.42(zod@3.23.8) + '@ai-sdk/openai': + specifier: ^0.0.66 + version: 0.0.66(zod@3.23.8) devDependencies: zod: specifier: ^3.23.8 @@ -597,6 +619,85 @@ importers: packages: + '@ai-sdk/anthropic@0.0.51': + resolution: {integrity: sha512-XPLBvdwdMlNAvGMyfsDgrCDXN2Wz7M+wfCJthqiwdiKHmq2jDLGdt0ZCAozgxxW28HVzMfJlFjuyECiA5Le3YA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/mistral@0.0.42': + resolution: {integrity: sha512-wvtvfXJcyOAWNuc3ZaWogIcTfKyHLw7sudzzyGYb7jHQgZRRlrythuMeoR9OcC4USEKE2xgYyAQ/JF31TZUPKw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/openai@0.0.66': + resolution: {integrity: sha512-V4XeDnlNl5/AY3GB3ozJUjqnBLU5pK3DacKTbCNH3zH8/MggJoH6B8wRGdLUPVFMcsMz60mtvh4DC9JsIVFrKw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/provider-utils@1.0.20': + resolution: {integrity: sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + + '@ai-sdk/provider@0.0.24': + resolution: {integrity: sha512-XMsNGJdGO+L0cxhhegtqZ8+T6nn4EoShS819OvCgI2kLbYTIvk0GWFGD0AXJmxkxs3DrpsJxKAFukFR7bvTkgQ==} + engines: {node: '>=18'} + + '@ai-sdk/react@0.0.62': + resolution: {integrity: sha512-1asDpxgmeHWL0/EZPCLENxfOHT+0jce0z/zasRhascodm2S6f6/KZn5doLG9jdmarcb+GjMjFmmwyOVXz3W1xg==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 + zod: ^3.0.0 + peerDependenciesMeta: + react: + optional: true + zod: + optional: true + + '@ai-sdk/solid@0.0.49': + resolution: {integrity: sha512-KnfWTt640cS1hM2fFIba8KHSPLpOIWXtEm28pNCHTvqasVKlh2y/zMQANTwE18pF2nuXL9P9F5/dKWaPsaEzQw==} + engines: {node: '>=18'} + peerDependencies: + solid-js: ^1.7.7 + peerDependenciesMeta: + solid-js: + optional: true + + '@ai-sdk/svelte@0.0.51': + resolution: {integrity: sha512-aIZJaIds+KpCt19yUDCRDWebzF/17GCY7gN9KkcA2QM6IKRO5UmMcqEYja0ZmwFQPm1kBZkF2njhr8VXis2mAw==} + engines: {node: '>=18'} + peerDependencies: + svelte: ^3.0.0 || ^4.0.0 + peerDependenciesMeta: + svelte: + optional: true + + '@ai-sdk/ui-utils@0.0.46': + resolution: {integrity: sha512-ZG/wneyJG+6w5Nm/hy1AKMuRgjPQToAxBsTk61c9sVPUTaxo+NNjM2MhXQMtmsja2N5evs8NmHie+ExEgpL3cA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + + '@ai-sdk/vue@0.0.54': + resolution: {integrity: sha512-Ltu6gbuii8Qlp3gg7zdwdnHdS4M8nqKDij2VVO1223VOtIFwORFJzKqpfx44U11FW8z2TPVBYN+FjkyVIcN2hg==} + engines: {node: '>=18'} + peerDependencies: + vue: ^3.3.4 + peerDependenciesMeta: + vue: + optional: true + '@alloc/quick-lru@5.2.0': resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} @@ -3317,6 +3418,9 @@ packages: '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + '@types/diff-match-patch@1.0.36': + resolution: {integrity: 
sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} + '@types/estree-jsx@1.0.5': resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} @@ -3500,6 +3604,35 @@ packages: '@vscode/l10n@0.0.18': resolution: {integrity: sha512-KYSIHVmslkaCDyw013pphY+d7x1qV8IZupYfeIfzNA+nsaWHbn5uPuQRvdRFsa9zFzGeudPuoGoZ1Op4jrJXIQ==} + '@vue/compiler-core@3.5.12': + resolution: {integrity: sha512-ISyBTRMmMYagUxhcpyEH0hpXRd/KqDU4ymofPgl2XAkY9ZhQ+h0ovEZJIiPop13UmR/54oA2cgMDjgroRelaEw==} + + '@vue/compiler-dom@3.5.12': + resolution: {integrity: sha512-9G6PbJ03uwxLHKQ3P42cMTi85lDRvGLB2rSGOiQqtXELat6uI4n8cNz9yjfVHRPIu+MsK6TE418Giruvgptckg==} + + '@vue/compiler-sfc@3.5.12': + resolution: {integrity: sha512-2k973OGo2JuAa5+ZlekuQJtitI5CgLMOwgl94BzMCsKZCX/xiqzJYzapl4opFogKHqwJk34vfsaKpfEhd1k5nw==} + + '@vue/compiler-ssr@3.5.12': + resolution: {integrity: sha512-eLwc7v6bfGBSM7wZOGPmRavSWzNFF6+PdRhE+VFJhNCgHiF8AM7ccoqcv5kBXA2eWUfigD7byekvf/JsOfKvPA==} + + '@vue/reactivity@3.5.12': + resolution: {integrity: sha512-UzaN3Da7xnJXdz4Okb/BGbAaomRHc3RdoWqTzlvd9+WBR5m3J39J1fGcHes7U3za0ruYn/iYy/a1euhMEHvTAg==} + + '@vue/runtime-core@3.5.12': + resolution: {integrity: sha512-hrMUYV6tpocr3TL3Ad8DqxOdpDe4zuQY4HPY3X/VRh+L2myQO8MFXPAMarIOSGNu0bFAjh1yBkMPXZBqCk62Uw==} + + '@vue/runtime-dom@3.5.12': + resolution: {integrity: sha512-q8VFxR9A2MRfBr6/55Q3umyoN7ya836FzRXajPB6/Vvuv0zOPL+qltd9rIMzG/DbRLAIlREmnLsplEF/kotXKA==} + + '@vue/server-renderer@3.5.12': + resolution: {integrity: sha512-I3QoeDDeEPZm8yR28JtY+rk880Oqmj43hreIBVTicisFTx/Dl7JpG72g/X7YF8hnQD3IFhkky5i2bPonwrTVPg==} + peerDependencies: + vue: 3.5.12 + + '@vue/shared@3.5.12': + resolution: {integrity: sha512-L2RPSAwUFbgZH20etwrXyVyCBu9OxRSi8T/38QsvnkJyvq2LufW2lDCOzm7t/U9C1mkhJGWYfCuFBCmIuNivrg==} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -3526,6 +3659,27 @@ packages: resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} engines: {node: '>=8'} + ai@3.4.10: + resolution: {integrity: sha512-K+Nm8+AsVWEJDd/DsezSlAteIvcA+HjCUDQhuXKVTBU4z+4lpyji6j9wsyaGBhwFDqGZgNzR/D0r5FQLasA4Jg==} + engines: {node: '>=18'} + peerDependencies: + openai: ^4.42.0 + react: ^18 || ^19 + sswr: ^2.1.0 + svelte: ^3.0.0 || ^4.0.0 + zod: ^3.0.0 + peerDependenciesMeta: + openai: + optional: true + react: + optional: true + sswr: + optional: true + svelte: + optional: true + zod: + optional: true + ajv-draft-04@1.0.0: resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==} peerDependencies: @@ -3893,6 +4047,9 @@ packages: resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} engines: {node: '>= 10'} + client-only@0.0.1: + resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} @@ -3918,6 +4075,9 @@ packages: react: ^18.0.0 react-dom: ^18.0.0 + code-red@1.0.4: + resolution: {integrity: sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==} + codemirror@6.0.1: resolution: {integrity: 
sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==} @@ -4162,6 +4322,9 @@ packages: didyoumean@1.2.2: resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} + diff-match-patch@1.0.5: + resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} + diff-sequences@29.6.3: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -4538,6 +4701,10 @@ packages: eventemitter3@5.0.1: resolution: {integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==} + eventsource-parser@1.1.2: + resolution: {integrity: sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==} + engines: {node: '>=14.18'} + execa@0.7.0: resolution: {integrity: sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==} engines: {node: '>=4'} @@ -5199,6 +5366,9 @@ packages: json-schema-traverse@1.0.0: resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + json-schema@0.4.0: + resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} + json5@2.2.3: resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} @@ -5210,6 +5380,11 @@ packages: jsonc-parser@3.3.1: resolution: {integrity: sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==} + jsondiffpatch@0.6.0: + resolution: {integrity: sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} @@ -5255,6 +5430,7 @@ packages: libsql@0.3.19: resolution: {integrity: sha512-Aj5cQ5uk/6fHdmeW0TiXK42FqUlwx7ytmMLPSaUQPin5HKKKuUPD62MAbN4OEweGBBI7q1BekoEN4gPUEL6MZA==} + cpu: [x64, arm64, wasm32] os: [darwin, linux, win32] lilconfig@2.1.0: @@ -5276,6 +5452,9 @@ packages: resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} engines: {node: '>=14'} + locate-character@3.0.0: + resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} + locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} @@ -5679,6 +5858,11 @@ packages: mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + nanoid@3.3.6: + resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -6536,6 +6720,9 @@ packages: resolution: {integrity: 
sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==} engines: {node: '>=4'} + secure-json-parse@2.7.0: + resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} + selfsigned@2.4.1: resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==} engines: {node: '>=10'} @@ -6680,6 +6867,11 @@ packages: resolution: {integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + sswr@2.1.0: + resolution: {integrity: sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==} + peerDependencies: + svelte: ^4.0.0 || ^5.0.0-next.0 + stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -6825,6 +7017,10 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + svelte@4.2.19: + resolution: {integrity: sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw==} + engines: {node: '>=16'} + svg-parser@2.0.4: resolution: {integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} @@ -6833,6 +7029,19 @@ packages: engines: {node: '>=14.0.0'} hasBin: true + swr@2.2.5: + resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} + peerDependencies: + react: ^16.11.0 || ^17.0.0 || ^18.0.0 + + swrev@4.0.0: + resolution: {integrity: sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==} + + swrv@1.0.4: + resolution: {integrity: sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==} + peerDependencies: + vue: '>=3.2.26 < 4' + tailwind-merge@2.5.2: resolution: {integrity: sha512-kjEBm+pvD+6eAwzJL2Bi+02/9LFLal1Gs61+QB7HvTfQQ0aXwC5LGT8PEt1gS0CWKktKe6ysPTAy3cBC5MeiIg==} @@ -7410,6 +7619,14 @@ packages: vscode-uri@3.0.8: resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==} + vue@3.5.12: + resolution: {integrity: sha512-CLVZtXtn2ItBIi/zHZ0Sg1Xkb7+PU32bJJ8Bmy7ts3jxXTcbfsEfBivFYYWz1Hur+lalqGAh65Coin0r+HRUfg==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + w3c-keyname@2.2.8: resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==} @@ -7639,6 +7856,83 @@ packages: snapshots: + '@ai-sdk/anthropic@0.0.51(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + zod: 3.23.8 + + '@ai-sdk/mistral@0.0.42(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + zod: 3.23.8 + + '@ai-sdk/openai@0.0.66(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + zod: 3.23.8 + + '@ai-sdk/provider-utils@1.0.20(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + eventsource-parser: 1.1.2 + nanoid: 3.3.6 + secure-json-parse: 2.7.0 + optionalDependencies: + zod: 3.23.8 + + '@ai-sdk/provider@0.0.24': + dependencies: + json-schema: 0.4.0 + + '@ai-sdk/react@0.0.62(react@18.3.1)(zod@3.23.8)': + 
dependencies: + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8) + swr: 2.2.5(react@18.3.1) + optionalDependencies: + react: 18.3.1 + zod: 3.23.8 + + '@ai-sdk/solid@0.0.49(zod@3.23.8)': + dependencies: + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8) + transitivePeerDependencies: + - zod + + '@ai-sdk/svelte@0.0.51(svelte@4.2.19)(zod@3.23.8)': + dependencies: + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8) + sswr: 2.1.0(svelte@4.2.19) + optionalDependencies: + svelte: 4.2.19 + transitivePeerDependencies: + - zod + + '@ai-sdk/ui-utils@0.0.46(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + json-schema: 0.4.0 + secure-json-parse: 2.7.0 + zod-to-json-schema: 3.23.2(zod@3.23.8) + optionalDependencies: + zod: 3.23.8 + + '@ai-sdk/vue@0.0.54(vue@3.5.12(typescript@5.6.2))(zod@3.23.8)': + dependencies: + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8) + swrv: 1.0.4(vue@3.5.12(typescript@5.6.2)) + optionalDependencies: + vue: 3.5.12(typescript@5.6.2) + transitivePeerDependencies: + - zod + '@alloc/quick-lru@5.2.0': {} '@ampproject/remapping@2.3.0': @@ -10196,6 +10490,8 @@ snapshots: dependencies: '@types/ms': 0.7.34 + '@types/diff-match-patch@1.0.36': {} + '@types/estree-jsx@1.0.5': dependencies: '@types/estree': 1.0.5 @@ -10438,6 +10734,60 @@ snapshots: '@vscode/l10n@0.0.18': {} + '@vue/compiler-core@3.5.12': + dependencies: + '@babel/parser': 7.25.6 + '@vue/shared': 3.5.12 + entities: 4.5.0 + estree-walker: 2.0.2 + source-map-js: 1.2.1 + + '@vue/compiler-dom@3.5.12': + dependencies: + '@vue/compiler-core': 3.5.12 + '@vue/shared': 3.5.12 + + '@vue/compiler-sfc@3.5.12': + dependencies: + '@babel/parser': 7.25.6 + '@vue/compiler-core': 3.5.12 + '@vue/compiler-dom': 3.5.12 + '@vue/compiler-ssr': 3.5.12 + '@vue/shared': 3.5.12 + estree-walker: 2.0.2 + magic-string: 0.30.11 + postcss: 8.4.47 + source-map-js: 1.2.1 + + '@vue/compiler-ssr@3.5.12': + dependencies: + '@vue/compiler-dom': 3.5.12 + '@vue/shared': 3.5.12 + + '@vue/reactivity@3.5.12': + dependencies: + '@vue/shared': 3.5.12 + + '@vue/runtime-core@3.5.12': + dependencies: + '@vue/reactivity': 3.5.12 + '@vue/shared': 3.5.12 + + '@vue/runtime-dom@3.5.12': + dependencies: + '@vue/reactivity': 3.5.12 + '@vue/runtime-core': 3.5.12 + '@vue/shared': 3.5.12 + csstype: 3.1.3 + + '@vue/server-renderer@3.5.12(vue@3.5.12(typescript@5.6.2))': + dependencies: + '@vue/compiler-ssr': 3.5.12 + '@vue/shared': 3.5.12 + vue: 3.5.12(typescript@5.6.2) + + '@vue/shared@3.5.12': {} + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 @@ -10461,6 +10811,32 @@ snapshots: clean-stack: 2.2.0 indent-string: 4.0.0 + ai@3.4.10(openai@4.55.4(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@4.2.19))(svelte@4.2.19)(vue@3.5.12(typescript@5.6.2))(zod@3.23.8): + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + '@ai-sdk/react': 0.0.62(react@18.3.1)(zod@3.23.8) + '@ai-sdk/solid': 0.0.49(zod@3.23.8) + '@ai-sdk/svelte': 0.0.51(svelte@4.2.19)(zod@3.23.8) + '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8) + '@ai-sdk/vue': 0.0.54(vue@3.5.12(typescript@5.6.2))(zod@3.23.8) + '@opentelemetry/api': 1.9.0 + eventsource-parser: 1.1.2 + json-schema: 0.4.0 + jsondiffpatch: 0.6.0 + nanoid: 3.3.6 + secure-json-parse: 2.7.0 + zod-to-json-schema: 3.23.2(zod@3.23.8) + optionalDependencies: + openai: 4.55.4(encoding@0.1.13)(zod@3.23.8) + 
react: 18.3.1 + sswr: 2.1.0(svelte@4.2.19) + svelte: 4.2.19 + zod: 3.23.8 + transitivePeerDependencies: + - solid-js + - vue + ajv-draft-04@1.0.0(ajv@8.17.1): optionalDependencies: ajv: 8.17.1 @@ -10939,6 +11315,8 @@ snapshots: cli-width@3.0.0: {} + client-only@0.0.1: {} + cliui@8.0.1: dependencies: string-width: 4.2.3 @@ -10965,6 +11343,14 @@ snapshots: - '@types/react' - '@types/react-dom' + code-red@1.0.4: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.0 + '@types/estree': 1.0.5 + acorn: 8.12.1 + estree-walker: 3.0.3 + periscopic: 3.1.0 + codemirror@6.0.1(@lezer/common@1.2.1): dependencies: '@codemirror/autocomplete': 6.18.0(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.33.0)(@lezer/common@1.2.1) @@ -11170,6 +11556,8 @@ snapshots: didyoumean@1.2.2: {} + diff-match-patch@1.0.5: {} + diff-sequences@29.6.3: {} diff@5.2.0: {} @@ -11498,6 +11886,8 @@ snapshots: eventemitter3@5.0.1: {} + eventsource-parser@1.1.2: {} + execa@0.7.0: dependencies: cross-spawn: 5.1.0 @@ -12266,12 +12656,20 @@ snapshots: json-schema-traverse@1.0.0: {} + json-schema@0.4.0: {} + json5@2.2.3: {} jsonc-parser@2.3.1: {} jsonc-parser@3.3.1: {} + jsondiffpatch@0.6.0: + dependencies: + '@types/diff-match-patch': 1.0.36 + chalk: 5.3.0 + diff-match-patch: 1.0.5 + jsonfile@6.1.0: dependencies: universalify: 2.0.1 @@ -12337,6 +12735,8 @@ snapshots: mlly: 1.7.1 pkg-types: 1.1.3 + locate-character@3.0.0: {} + locate-path@5.0.0: dependencies: p-locate: 4.1.0 @@ -13041,6 +13441,8 @@ snapshots: object-assign: 4.1.1 thenify-all: 1.6.0 + nanoid@3.3.6: {} + nanoid@3.3.7: {} napi-build-utils@1.0.2: {} @@ -14046,6 +14448,8 @@ snapshots: extend-shallow: 2.0.1 kind-of: 6.0.3 + secure-json-parse@2.7.0: {} + selfsigned@2.4.1: dependencies: '@types/node-forge': 1.3.11 @@ -14208,6 +14612,11 @@ snapshots: dependencies: minipass: 7.1.2 + sswr@2.1.0(svelte@4.2.19): + dependencies: + svelte: 4.2.19 + swrev: 4.0.0 + stackback@0.0.2: {} stackframe@1.3.4: {} @@ -14348,6 +14757,23 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} + svelte@4.2.19: + dependencies: + '@ampproject/remapping': 2.3.0 + '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/trace-mapping': 0.3.25 + '@types/estree': 1.0.5 + acorn: 8.12.1 + aria-query: 5.3.0 + axobject-query: 4.1.0 + code-red: 1.0.4 + css-tree: 2.3.1 + estree-walker: 3.0.3 + is-reference: 3.0.2 + locate-character: 3.0.0 + magic-string: 0.30.11 + periscopic: 3.1.0 + svg-parser@2.0.4: {} svgo@3.3.2: @@ -14360,6 +14786,18 @@ snapshots: csso: 5.0.5 picocolors: 1.0.1 + swr@2.2.5(react@18.3.1): + dependencies: + client-only: 0.0.1 + react: 18.3.1 + use-sync-external-store: 1.2.2(react@18.3.1) + + swrev@4.0.0: {} + + swrv@1.0.4(vue@3.5.12(typescript@5.6.2)): + dependencies: + vue: 3.5.12(typescript@5.6.2) + tailwind-merge@2.5.2: {} tailwindcss-animate@1.0.7(tailwindcss@3.4.9): @@ -14994,6 +15432,16 @@ snapshots: vscode-uri@3.0.8: {} + vue@3.5.12(typescript@5.6.2): + dependencies: + '@vue/compiler-dom': 3.5.12 + '@vue/compiler-sfc': 3.5.12 + '@vue/runtime-dom': 3.5.12 + '@vue/server-renderer': 3.5.12(vue@3.5.12(typescript@5.6.2)) + '@vue/shared': 3.5.12 + optionalDependencies: + typescript: 5.6.2 + w3c-keyname@2.2.8: {} wcwidth@1.0.1: diff --git a/studio/src/Layout/Settings/SettingsScreen.tsx b/studio/src/Layout/Settings/SettingsScreen.tsx index 2bfb4e147..38ecdc84f 100644 --- a/studio/src/Layout/Settings/SettingsScreen.tsx +++ b/studio/src/Layout/Settings/SettingsScreen.tsx @@ -17,25 +17,23 @@ export function SettingsScreen({ return ( - -
[SettingsScreen.tsx hunk — JSX markup garbled in extraction; recoverable content: the hunk re-wraps the "Settings" heading and the "Manage your settings and preferences." description in the settings screen header]
diff --git a/studio/src/pages/SettingsPage/AISettingsForm.tsx b/studio/src/pages/SettingsPage/AISettingsForm.tsx index 3d21cf7e4..d7ef0ea43 100644 --- a/studio/src/pages/SettingsPage/AISettingsForm.tsx +++ b/studio/src/pages/SettingsPage/AISettingsForm.tsx @@ -15,7 +15,7 @@ import { FormLabel, } from "@/components/ui/form"; import { Input } from "@/components/ui/input"; -import { Switch } from "@/components/ui/switch"; +import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group"; import { Tooltip, TooltipContent, @@ -23,8 +23,10 @@ import { } from "@/components/ui/tooltip"; import { cn } from "@/utils"; import { + type AiProviderType, AnthropicModelOptions, - OpenAiModelOptions, + MistralModelOptions, + OpenAIModelOptions, ProviderOptions, type Settings, } from "@fiberplane/fpx-types"; @@ -33,11 +35,73 @@ import { EyeClosedIcon, EyeOpenIcon, InfoCircledIcon, - SlashIcon, } from "@radix-ui/react-icons"; -import { useState } from "react"; +import { useMemo, useState } from "react"; import { useSettingsForm } from "./form"; +const getAnthropicModelReleaseDate = (model: string) => { + const chunks = model.split("-"); + return chunks[chunks.length - 1]; +}; + +function useModelOptions(provider: AiProviderType) { + return useMemo(() => { + const modelOptions = Object.entries( + provider === "openai" + ? OpenAIModelOptions + : provider === "anthropic" + ? AnthropicModelOptions + : provider === "mistral" + ? MistralModelOptions + : {}, + ); + + // HACK - Anthropic models end in their date of release, so we sort by release date descending + // This makes sure sonnet-3.5 is at the top of the list + if (provider === "anthropic") { + modelOptions.sort((a, b) => { + const modelNameA = a[0]; + const modelNameB = b[0]; + const dateStringA = getAnthropicModelReleaseDate(modelNameA); + const dateStringB = getAnthropicModelReleaseDate(modelNameB); + return dateStringB.localeCompare(dateStringA); + }); + } + + // Sort mistral models by the "latest" keyword + if (provider === "mistral") { + modelOptions.sort((a, b) => { + const modelNameA = a[0]; + const modelNameB = b[0]; + if (modelNameA.includes("latest")) { + return -1; + } + if (modelNameB.includes("latest")) { + return 1; + } + return modelNameA.localeCompare(modelNameB); + }); + } + + // Sort gpt-4o to the top of the list + if (provider === "openai") { + modelOptions.sort((a, b) => { + const modelNameA = a[0]; + const modelNameB = b[0]; + if (modelNameA.includes("gpt-4o")) { + return -1; + } + if (modelNameB.includes("gpt-4o")) { + return 1; + } + return modelNameA.localeCompare(modelNameB); + }); + } + + return modelOptions; + }, [provider]); +} + export function AISettingsForm({ settings, }: { @@ -50,236 +114,167 @@ export function AISettingsForm({ (key) => !["proxyRequestsEnabled", "proxyBaseUrl"].includes(key), ).length > 0; + const sortedProviderOptions = useMemo(() => { + const options = Object.entries(ProviderOptions); + options.sort((a, b) => a[0].localeCompare(b[0])); + return options; + }, []); + + const selectedProvider = form.watch("aiProvider"); + + const activeProvider = Object.keys(ProviderOptions).find( + (provider) => provider === selectedProvider, + ) as AiProviderType; + + const modelOptions = useModelOptions(activeProvider); + return (
[AISettingsForm.tsx form body — JSX markup garbled in extraction. Recoverable content of the hunk:
- The submit handler gains an error branch that logs "Form submission error:", and the "Request Autofill Settings" heading is kept with new wrapper markup.
- Removed: the "Enable Request Autofill" "(Beta)" switch ("Generate sample request data with AI.") and its conditional "Provider Configuration" section ("Select the AI provider and model you want to use.") with provider/model selects over ProviderOptions and OpenAiModelOptions/AnthropicModelOptions, an "API Key" field whose description notes the key is stored locally in .fpxconfig/fpx.db and ignored by version control by default, and a "Base URL" field for pointing the provider's API client at any compatible endpoint.
- Added: an "AI Provider" radio group ("Select the AI provider for request autofill.") rendered from sortedProviderOptions, and a per-provider section headed by ProviderOptions[provider as AiProviderType] containing a Model select over modelOptions, an API Key input (placeholder `Enter ${ProviderOptions[provider]} API Key`, className "mx-[2px]" to avoid clipping the focus ring), and a Base URL input.]
[Settings layout hunks — JSX markup garbled in extraction. Recoverable content: the mobile tab select wired to setActiveTab for AI_TAB, PROXY_REQUESTS_TAB, and FPX_WORKER_PROXY_TAB is removed, leaving the "Request Autofill", "Proxy Requests", and FPX_WORKER_PROXY_TAB tab triggers; hunks @@ -130,7 +96,7 @@ and @@ -138,9 +104,9 @@ adjust wrapper markup in SettingsLayout and the placeholder markup in SettingsSkeleton.]
); diff --git a/studio/src/pages/SettingsPage/form/form.tsx b/studio/src/pages/SettingsPage/form/form.tsx index 0be1db430..68fd4ff78 100644 --- a/studio/src/pages/SettingsPage/form/form.tsx +++ b/studio/src/pages/SettingsPage/form/form.tsx @@ -1,20 +1,27 @@ import { useToast } from "@/components/ui/use-toast"; import { useUpdateSettings } from "@/queries"; import { errorHasMessage } from "@/utils"; -import { - CLAUDE_3_5_SONNET, - GPT_4o, - type Settings, - SettingsSchema, -} from "@fiberplane/fpx-types"; +import { type Settings, SettingsSchema } from "@fiberplane/fpx-types"; import { zodResolver } from "@hookform/resolvers/zod"; import { useForm } from "react-hook-form"; const DEFAULT_VALUES = { aiEnabled: false, - aiProviderType: "openai", - openaiModel: GPT_4o, - anthropicModel: CLAUDE_3_5_SONNET, + aiProvider: "openai", + aiProviderConfigurations: { + openai: { + model: "gpt-4o", + apiKey: "", + }, + anthropic: { + model: "claude-3-5-sonnet-20240620", + apiKey: "", + }, + mistral: { + model: "mistral-large-latest", + apiKey: "", + }, + }, proxyRequestsEnabled: false, proxyBaseUrl: "https://webhonc.mies.workers.dev", } satisfies Settings; @@ -24,11 +31,16 @@ export function useSettingsForm(settings: Settings) { const { mutate: updateSettings } = useUpdateSettings(); + // TODO - Derive default values from the fiberplane types package const form = useForm({ resolver: zodResolver(SettingsSchema), defaultValues: { ...DEFAULT_VALUES, ...settings, + aiProviderConfigurations: { + ...DEFAULT_VALUES.aiProviderConfigurations, + ...settings.aiProviderConfigurations, + }, }, }); diff --git a/www/src/content/changelog/!canary.mdx b/www/src/content/changelog/!canary.mdx index 1ab032dac..3a0742c61 100644 --- a/www/src/content/changelog/!canary.mdx +++ b/www/src/content/changelog/!canary.mdx @@ -1,27 +1,9 @@ --- -date: 2024-09-30 +date: 2024-10-18 version: canary draft: true --- ### Features -- **Rendering outside requests** External requests are now rendered in the request panel, instead of the timeline panel. - -- **Improved AI request generation** We've added some static analysis smarts for wrangler projects in order to give LLM providers more context about the code that powers each route. - -- **Improved Route Updating** We've added some improvements to how we refresh your list of API routes in the Studio. - -- **Render audio files** When your api returns a binary audio response, we will render an audio player for you to listen to it. - ### Bug fixes - -- **D1 autoinstrumentation** The client library, `@fiberplane/hono-otel`, now instruments D1 queries in latest versions of wrangler/miniflare. - -- **Set response attributes** Fixed an issue where setting response attributes would not work correctly (for synchronous endpoints). - -- **Side panel not closing** Fixed an issue where the side panel would not close when clicking outside of it. - -- **Missing log information** Fixed an issue where logs would not be displayed fully in the logs panel. - -- **Render images returned from API** Fixed an issue where images returned from an API request made through Studio would not be rendered in the response panel. 
diff --git a/www/src/content/changelog/2024-10-18-v0_9_0.mdx b/www/src/content/changelog/2024-10-18-v0_9_0.mdx
new file mode 100644
index 000000000..8b9264d08
--- /dev/null
+++ b/www/src/content/changelog/2024-10-18-v0_9_0.mdx
@@ -0,0 +1,31 @@
+---
+date: 2024-10-18
+version: 0.9.0
+draft: false
+---
+
+### Features
+
+- **Mistral support**: Fiberplane Studio now supports Mistral for generating request data.
+
+- **Improved settings screen**: We made it easier to add API keys for multiple LLM providers.
+
+- **Rendering outside requests**: External requests are now rendered in the request panel instead of the timeline panel.
+
+- **Improved AI request generation**: We've added static analysis for wrangler projects to give LLM providers more context about the code that powers each route.
+
+- **Improved route updating**: We've improved how we refresh your list of API routes in the Studio.
+
+- **Render audio files**: When your API returns a binary audio response, we will render an audio player for you to listen to it.
+
+### Bug fixes
+
+- **D1 autoinstrumentation**: The client library, `@fiberplane/hono-otel`, now instruments D1 queries in the latest versions of wrangler/miniflare.
+
+- **Set response attributes**: Fixed an issue where setting response attributes would not work correctly (for synchronous endpoints).
+
+- **Side panel not closing**: Fixed an issue where the side panel would not close when clicking outside of it.
+
+- **Missing log information**: Fixed an issue where logs would not be displayed fully in the logs panel.
+
+- **Render images returned from API**: Fixed an issue where images returned from an API request made through Studio would not be rendered in the response panel.