Commit ca574f3

Merge pull request #962 from superagent-ai/bugfix/integrations-llm-form-cant-save

[bugfix] [refactor] LLM integrations form
elisalimli authored Apr 15, 2024
2 parents 06d2de6 + 2e7b983 commit ca574f3
Showing 5 changed files with 109 additions and 56 deletions.
6 changes: 4 additions & 2 deletions libs/ui/app/agents/[agentId]/add-datasource.tsx
@@ -51,7 +51,7 @@ interface AddDatasourceProps {
profile: any
agent: any
onSuccess: () => void
llmProvider: LLMProvider
llmProvider: keyof typeof LLMProvider
}

const supabase = getSupabase()
@@ -111,7 +111,9 @@ function AddDatasource({
}
}

function getEmbeddingsModelProvider(llmProvider: LLMProvider): LLMProvider {
function getEmbeddingsModelProvider(
llmProvider: keyof typeof LLMProvider
): keyof typeof LLMProvider {
if (llmProvider === LLMProvider.AZURE_OPENAI)
return LLMProvider.AZURE_OPENAI

2 changes: 1 addition & 1 deletion libs/ui/app/agents/[agentId]/settings.tsx
@@ -97,7 +97,7 @@ export default function Settings({
},
})
const avatar = form.watch("avatar")
const currLlmProvider = form.watch("llms") as LLMProvider
const currLlmProvider = form.watch("llms") as keyof typeof LLMProvider
async function onSubmit(values: z.infer<typeof formSchema>) {
const { tools, datasources } = values

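The type-annotation changes in add-datasource.tsx and settings.tsx above follow from the models.ts change further down: LLMProvider becomes a plain `as const` object, which exists only as a value and can no longer appear in type position, so `keyof typeof LLMProvider` takes its place. A minimal sketch of the pattern (provider list trimmed; not the committed code):

const LLMProvider = {
  OPENAI: "OPENAI",
  AZURE_OPENAI: "AZURE_OPENAI",
} as const

// With the old enum, `LLMProvider` worked as both a value and a type.
// As a const object it is only a value, so `let p: LLMProvider` no longer compiles
// ("'LLMProvider' refers to a value, but is being used as a type here").
// The key union takes over in type position:
let provider: keyof typeof LLMProvider = "AZURE_OPENAI" // "OPENAI" | "AZURE_OPENAI"
provider = LLMProvider.OPENAI // the object still supplies the runtime values
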
85 changes: 59 additions & 26 deletions libs/ui/app/integrations/llm.tsx
@@ -2,11 +2,12 @@

import * as React from "react"
import { useRouter } from "next/navigation"
import { LLMProvider } from "@/models/models"
import { zodResolver } from "@hookform/resolvers/zod"
import { useForm } from "react-hook-form"
import * as z from "zod"

import { siteConfig } from "@/config/site"
import { LLMForm, siteConfig } from "@/config/site"
import { Api } from "@/lib/api"
import { Button } from "@/components/ui/button"
import {
@@ -30,25 +31,58 @@ import {
import { Input } from "@/components/ui/input"
import { Spinner } from "@/components/ui/spinner"

const azureSchema = z.object({
azure_endpoint: z.string().nonempty("Endpoint cannot be empty"),
openai_api_version: z.string().nonempty("API version cannnot be empty"),
azure_deployment: z.string().nonempty("Deployment cannnot be empty"),
const openAiSchema = z.object({
llmType: z.literal(LLMProvider.OPENAI),
apiKey: z.string().nonempty("API key is required"),
options: z.object({}),
})

const bedrockSchema = z.object({
aws_access_key_id: z.string().nonempty("Access key ID cannot be empty"),
aws_secret_access_key: z
.string()
.nonempty("Secret access key cannot be empty"),
aws_region: z.string().nonempty("Region cannot be empty"),
const perplexityAiSchema = z.object({
llmType: z.literal(LLMProvider.PERPLEXITY),
apiKey: z.string().nonempty("API key is required"),
options: z.object({}),
})

const formSchema = z.object({
apiKey: z.string().optional(),
options: z.union([azureSchema, bedrockSchema]).optional(),
const togetherAiSchema = z.object({
llmType: z.literal(LLMProvider.TOGETHER_AI),
apiKey: z.string().nonempty("API key is required"),
options: z.object({}),
})

const antrophicSchema = z.object({
llmType: z.literal(LLMProvider.ANTHROPIC),
apiKey: z.string().nonempty("API key is required"),
options: z.object({}),
})
const amazonBedrockSchema = z.object({
llmType: z.literal(LLMProvider.BEDROCK),
apiKey: z.literal(""),
options: z.object({
aws_access_key_id: z.string(),
aws_secret_access_key: z.string(),
aws_region_name: z.string(),
}),
})

const azureOpenAiSchema = z.object({
llmType: z.literal(LLMProvider.AZURE_OPENAI),
apiKey: z.string().nonempty("API key is required"),
options: z.object({
azure_endpoint: z.string(),
openai_api_version: z.string(),
azure_deployment: z.string(),
}),
})

const formSchema = z.discriminatedUnion("llmType", [
openAiSchema,
perplexityAiSchema,
togetherAiSchema,
antrophicSchema,
amazonBedrockSchema,
azureOpenAiSchema,
])

export default function LLM({
profile,
configuredLLMs,
@@ -57,13 +91,15 @@ export default function LLM({
configuredLLMs: any
}) {
const [open, setOpen] = React.useState<boolean>()
const [selectedProvider, setSelectedProvider] = React.useState<any>()
const [selectedProvider, setSelectedProvider] = React.useState<LLMForm[0]>()
const router = useRouter()
const api = new Api(profile.api_key)
const { ...form } = useForm<z.infer<typeof formSchema>>({
resolver: zodResolver(formSchema),
defaultValues: {
values: {
llmType: selectedProvider?.provider ?? LLMProvider.OPENAI,
apiKey: "",
options: {} as any,
},
})

@@ -76,26 +112,23 @@ export default function LLM({
: values.options,
}

const currentProviderLLMs = configuredLLMs.filter(
(db: any) => db.provider === selectedProvider.provider
const isExistingConnection = configuredLLMs.find(
(db: any) => db.provider === selectedProvider?.provider
)

if (currentProviderLLMs.length > 0) {
currentProviderLLMs.forEach(async (llm: any) => {
await api.patchLLM(llm.id, {
...payload,
provider: selectedProvider.provider,
})
if (isExistingConnection) {
await api.patchLLM(isExistingConnection.id, {
...payload,
provider: selectedProvider?.provider,
})
} else {
await api.createLLM({ ...payload, provider: selectedProvider.provider })
await api.createLLM({ ...payload, provider: selectedProvider?.provider })
}

form.reset()
router.refresh()
setOpen(false)
}

return (
<div className="container flex max-w-4xl flex-col space-y-10 pt-10">
<div className="flex flex-col">
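
For reference, a condensed sketch of how the discriminated union defined above routes validation on the `llmType` literal (trimmed to two providers, with placeholder values; not part of the diff):

import * as z from "zod"

const LLMProvider = {
  OPENAI: "OPENAI",
  AZURE_OPENAI: "AZURE_OPENAI",
} as const

const openAiSchema = z.object({
  llmType: z.literal(LLMProvider.OPENAI),
  apiKey: z.string().nonempty("API key is required"),
  options: z.object({}),
})

const azureOpenAiSchema = z.object({
  llmType: z.literal(LLMProvider.AZURE_OPENAI),
  apiKey: z.string().nonempty("API key is required"),
  options: z.object({
    azure_endpoint: z.string(),
    openai_api_version: z.string(),
    azure_deployment: z.string(),
  }),
})

const formSchema = z.discriminatedUnion("llmType", [openAiSchema, azureOpenAiSchema])

// The OPENAI branch only needs an apiKey, so this parses:
formSchema.parse({ llmType: "OPENAI", apiKey: "sk-placeholder", options: {} })

// The AZURE_OPENAI branch also requires the azure_* options, so this fails:
console.log(formSchema.safeParse({ llmType: "AZURE_OPENAI", apiKey: "key", options: {} }).success) // false

Compared with the previous `options: z.union([azureSchema, bedrockSchema]).optional()`, the discriminated union selects one branch by `llmType` and reports that branch's field errors, which is what lets a single form validate per-provider payloads.
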
57 changes: 36 additions & 21 deletions libs/ui/config/site.ts
@@ -1,4 +1,4 @@
import { VectorDbProvider } from "@/models/models"
import { LLMProvider, VectorDbProvider } from "@/models/models"
import {
TbBrandDiscord,
TbFileCode,
@@ -11,6 +11,21 @@ import {

export type SiteConfig = typeof siteConfig

export type LLMForm = {
disabled: boolean
formDescription: string
provider: keyof typeof LLMProvider
name: string
metadata: {
key: string
type: "input" | "password" | "json" | "select"
label: string
helpText?: string
json?: Record<string, { type: string; description: string }>
options?: { value: string; title: string }[]
}[]
}[]

export const siteConfig = {
name: "Superagent Cloud",
description: "The agent framework for large language models",
@@ -436,7 +451,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your OpenAI API key.",
provider: "OPENAI",
provider: LLMProvider.OPENAI,
name: "OpenAI",
metadata: [
{
@@ -449,7 +464,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your Perplexity API key.",
provider: "PERPLEXITY",
provider: LLMProvider.PERPLEXITY,
name: "Perplexity AI",
metadata: [
{
@@ -462,7 +477,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your Together API key.",
provider: "TOGETHER_AI",
provider: LLMProvider.TOGETHER_AI,
name: "Together AI",
metadata: [
{
@@ -475,7 +490,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your Anthropic API key.",
provider: "ANTHROPIC",
provider: LLMProvider.ANTHROPIC,
name: "Anthropic",
metadata: [
{
@@ -488,7 +503,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your AWS credentials.",
provider: "BEDROCK",
provider: LLMProvider.BEDROCK,
name: "Amazon Bedrock",
metadata: [
{
@@ -511,7 +526,7 @@ export const siteConfig = {
{
disabled: false,
formDescription: "Please enter your Azure OpenAI API key.",
provider: "AZURE_OPENAI",
provider: LLMProvider.AZURE_OPENAI,
name: "Azure OpenAI",
metadata: [
{
@@ -536,20 +551,20 @@ export const siteConfig = {
},
],
},
{
disabled: true,
formDescription: "Please enter your HF API key.",
provider: "HUGGINGFACE",
name: "Hugging Face",
metadata: [
{
key: "apiKey",
type: "input",
label: "HF API Key",
},
],
},
],
// {
// disabled: true,
// formDescription: "Please enter your HF API key.",
// provider: LLMProvider.HUGGINGFACE,
// name: "Hugging Face",
// metadata: [
// {
// key: "apiKey",
// type: "input",
// label: "HF API Key",
// },
// ],
// },
] satisfies LLMForm,
vectorDbs: [
{
provider: VectorDbProvider[VectorDbProvider.PINECONE],
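
The `] satisfies LLMForm` added above type-checks the provider config against the new LLMForm type without widening the inferred literals. A minimal sketch of the difference (fields trimmed, entry invented for illustration):

const LLMProvider = {
  OPENAI: "OPENAI",
  AZURE_OPENAI: "AZURE_OPENAI",
} as const

type LLMForm = {
  disabled: boolean
  formDescription: string
  provider: keyof typeof LLMProvider
  name: string
}[]

// `satisfies` verifies the shape (typos or missing fields are compile errors)...
const llms = [
  {
    disabled: false,
    formDescription: "Please enter your OpenAI API key.",
    provider: LLMProvider.OPENAI,
    name: "OpenAI",
  },
] satisfies LLMForm

// ...while keeping the narrow element types, unlike `const llms: LLMForm = [...]`:
const provider = llms[0].provider // type "OPENAI", not the full keyof typeof LLMProvider union
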
15 changes: 9 additions & 6 deletions libs/ui/models/models.ts
@@ -1,8 +1,11 @@
export enum LLMProvider {
OPENAI = "OPENAI",
AZURE_OPENAI = "AZURE_OPENAI",
HUGGINGFACE = "HUGGINGFACE",
}
export const LLMProvider = {
OPENAI: "OPENAI",
PERPLEXITY: "PERPLEXITY",
TOGETHER_AI: "TOGETHER_AI",
ANTHROPIC: "ANTHROPIC",
BEDROCK: "BEDROCK",
AZURE_OPENAI: "AZURE_OPENAI",
} as const

export enum LLMModel {
GPT_3_5_TURBO_16K_0613,
@@ -252,7 +255,7 @@ export class AgentTool {

export class LLM {
id: string
provider: LLMProvider
provider: typeof LLMProvider
apiKey: string
options?: JSON
agents: AgentLLM[]
