From cb3728cd3e14a72aef8dcdcb86d2b0a6759a816e Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 14:14:38 +0400 Subject: [PATCH 1/6] deps: add boto3 --- libs/superagent/poetry.lock | 71 +++++++++++++++++++++++++++++++++- libs/superagent/pyproject.toml | 1 + 2 files changed, 71 insertions(+), 1 deletion(-) diff --git a/libs/superagent/poetry.lock b/libs/superagent/poetry.lock index 80b409862..457f01d74 100644 --- a/libs/superagent/poetry.lock +++ b/libs/superagent/poetry.lock @@ -539,6 +539,47 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.34.83" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.83-py3-none-any.whl", hash = "sha256:33cf93f6de5176f1188c923f4de1ae149ed723b89ed12e434f2b2f628491769e"}, + {file = "boto3-1.34.83.tar.gz", hash = "sha256:9733ce811bd82feab506ad9309e375a79cabe8c6149061971c17754ce8997551"}, +] + +[package.dependencies] +botocore = ">=1.34.83,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.83" +description = "Low-level, data-driven core of boto 3." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.83-py3-none-any.whl", hash = "sha256:0a3fbbe018416aeefa8978454fb0b8129adbaf556647b72269bf02e4bf1f4161"}, + {file = "botocore-1.34.83.tar.gz", hash = "sha256:0f302aa76283d4df62b4fbb6d3d20115c1a8957fc02171257fc93904d69d5636"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +crt = ["awscrt (==0.19.19)"] + [[package]] name = "bs4" version = "0.0.1" @@ -1894,6 +1935,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "joblib" version = "1.3.2" @@ -4467,6 +4519,23 @@ files = [ {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, ] +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + [[package]] name = "scikit-learn" version = "1.3.2" @@ -5956,4 +6025,4 @@ testing = ["big-O", "jaraco.functools", 
"jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1, <3.12" -content-hash = "7beee27d48f14e756545373efd0e6ee914d8a8291200a417f3e7d8f826765852" +content-hash = "7f8ac6f83e02a391bddfb33c98a03e83d63bbc29a9c953d062b6afd28e3fea6d" diff --git a/libs/superagent/pyproject.toml b/libs/superagent/pyproject.toml index dda6a8408..0cfd974e5 100644 --- a/libs/superagent/pyproject.toml +++ b/libs/superagent/pyproject.toml @@ -66,6 +66,7 @@ langsmith = "^0.1.9" langfuse = "2.21.3" tavily-python = "^0.3.1" scrapingbee = "^2.0.1" +boto3 = "^1.34.83" From b894624e052776e2b03bcc18503669f12576235a Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 14:18:01 +0400 Subject: [PATCH 2/6] deps: upgrade litellm from 1.29.4 to 1.35.2 --- libs/superagent/poetry.lock | 14 +++++++------- libs/superagent/pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/libs/superagent/poetry.lock b/libs/superagent/poetry.lock index 457f01d74..a5f2389a3 100644 --- a/libs/superagent/poetry.lock +++ b/libs/superagent/poetry.lock @@ -2311,13 +2311,13 @@ requests = ">=2,<3" [[package]] name = "litellm" -version = "1.29.4" +version = "1.35.2" description = "Library to easily interface with LLM API providers" optional = false -python-versions = ">=3.8, !=2.7.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, !=3.7.*" +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.29.4-py3-none-any.whl", hash = "sha256:014b03fd37864d12acb095511f42bb46b74bf77a0c7086eb5d7d3ea0a27cc238"}, - {file = "litellm-1.29.4.tar.gz", hash = "sha256:14a3e5c5aaa042b2a732374f56260afd7761625d8ee6ac38f6e1de1c5ee5f792"}, + {file = "litellm-1.35.2-py3-none-any.whl", hash = "sha256:686ee040154d7062b0078d882fa6399c5c7cc5ec9b5266490dee68f1b8905a36"}, + {file = "litellm-1.35.2.tar.gz", hash = "sha256:062e5be75196da7348ae0c4f60d396f0b23ee874708ed81c40f7675161213385"}, ] 
[package.dependencies] @@ -2332,8 +2332,8 @@ tiktoken = ">=0.4.0" tokenizers = "*" [package.extras] -extra-proxy = ["streamlit (>=1.29.0,<2.0.0)"] -proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "fastapi (>=0.104.1,<0.105.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=21.2.0,<22.0.0)", "orjson (>=3.9.7,<4.0.0)", "python-multipart (>=0.0.6,<0.0.7)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] +proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.109.1,<0.110.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=21.2.0,<22.0.0)", "orjson (>=3.9.7,<4.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] [[package]] name = "llama-index" @@ -6025,4 +6025,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1, <3.12" -content-hash = "7f8ac6f83e02a391bddfb33c98a03e83d63bbc29a9c953d062b6afd28e3fea6d" +content-hash = "c390f22730e24482e7f42cc8140d339a1025fb1e25021c6d886cd4dafab3a622" diff --git a/libs/superagent/pyproject.toml b/libs/superagent/pyproject.toml index 0cfd974e5..d2988ed32 100644 --- a/libs/superagent/pyproject.toml +++ b/libs/superagent/pyproject.toml @@ -50,7 +50,7 @@ openai = "^1.1.1" langchain-experimental = "^0.0.37" pydub = "^0.25.1" algoliasearch = "^3.0.0" -litellm = "^1.29.4" +litellm = "1.35.2" weaviate-client = "^3.25.3" qdrant-client = "^1.6.9" vecs = "^0.4.2" From ef390ac3bd2213f623bd5d6eb177c062b5a4aecc Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 14:20:00 +0400 Subject: [PATCH 3/6] feat(backend): add amazon bedrock --- libs/superagent/app/agents/llm.py | 3 +++ 
libs/superagent/app/api/workflow_configs/saml_schema.py | 2 ++ .../migrations/20240412075016_add_aws_bedrock/migration.sql | 2 ++ libs/superagent/prisma/schema.prisma | 1 + 4 files changed, 8 insertions(+) create mode 100644 libs/superagent/prisma/migrations/20240412075016_add_aws_bedrock/migration.sql diff --git a/libs/superagent/app/agents/llm.py b/libs/superagent/app/agents/llm.py index f5a85f9eb..fcb19ce16 100644 --- a/libs/superagent/app/agents/llm.py +++ b/libs/superagent/app/agents/llm.py @@ -84,6 +84,9 @@ def get_llm_params(self): return { "temperature": options.get("temperature"), "max_tokens": options.get("max_tokens"), + "aws_access_key_id": options.get("aws_access_key_id"), + "aws_secret_access_key": options.get("aws_secret_access_key"), + "aws_region_name": options.get("aws_region_name"), } async def _get_prompt(self): diff --git a/libs/superagent/app/api/workflow_configs/saml_schema.py b/libs/superagent/app/api/workflow_configs/saml_schema.py index 11b34d33d..837da1242 100644 --- a/libs/superagent/app/api/workflow_configs/saml_schema.py +++ b/libs/superagent/app/api/workflow_configs/saml_schema.py @@ -148,6 +148,7 @@ class LLMAgentTool(BaseAgentToolModel, LLMAgent): LLMProvider.PERPLEXITY.value, LLMProvider.TOGETHER_AI.value, LLMProvider.ANTHROPIC.value, + LLMProvider.BEDROCK.value, ] @@ -157,6 +158,7 @@ class Workflow(BaseModel): # ~~OSS LLM providers~~ perplexity: Optional[LLMAgent] together_ai: Optional[LLMAgent] + bedrock: Optional[LLMAgent] anthropic: Optional[LLMAgent] llm: Optional[LLMAgent] = Field( description="Deprecated! Use LLM providers instead. e.g. 
`perplexity` or `together_ai`" diff --git a/libs/superagent/prisma/migrations/20240412075016_add_aws_bedrock/migration.sql b/libs/superagent/prisma/migrations/20240412075016_add_aws_bedrock/migration.sql new file mode 100644 index 000000000..81ae92e68 --- /dev/null +++ b/libs/superagent/prisma/migrations/20240412075016_add_aws_bedrock/migration.sql @@ -0,0 +1,2 @@ +-- AlterEnum +ALTER TYPE "LLMProvider" ADD VALUE 'BEDROCK'; diff --git a/libs/superagent/prisma/schema.prisma b/libs/superagent/prisma/schema.prisma index b198ce2cd..6f03bca21 100644 --- a/libs/superagent/prisma/schema.prisma +++ b/libs/superagent/prisma/schema.prisma @@ -23,6 +23,7 @@ enum LLMProvider { PERPLEXITY TOGETHER_AI ANTHROPIC + BEDROCK } enum LLMModel { From f2a9381ceec8b17b911d3ac2da42c444c7e07867 Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 14:20:31 +0400 Subject: [PATCH 4/6] feat(ui): add amazon bedrock --- libs/ui/app/integrations/llm.tsx | 13 +++++++++-- libs/ui/config/site.ts | 37 ++++++++++++++++++++++++++------ 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/libs/ui/app/integrations/llm.tsx b/libs/ui/app/integrations/llm.tsx index 26d13166a..92ff7af77 100644 --- a/libs/ui/app/integrations/llm.tsx +++ b/libs/ui/app/integrations/llm.tsx @@ -36,9 +36,17 @@ const azureSchema = z.object({ azure_deployment: z.string().nonempty("Deployment cannnot be empty"), }) +const bedrockSchema = z.object({ + aws_access_key_id: z.string().nonempty("Access key ID cannot be empty"), + aws_secret_access_key: z + .string() + .nonempty("Secret access key cannot be empty"), + aws_region: z.string().nonempty("Region cannot be empty"), +}) + const formSchema = z.object({ - apiKey: z.string().nonempty("API key is mandatory"), - options: azureSchema.optional(), + apiKey: z.string().optional(), + options: z.union([azureSchema, bedrockSchema]), }) export default function LLM({ @@ -59,6 +67,7 @@ export default function LLM({ }, }) + console.log("form", form.formState.errors) async 
function onSubmit(values: z.infer) { const payload = { ...values, diff --git a/libs/ui/config/site.ts b/libs/ui/config/site.ts index c4f006707..d03c066f5 100644 --- a/libs/ui/config/site.ts +++ b/libs/ui/config/site.ts @@ -249,7 +249,7 @@ export const siteConfig = { }, ], }, - { + { value: "ADVANCED_SCRAPER", title: "Advanced Web extractor", metadata: [ @@ -486,15 +486,25 @@ export const siteConfig = { ], }, { - disabled: true, - formDescription: "Please enter your HF API key.", - provider: "HUGGINGFACE", - name: "Hugging Face", + disabled: false, + formDescription: "Please enter your AWS credentials.", + provider: "BEDROCK", + name: "Amazon Bedrock", metadata: [ { - key: "apiKey", + key: "options.aws_access_key_id", type: "input", - label: "HF API Key", + label: "AWS Access Key", + }, + { + key: "options.aws_secret_access_key", + type: "input", + label: "AWS Secret Access Key", + }, + { + key: "options.aws_region_name", + type: "input", + label: "AWS Region", }, ], }, @@ -526,6 +536,19 @@ export const siteConfig = { }, ], }, + { + disabled: true, + formDescription: "Please enter your HF API key.", + provider: "HUGGINGFACE", + name: "Hugging Face", + metadata: [ + { + key: "apiKey", + type: "input", + label: "HF API Key", + }, + ], + }, ], vectorDbs: [ { From 3488378aa95a7a7bf13e5d147a583aa7db05193a Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 14:50:48 +0400 Subject: [PATCH 5/6] fix: only pass context if any functions are used --- libs/superagent/app/agents/llm.py | 6 +++++- libs/superagent/prompts/function_calling_agent.py | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 libs/superagent/prompts/function_calling_agent.py diff --git a/libs/superagent/app/agents/llm.py b/libs/superagent/app/agents/llm.py index fcb19ce16..f95b7d363 100644 --- a/libs/superagent/app/agents/llm.py +++ b/libs/superagent/app/agents/llm.py @@ -10,6 +10,7 @@ from app.utils.prisma import prisma from prisma.enums import AgentType, LLMProvider from 
prisma.models import Agent +from prompts.function_calling_agent import FUNCTION_CALLING_AGENT_PROMPT from prompts.json import JSON_FORMAT_INSTRUCTIONS logger = logging.getLogger(__name__) @@ -58,6 +59,7 @@ async def init(self): # E.g parent agent metadata have a model key, and if we pass it to ChatOpenAI # It will give us an error, because we will try to use parent Agent's LLM model. self.agent_config.metadata = {} + self.agent_config.prompt = FUNCTION_CALLING_AGENT_PROMPT await self._set_llm() await self._set_tools_return_direct() @@ -137,7 +139,9 @@ async def ainvoke(self, input, *_, **kwargs): input=input ) - if function_calling_res.get("output"): + if function_calling_res.get("output") and function_calling_res.get( + "indermediate_steps" + ): INPUT_TEMPLATE = "{input}\n Context: {context}\n" input = INPUT_TEMPLATE.format( input=input, context=function_calling_res.get("output") diff --git a/libs/superagent/prompts/function_calling_agent.py b/libs/superagent/prompts/function_calling_agent.py new file mode 100644 index 000000000..5d7ba3350 --- /dev/null +++ b/libs/superagent/prompts/function_calling_agent.py @@ -0,0 +1,4 @@ +FUNCTION_CALLING_AGENT_PROMPT = """ + Your job is to call available functions if needed to answer the user's question. + You should simply call the functions. If available functions do not help you to answer the question, just return 'None'. 
+""" From 391c50c941fc898e41904da135d1922e25b17bee Mon Sep 17 00:00:00 2001 From: alisalim17 Date: Fri, 12 Apr 2024 15:13:29 +0400 Subject: [PATCH 6/6] fix: typo --- libs/superagent/app/agents/llm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/superagent/app/agents/llm.py b/libs/superagent/app/agents/llm.py index f95b7d363..bc2a518fd 100644 --- a/libs/superagent/app/agents/llm.py +++ b/libs/superagent/app/agents/llm.py @@ -140,7 +140,7 @@ async def ainvoke(self, input, *_, **kwargs): ) if function_calling_res.get("output") and function_calling_res.get( - "indermediate_steps" + "intermediate_steps" ): INPUT_TEMPLATE = "{input}\n Context: {context}\n" input = INPUT_TEMPLATE.format(