Skip to content

Commit

Permalink
Add VertexAILLM & VertexAIEndpointLLM classes (#204)
Browse the repository at this point in the history
* Add `VertexGenerativeModelLLM` class

* Remove passing `expected_output_type`

* Integrate Vertex AI Text/Code API models

* Add `_vertexai_retry_decorator` variable

* Add `VertexAIEndpointLLM` class

* Refactor `_generate` methods

* Add Vertex AI unit tests

* Add `vertexai` extra requirement

* Update `_generate_contents` method
  • Loading branch information
gabrielmbmb authored Jan 5, 2024
1 parent f5a030c commit b62cc25
Show file tree
Hide file tree
Showing 15 changed files with 861 additions and 37 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ jobs:

- name: Install dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: pip install -e .[dev,tests]
run: pip install -e .[dev,tests,vertexai]

- name: Lint
run: make lint
Expand Down
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ hf-inference-endpoints = ["huggingface_hub >= 0.19.0"]
llama-cpp = ["llama-cpp-python >= 0.2.0"]
openai = ["openai >= 1.0.0"]
vllm = ["vllm >= 0.2.1"]
vertexai = ["google-cloud-aiplatform >= 1.38.0"]
argilla = ["argilla >= 1.18.0"]
tests = ["pytest >= 7.4.0"]
docs = [
Expand Down
15 changes: 9 additions & 6 deletions src/distilabel/llm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,22 @@
# limitations under the License.

from distilabel.llm.base import LLM, LLMPool, ProcessLLM
from distilabel.llm.google.vertexai import VertexAIEndpointLLM, VertexAILLM
from distilabel.llm.huggingface.inference_endpoints import InferenceEndpointsLLM
from distilabel.llm.huggingface.transformers import TransformersLLM
from distilabel.llm.llama_cpp import LlamaCppLLM
from distilabel.llm.openai import OpenAILLM
from distilabel.llm.vllm import vLLM

__all__ = [
"OpenAILLM",
"LlamaCppLLM",
"vLLM",
"InferenceEndpointsLLM",
"TransformersLLM",
"ProcessLLM",
"LLM",
"LLMPool",
"ProcessLLM",
"VertexAIEndpointLLM",
"VertexAILLM",
"InferenceEndpointsLLM",
"TransformersLLM",
"LlamaCppLLM",
"OpenAILLM",
"vLLM",
]
10 changes: 0 additions & 10 deletions src/distilabel/llm/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
Dict,
Generator,
List,
Type,
Union,
)

Expand Down Expand Up @@ -118,15 +117,13 @@ def _generate_prompts(
self,
inputs: List[Dict[str, Any]],
default_format: Union["SupportedFormats", None] = None,
expected_output_type: Type = str,
) -> List[Any]:
"""Generates the prompts to be used for generation.
Args:
inputs (List[Dict[str, Any]]): the inputs to be used for generation.
default_format (Union["SupportedFormats", None], optional): the default format to be used
for the prompt if no `prompt_format` is specified. Defaults to `None`.
expected_output_type (Type, optional): the expected type of the prompt. Defaults to `str`.
Returns:
List[Any]: the generated prompts.
Expand Down Expand Up @@ -161,13 +158,6 @@ def _generate_prompts(
stacklevel=2,
)
prompt = prompt.format_as(format="default")
if not isinstance(prompt, expected_output_type):
raise ValueError(
f"The provided `prompt={prompt}` is of `type={type(prompt)}`, but it must be of"
f" `type={expected_output_type}`, so make sure that `task.generate_prompt` returns"
f" a `{expected_output_type}` or that the `formatting_fn` formats the prompt as a "
f" `{expected_output_type}`."
)
prompts.append(prompt)
return prompts

Expand Down
14 changes: 14 additions & 0 deletions src/distilabel/llm/google/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Copyright 2023-present, Argilla, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

Loading

0 comments on commit b62cc25

Please sign in to comment.