Merge pull request #39 from microsoft/python
Python Update
sethjuarez authored Jul 15, 2024
2 parents 7a0d8eb + 330a4a6 commit 8493707
Showing 14 changed files with 4,017 additions and 27 deletions.
8 changes: 6 additions & 2 deletions .vscode/project.code-workspace
@@ -11,6 +11,10 @@
{
"path": "../runtime/prompty",
"name": "prompty"
},
{
"path": "../runtime/promptycs",
"name": "promptycs"
}
],
}
]
}
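The hunk above registers the new C# runtime (promptycs) as a second root in the multi-root VS Code workspace, next to the existing Python runtime. For context, a minimal sketch of the resulting folders block; entries beyond the two runtimes are omitted here, and .code-workspace files are JSONC, so comments are allowed:

{
    "folders": [
        { "path": "../runtime/prompty", "name": "prompty" },      // existing Python runtime
        { "path": "../runtime/promptycs", "name": "promptycs" }   // added by this change
    ]
}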
2 changes: 1 addition & 1 deletion runtime/prompty/README.md
@@ -1 +1 @@
# prompty

32 changes: 32 additions & 0 deletions runtime/prompty/doc.py
@@ -0,0 +1,32 @@
from inspect import getmembers, isclass, isfunction
import prompty

def build():
fn = [
f for f in getmembers(prompty, isfunction) if f[1].__module__.startswith("prompty")
]
cl = [
s for s in getmembers(prompty, isclass) if s[1].__module__.startswith("prompty")
]

d = {
"prompty": [
{"function": f[0], "module": f[1].__module__, "doc": f[1].__doc__} for f in fn
],
}

for c in cl:
if c[1].__module__ in d:
d[c[1].__module__].append(
{"class": c[0], "module": c[1].__module__, "doc": c[1].__doc__}
)
else:
d[c[1].__module__] = [
{"class": c[0], "module": c[1].__module__, "doc": c[1].__doc__}
]

print("DONE!")


if __name__ == "__main__":
build()
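As committed, build() assembles the dictionary d of public functions and classes but only prints "DONE!", so the collected metadata is discarded. A minimal sketch of how the same introspection could persist its result, assuming JSON output is the goal (build_and_dump and the api-docs.json path are hypothetical, not part of this commit):

from inspect import getmembers, isfunction
import json

import prompty

def build_and_dump(path: str = "api-docs.json"):
    # Hypothetical extension: same introspection as doc.py's build(),
    # but the resulting dictionary is written out instead of dropped.
    fn = [
        f for f in getmembers(prompty, isfunction)
        if f[1].__module__.startswith("prompty")
    ]
    d = {
        "prompty": [
            {"function": f[0], "module": f[1].__module__, "doc": f[1].__doc__}
            for f in fn
        ]
    }
    with open(path, "w") as out:
        json.dump(d, out, indent=2)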
132 changes: 130 additions & 2 deletions runtime/prompty/prompty/__init__.py
@@ -53,6 +53,38 @@ def headless(
parameters: Dict[str, any] = {},
connection: str = "default",
) -> Prompty:
"""Create a headless prompty object for programmatic use.
Parameters
----------
api : str
The API to use for the model
content : str | List[str] | dict
The content to process
configuration : Dict[str, any], optional
The configuration to use, by default {}
parameters : Dict[str, any], optional
The parameters to use, by default {}
connection : str, optional
The connection to use, by default "default"
Returns
-------
Prompty
The headless prompty object
Example
-------
>>> import prompty
>>> p = prompty.headless(
api="embedding",
configuration={"type": "azure", "azure_deployment": "text-embedding-ada-002"},
content="hello world",
)
>>> emb = prompty.execute(p)
"""

# get caller's path (to get relative path for prompty.json)
caller = Path(traceback.extract_stack()[-2].filename)
templateSettings = TemplateSettings(type="NOOP", parser="NOOP")
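Pulling the docstring example together, a runnable sketch of headless usage; the deployment name comes from the docstring itself, while credentials and endpoint are assumed to come from the environment:

import prompty

# Build a Prompty object entirely in code, with no .prompty file on disk.
p = prompty.headless(
    api="embedding",
    configuration={"type": "azure", "azure_deployment": "text-embedding-ada-002"},
    content="hello world",
)
emb = prompty.execute(p)  # embedding vector for "hello world"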
@@ -71,6 +103,27 @@ def headless(


def load(prompty_file: str, configuration: str = "default") -> Prompty:
"""Load a prompty file.
Parameters
----------
prompty_file : str
The path to the prompty file
configuration : str, optional
The configuration to use, by default "default"
Returns
-------
Prompty
The loaded prompty object
Example
-------
>>> import prompty
>>> p = prompty.load("prompts/basic.prompty")
>>> print(p)
"""

p = Path(prompty_file)
if not p.is_absolute():
# get caller's path (take into account trace frame)
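Note that load() resolves relative paths against the caller's file, so "prompts/basic.prompty" is looked up next to the calling script. For reference, a sketch of what such a file might contain; the field values are illustrative assumptions, in the prompty format of YAML front matter plus a templated body:

---
name: Basic Prompt
model:
  api: chat
  configuration:
    type: azure
    azure_deployment: gpt-35-turbo   # assumed deployment name
sample:
  name: John Doe
---
system:
You are a helpful assistant.

user:
Hello, my name is {{name}}.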
@@ -180,6 +233,27 @@ def prepare(
prompt: Prompty,
inputs: Dict[str, any] = {},
):
""" Prepare the inputs for the prompt.
Parameters
----------
prompt : Prompty
The prompty object
inputs : Dict[str, any], optional
The inputs to the prompt, by default {}
Returns
-------
dict
The prepared and hydrated template shaped for the LLM model
Example
-------
>>> import prompty
>>> p = prompty.load("prompts/basic.prompty")
>>> inputs = {"name": "John Doe"}
>>> content = prompty.prepare(p, inputs)
"""
inputs = param_hoisting(inputs, prompt.sample)

if prompt.template.type == "NOOP":
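The param_hoisting call above merges the caller's inputs with the defaults in prompt.sample, so only values that change need to be supplied. A short usage sketch (the file path and sample values are illustrative, as in the docstring examples):

import prompty

p = prompty.load("prompts/basic.prompty")
content = prompty.prepare(p, {"name": "John Doe"})
# content is the rendered, parsed template shaped for the target model,
# e.g. a list of {"role": ..., "content": ...} messages for a chat API.
print(content)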
@@ -208,7 +282,34 @@ def run(
parameters: Dict[str, any] = {},
raw: bool = False,
):
# invoker = InvokerFactory()
"""Run the prepared Prompty content.
Parameters
----------
prompt : Prompty
The prompty object
content : dict | list | str
The content to process
configuration : Dict[str, any], optional
The configuration to use, by default {}
parameters : Dict[str, any], optional
The parameters to use, by default {}
raw : bool, optional
Whether to skip processing, by default False
Returns
-------
any
The result of the prompt
Example
-------
>>> import prompty
>>> p = prompty.load("prompts/basic.prompty")
>>> inputs = {"name": "John Doe"}
>>> content = prompty.prepare(p, inputs)
>>> result = prompty.run(p, content)
"""

if configuration != {}:
prompt.model.configuration = param_hoisting(
@@ -243,7 +344,34 @@ def execute(
raw: bool = False,
connection: str = "default",
):

"""Execute a prompty.
Parameters
----------
prompt : Union[str, Prompty]
The prompty object or path to the prompty file
configuration : Dict[str, any], optional
The configuration to use, by default {}
parameters : Dict[str, any], optional
The parameters to use, by default {}
inputs : Dict[str, any], optional
The inputs to the prompt, by default {}
raw : bool, optional
Whether to skip processing, by default False
connection : str, optional
The connection to use, by default "default"
Returns
-------
any
The result of the prompt
Example
-------
>>> import prompty
>>> inputs = {"name": "John Doe"}
>>> result = prompty.execute("prompts/basic.prompty", inputs=inputs)
"""
if isinstance(prompt, str):
path = Path(prompt)
if not path.is_absolute():
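execute() is the one-call convenience wrapper: given a path (resolved relative to the caller, as above) it loads the file, then prepares and runs it; given a Prompty object it skips the load. Sketches of both styles, following the docstring examples; the path and inputs are illustrative:

import prompty

# One-shot: load, prepare, and run in a single call.
result = prompty.execute("prompts/basic.prompty", inputs={"name": "John Doe"})

# Step-by-step: useful when the prepared content should be inspected first.
p = prompty.load("prompts/basic.prompty")
content = prompty.prepare(p, {"name": "John Doe"})
result = prompty.run(p, content)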
