Skip to content

Commit

Permalink
Added context-based tracing injection and internal stream handling
Browse files Browse the repository at this point in the history
  • Loading branch information
sethjuarez committed Aug 7, 2024
1 parent 755e991 commit dd5a39d
Show file tree
Hide file tree
Showing 10 changed files with 321 additions and 182 deletions.
34 changes: 32 additions & 2 deletions runtime/prompty/prompty/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
import json
import abc
from pathlib import Path
from .tracer import Tracer, trace, to_dict
from pydantic import BaseModel, Field, FilePath
from typing import List, Literal, Dict, Callable, Set, TypeVar
from .tracer import Tracer, trace, to_dict
from typing import Any, Callable, Dict, Iterator, List, Literal, Set


class PropertySettings(BaseModel):
Expand Down Expand Up @@ -449,3 +449,33 @@ def read(cls, string):
"body": body,
"frontmatter": fmatter,
}


class PromptyStream(Iterator):
    """Iterator wrapper over an LLM streaming response.

    Forwards every item yielded by the underlying iterator while keeping
    a copy, so that once the stream is exhausted the full contents can be
    emitted as a single tracer event. Necessary for Prompty to handle
    streaming data when tracing.
    """

    def __init__(self, name: str, iterator: Iterator):
        # name: label used for the trace span (e.g. the executor name)
        # iterator: the underlying stream being wrapped
        self.name = name
        self.iterator = iterator
        # fix: original annotated `List[any]` — `any` is the builtin
        # function, not a type; the correct annotation is typing.Any
        self.items: List[Any] = []
        self.__name__ = "PromptyStream"

    def __iter__(self):
        return self

    def __next__(self):
        try:
            # forward the next item from the wrapped stream,
            # recording it for the final trace event
            o = next(self.iterator)
            self.items.append(o)
            return o

        except StopIteration:
            # underlying stream is exhausted: report the accumulated
            # items as one trace event (only if anything was streamed)
            if len(self.items) > 0:
                # local name `t` avoids shadowing the module-level
                # `trace` decorator imported from .tracer
                with Tracer.start(f"{self.name}.PromptyStream") as t:
                    t("items", [to_dict(s) for s in self.items])

            raise StopIteration
17 changes: 8 additions & 9 deletions runtime/prompty/prompty/executors.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import azure.identity
from .tracer import Trace
from openai import AzureOpenAI
from .core import Invoker, InvokerFactory, Prompty
import importlib.metadata
from typing import Iterator
from openai import AzureOpenAI
from .core import Invoker, InvokerFactory, Prompty, PromptyStream

VERSION = importlib.metadata.version("prompty")

Expand Down Expand Up @@ -87,9 +87,8 @@ def invoke(self, data: any) -> any:
elif self.api == "image":
raise NotImplementedError("Azure OpenAI Image API is not implemented yet")

if hasattr(response, "usage") and response.usage:
Trace.add("completion_tokens", response.usage.completion_tokens)
Trace.add("prompt_tokens", response.usage.prompt_tokens)
Trace.add("total_tokens", response.usage.total_tokens)

return response
# stream response
if isinstance(response, Iterator):
return PromptyStream("AzureOpenAIExecutor", response)
else:
return response
7 changes: 2 additions & 5 deletions runtime/prompty/prompty/processors.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
from .tracer import Trace
from openai import Stream
from typing import Iterator
from pydantic import BaseModel
from openai.types.completion import Completion
from .core import Invoker, InvokerFactory, Prompty
from openai.types.chat.chat_completion import ChatCompletion
from .core import Invoker, InvokerFactory, Prompty, PromptyStream
from openai.types.create_embedding_response import CreateEmbeddingResponse


Expand Down Expand Up @@ -66,9 +64,8 @@ def generator():
for chunk in data:
if len(chunk.choices) == 1 and chunk.choices[0].delta.content != None:
content = chunk.choices[0].delta.content
Trace.add("stream", content)
yield content

return generator()
return PromptyStream("OpenAIProcessor", generator())
else:
return data
Loading

0 comments on commit dd5a39d

Please sign in to comment.