Test invocation parameter parsing from span
cephalization committed Nov 1, 2024
1 parent 2eee8d8 commit bd16769
Showing 2 changed files with 97 additions and 16 deletions.
28 changes: 27 additions & 1 deletion app/src/pages/playground/__tests__/fixtures.ts
@@ -12,7 +12,33 @@ export const basePlaygroundSpan: PlaygroundSpan = {
    name: "test",
  },
  attributes: "",
  invocationParameters: [],
  // Implement a few default openai invocation parameters
  invocationParameters: [
    {
      __typename: "BoundedFloatInvocationParameter",
      canonicalName: "TOP_P",
      invocationInputField: "value_float",
      invocationName: "top_p",
    },
    {
      __typename: "IntInvocationParameter",
      canonicalName: "MAX_COMPLETION_TOKENS",
      invocationInputField: "value_int",
      invocationName: "max_tokens",
    },
    {
      __typename: "StringListInvocationParameter",
      canonicalName: "STOP_SEQUENCES",
      invocationInputField: "value_string_list",
      invocationName: "stop",
    },
    {
      __typename: "IntInvocationParameter",
      canonicalName: "RANDOM_SEED",
      invocationInputField: "value_int",
      invocationName: "seed",
    },
  ],
};
export const spanAttributesWithInputMessages = {
  llm: {
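The fixture now carries the InvocationParameter definitions a span exposes: each entry pairs an invocationName (the snake_case key that appears in the span's raw invocation_parameters JSON) with an invocationInputField naming where the typed value lands once parsed. The updated tests below rely on a snake_case-to-camelCase mapping for that field ("value_float" becomes valueFloat); a minimal, hypothetical sketch of that convention:

// Illustrative only (assumed convention, not code from this repo): maps a
// definition's invocationInputField to the camelCase value key used on the
// parsed parameter objects in the tests below.
const invocationInputFieldToValueKey = (field: string): string =>
  field.replace(/_([a-z])/g, (_match: string, letter: string) =>
    letter.toUpperCase()
  );

// invocationInputFieldToValueKey("value_float") === "valueFloat"
// invocationInputFieldToValueKey("value_string_list") === "valueStringList"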
85 changes: 70 additions & 15 deletions app/src/pages/playground/__tests__/playgroundUtils.test.ts
@@ -27,6 +27,7 @@ import {
  processAttributeToolCalls,
  transformSpanAttributesToPlaygroundInstance,
} from "../playgroundUtils";
import { PlaygroundSpan } from "../spanPlaygroundPageLoader";

import {
  basePlaygroundSpan,
@@ -387,14 +388,14 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
    });
  });

  // TODO(apowell): Re-enable when invocation parameters are parseable from span
  it.skip("should correctly parse the invocation parameters", () => {
    const span = {
  it("should correctly parse the invocation parameters", () => {
    const span: PlaygroundSpan = {
      ...basePlaygroundSpan,
      attributes: JSON.stringify({
        ...spanAttributesWithInputMessages,
        llm: {
          ...spanAttributesWithInputMessages.llm,
          // only parameters defined on the span InvocationParameter[] field are parsed
          // note that snake case keys are automatically converted to camel case
          invocation_parameters:
            '{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"]}',
@@ -406,14 +407,70 @@
        ...expectedPlaygroundInstanceWithIO,
        model: {
          ...expectedPlaygroundInstanceWithIO.model,
          invocationParameters: {
            topP: 0.5,
            maxTokens: 100,
            seed: 12345,
            stop: ["stop", "me"],
          },
          invocationParameters: [
            {
              canonicalName: "TOP_P",
              invocationName: "top_p",
              valueFloat: 0.5,
            },
            {
              canonicalName: "MAX_COMPLETION_TOKENS",
              invocationName: "max_tokens",
              valueInt: 100,
            },
            {
              canonicalName: "RANDOM_SEED",
              invocationName: "seed",
              valueInt: 12345,
            },
            {
              canonicalName: "STOP_SEQUENCES",
              invocationName: "stop",
              valueStringList: ["stop", "me"],
            },
          ],
        },
      },
      } satisfies PlaygroundInstance,
      parsingErrors: [],
    });
  });

  it("should ignore invocation parameters that are not defined on the span", () => {
    const span: PlaygroundSpan = {
      ...basePlaygroundSpan,
      attributes: JSON.stringify({
        ...spanAttributesWithInputMessages,
        llm: {
          ...spanAttributesWithInputMessages.llm,
          // only parameters defined on the span InvocationParameter[] field are parsed
          // note that snake case keys are automatically converted to camel case
          invocation_parameters:
            '{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"]}',
        },
      }),
      invocationParameters: [
        {
          __typename: "IntInvocationParameter",
          canonicalName: "MAX_COMPLETION_TOKENS",
          invocationInputField: "value_int",
          invocationName: "max_tokens",
        },
      ],
    };
    expect(transformSpanAttributesToPlaygroundInstance(span)).toEqual({
      playgroundInstance: {
        ...expectedPlaygroundInstanceWithIO,
        model: {
          ...expectedPlaygroundInstanceWithIO.model,
          invocationParameters: [
            {
              canonicalName: "MAX_COMPLETION_TOKENS",
              invocationName: "max_tokens",
              valueInt: 100,
            },
          ],
        },
      } satisfies PlaygroundInstance,
      parsingErrors: [],
    });
  });
@@ -636,7 +693,7 @@ describe("getModelConfigFromAttributes", () => {
  });

  // TODO(apowell): Re-enable when invocation parameters are parseable from span
  it.skip("should return parsed model config if valid with the provider inferred", () => {
  it("should return parsed model config if valid with the provider inferred", () => {
    const parsedAttributes = {
      llm: {
        model_name: "gpt-3.5-turbo",
@@ -647,10 +704,8 @@
      modelConfig: {
        modelName: "gpt-3.5-turbo",
        provider: "OPENAI",
        invocationParameters: {
          topP: 0.5,
          maxTokens: 100,
        },
        // getBaseModelConfigFromAttributes does not parse invocation parameters
        invocationParameters: [],
      },
      parsingErrors: [],
    });
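Taken together, the two new tests pin down the parsing behavior: the raw llm.invocation_parameters JSON string is matched against the span's InvocationParameter definitions by invocationName, each matched value is emitted under the camelCased invocationInputField, and raw keys with no matching definition are dropped; the last hunk also shows that the model-config helper no longer parses these values itself and now returns an empty invocationParameters list. Below is a simplified sketch of that behavior with assumed type and function names; it is not the actual implementation of transformSpanAttributesToPlaygroundInstance.

// Simplified sketch of the behavior the new tests assert. Type and function
// names here are assumptions for illustration only.
type SpanInvocationParameterDefinition = {
  __typename: string;
  canonicalName: string;
  invocationInputField: string; // e.g. "value_float", "value_int", "value_string_list"
  invocationName: string; // e.g. "top_p", "max_tokens", "stop", "seed"
};

type ParsedInvocationParameterInput = {
  canonicalName: string;
  invocationName: string;
} & Record<string, unknown>; // plus one typed value key, e.g. valueFloat

// "value_string_list" -> "valueStringList"
const snakeToCamel = (s: string): string =>
  s.replace(/_([a-z])/g, (_match: string, letter: string) =>
    letter.toUpperCase()
  );

function parseInvocationParametersFromSpan(
  rawInvocationParametersJson: string,
  definitions: SpanInvocationParameterDefinition[]
): ParsedInvocationParameterInput[] {
  const raw: Record<string, unknown> = JSON.parse(rawInvocationParametersJson);
  return Object.entries(raw).flatMap(([invocationName, value]) => {
    const definition = definitions.find(
      (d) => d.invocationName === invocationName
    );
    // Raw keys with no matching definition on the span are dropped, which is
    // what the "should ignore invocation parameters" test checks.
    if (definition == null) {
      return [];
    }
    return [
      {
        canonicalName: definition.canonicalName,
        invocationName: definition.invocationName,
        [snakeToCamel(definition.invocationInputField)]: value,
      },
    ];
  });
}

// With only the max_tokens definition on the span (as in the second test),
// the top_p, seed, and stop keys in the raw JSON would be dropped.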
