Skip to content

Commit

Permalink
fix: adapt to the changed Chrome Prompt API (`textModelInfo()` replaces `defaultTextSessionOptions()`)
Browse files Browse the repository at this point in the history
  • Loading branch information
jeasonstudio committed Aug 8, 2024
1 parent e831ec2 commit 52d57cd
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 36 deletions.
10 changes: 8 additions & 2 deletions src/global.d.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
/** Availability of the built-in text model: unavailable, downloadable, or ready to use. */
export type ChromeAISessionAvailable = 'no' | 'after-download' | 'readily';

export interface ChromeAISessionOptions {
/**
 * Static information about the built-in text model, as returned by
 * `textModelInfo()`.
 */
export interface ChromeAIModelInfo {
  /** Temperature applied when a session is created without an explicit one. */
  defaultTemperature: number;
  /** Top-K sampling value applied when a session is created without an explicit one. */
  defaultTopK: number;
  /** Largest Top-K value the model accepts. */
  maxTopK: number;
}

/**
 * Options accepted when creating a text session. Additional keys are
 * permitted (the index signature) and passed through untouched.
 */
export interface ChromeAISessionOptions extends Record<string, any> {
  /** Sampling temperature; when omitted, the model default is used. */
  temperature?: number;
  /** Top-K sampling parameter; when omitted, the model default is used. */
  topK?: number;
}
Expand All @@ -13,7 +19,7 @@ export interface ChromeAISession {

export interface ChromePromptAPI {
canCreateTextSession: () => Promise<ChromeAISessionAvailable>;
defaultTextSessionOptions: () => Promise<ChromeAISessionOptions>;
textModelInfo: () => Promise<ChromeAIModelInfo>;
createTextSession: (
options?: ChromeAISessionOptions
) => Promise<ChromeAISession>;
Expand Down
35 changes: 7 additions & 28 deletions src/language-model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,33 +24,7 @@ export type ChromeAIChatModelId = 'text';

/** A single per-category content-safety rule. */
type ChromeAISafetySetting = {
  category:
    | 'HARM_CATEGORY_HATE_SPEECH'
    | 'HARM_CATEGORY_DANGEROUS_CONTENT'
    | 'HARM_CATEGORY_HARASSMENT'
    | 'HARM_CATEGORY_SEXUALLY_EXPLICIT';
  threshold:
    | 'HARM_BLOCK_THRESHOLD_UNSPECIFIED'
    | 'BLOCK_LOW_AND_ABOVE'
    | 'BLOCK_MEDIUM_AND_ABOVE'
    | 'BLOCK_ONLY_HIGH'
    | 'BLOCK_NONE';
};

/**
 * Settings for the Chrome AI chat language model. Unknown extra keys are
 * allowed and forwarded unchanged.
 */
export interface ChromeAIChatSettings extends Record<string, unknown> {
  /** Sampling temperature. */
  temperature?: number;

  /**
   * Optional. The maximum number of tokens considered when sampling (Top-K).
   *
   * Models use nucleus sampling or combined Top-k and nucleus sampling;
   * a model running pure nucleus sampling does not allow a topK setting.
   */
  topK?: number;

  /**
   * Optional. Unique safety settings for blocking unsafe content,
   * one entry per harm category.
   * @note this is not working yet
   */
  safetySettings?: ChromeAISafetySetting[];
}

function getStringContent(
Expand Down Expand Up @@ -105,8 +79,13 @@ export class ChromeAIChatLanguageModel implements LanguageModelV1 {
throw new LoadSettingError({ message: 'Built-in model not ready' });
}

const defaultOptions = await ai.defaultTextSessionOptions();
this.options = { ...defaultOptions, ...this.options, ...options };
const defaultOptions = await ai.textModelInfo();

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do generate text

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doGenerate src/language-model.ts:165:21 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/generate-text.ts:189:30 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/generate-text.ts:179:32 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ src/language-model.test.ts:68:5

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do stream text

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doStream src/language-model.ts:193:21 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/stream-text.ts:194:23 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/stream-text.ts:182:11 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ src/language-model.test.ts:111:20

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do generate object

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doGenerate src/language-model.ts:165:21 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ Module.generateObject node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-object/generate-object.ts:118:30 ❯ src/language-model.test.ts:128:24

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do generate text

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doGenerate src/language-model.ts:165:21 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/generate-text.ts:189:30 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/generate-text.ts:179:32 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ src/language-model.test.ts:68:5

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do stream text

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doStream src/language-model.ts:193:21 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/stream-text.ts:194:23 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ fn node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-text/stream-text.ts:182:11 ❯ node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/telemetry/record-span.ts:18:22 ❯ src/language-model.test.ts:111:20

Check failure on line 82 in src/language-model.ts

View workflow job for this annotation

GitHub Actions / build-and-test-coverage

src/language-model.test.ts > language-model > should do generate object

TypeError: ai.textModelInfo is not a function ❯ ChromeAIChatLanguageModel.getSession src/language-model.ts:82:37 ❯ ChromeAIChatLanguageModel.doGenerate src/language-model.ts:165:21 ❯ _retryWithExponentialBackoff node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/util/retry-with-exponential-backoff.ts:36:12 ❯ Module.generateObject node_modules/.pnpm/[email protected][email protected][email protected][email protected][email protected]/node_modules/ai/core/generate-object/generate-object.ts:118:30 ❯ src/language-model.test.ts:128:24
this.options = {
temperature: defaultOptions.defaultTemperature,
topK: defaultOptions.defaultTopK,
...this.options,
...options,
};

this.session = await ai.createTextSession(this.options);

Expand Down
14 changes: 8 additions & 6 deletions src/polyfill/session.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { LlmInference, ProgressListener } from '@mediapipe/tasks-genai';
import {
ChromeAIModelInfo,
ChromeAISession,
ChromeAISessionAvailable,
ChromeAISessionOptions,
Expand Down Expand Up @@ -87,16 +88,17 @@ export class PolyfillChromeAI implements ChromePromptAPI {
return isModelAssetBufferReady ? 'readily' : 'after-download';
};

/**
 * Returns the options used when a text session is created without any
 * caller-supplied options.
 */
public defaultTextSessionOptions = async (): Promise<ChromeAISessionOptions> => {
  const defaults: ChromeAISessionOptions = { temperature: 0.8, topK: 3 };
  return defaults;
};
/**
 * Reports the polyfill's static model parameters: the default sampling
 * settings and the largest accepted Top-K value.
 */
public textModelInfo = async (): Promise<ChromeAIModelInfo> => {
  const info: ChromeAIModelInfo = {
    defaultTemperature: 0.8,
    defaultTopK: 3,
    maxTopK: 128,
  };
  return info;
};

public createTextSession = async (
options?: ChromeAISessionOptions
): Promise<ChromeAISession> => {
const argv = options ?? (await this.defaultTextSessionOptions());
const defaultParams = await this.textModelInfo();
const argv = options ?? { temperature: 0.8, topK: 3 };
const llm = await LlmInference.createFromOptions(
{
wasmLoaderPath: this.aiOptions.wasmLoaderPath!,
Expand Down

0 comments on commit 52d57cd

Please sign in to comment.