
Commit

chore: update web-llm version
Neet-Nestor committed Jun 13, 2024
1 parent: 6a5564d · commit: 88e51a2
Showing 10 changed files with 31 additions and 79 deletions.

app/client/api.ts: 5 changes (1 addition & 4 deletions)
@@ -1,7 +1,4 @@
-import {
-  ChatCompletionFinishReason,
-  CompletionUsage,
-} from "@neet-nestor/web-llm";
+import { ChatCompletionFinishReason, CompletionUsage } from "@mlc-ai/web-llm";
 import { CacheType, ModelType } from "../store";
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];

app/client/webllm.ts: 58 changes (13 additions & 45 deletions)
@@ -12,10 +12,10 @@ import {
   WebWorkerMLCEngine,
   CompletionUsage,
   ChatCompletionFinishReason,
-} from "@neet-nestor/web-llm";
+} from "@mlc-ai/web-llm";

 import { ChatOptions, LLMApi, LLMConfig, RequestMessage } from "./api";
-import { LogLevel } from "@neet-nestor/web-llm";
+import { LogLevel } from "@mlc-ai/web-llm";

 const KEEP_ALIVE_INTERVAL = 5_000;

@@ -128,51 +128,19 @@ export class WebLLMApi implements LLMApi {
         log.error(JSON.stringify(err));
         errorMessage = JSON.stringify(err);
       }
-      if (!errorMessage.includes("MLCEngine.reload(model)")) {
-        console.error("Error in chatCompletion", errorMessage);
-        if (
-          errorMessage.includes("WebGPU") &&
-          errorMessage.includes("compatibility chart")
-        ) {
-          // Add WebGPU compatibility chart link
-          errorMessage = errorMessage.replace(
-            "compatibility chart",
-            "[compatibility chart](https://caniuse.com/webgpu)",
-          );
-        }
-        options.onError?.(errorMessage);
-        return;
-      }
-      // Service worker has been stopped. Restart it
-      try {
-        await this.initModel(options.onUpdate);
-      } catch (err: any) {
-        let errorMessage = err.message || err.toString() || "";
-        if (errorMessage === "[object Object]") {
-          errorMessage = JSON.stringify(err);
-        }
-        console.error("Error while initializing the model", errorMessage);
-        options?.onError?.(errorMessage);
-        return;
-      }
-      try {
-        const completion = await this.chatCompletion(
-          !!options.config.stream,
-          options.messages,
-          options.onUpdate,
+      console.error("Error in chatCompletion", errorMessage);
+      if (
+        errorMessage.includes("WebGPU") &&
+        errorMessage.includes("compatibility chart")
+      ) {
+        // Add WebGPU compatibility chart link
+        errorMessage = errorMessage.replace(
+          "compatibility chart",
+          "[compatibility chart](https://caniuse.com/webgpu)",
        );
-        reply = completion.content;
-        stopReason = completion.stopReason;
-        usage = completion.usage;
-      } catch (err: any) {
-        let errorMessage = err.message || err.toString() || "";
-        if (errorMessage === "[object Object]") {
-          errorMessage = JSON.stringify(err);
-        }
-        console.error("Error in chatCompletion", errorMessage);
-        options.onError?.(errorMessage);
-        return;
       }
+      options.onError?.(errorMessage);
+      return;
     }

     if (reply) {
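
Note on the hunk above: the old code treated errors mentioning "MLCEngine.reload(model)" as a stopped service worker, re-initialized the model, and retried the completion; the new code only normalizes the error, linkifies the WebGPU compatibility-chart hint, and reports it. A minimal standalone sketch of that remaining error path (the helper name formatChatError is illustrative and not part of this repository):

function formatChatError(err: unknown): string {
  // Normalize whatever was thrown into a readable string.
  let errorMessage: string = (err as any)?.message || String(err) || "";
  if (errorMessage === "[object Object]") {
    errorMessage = JSON.stringify(err);
  }
  // Turn web-llm's plain "compatibility chart" hint into a markdown link.
  if (
    errorMessage.includes("WebGPU") &&
    errorMessage.includes("compatibility chart")
  ) {
    errorMessage = errorMessage.replace(
      "compatibility chart",
      "[compatibility chart](https://caniuse.com/webgpu)",
    );
  }
  return errorMessage;
}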

app/components/home.tsx: 2 changes (1 addition & 1 deletion)
@@ -13,7 +13,7 @@ import {
   Route,
   useLocation,
 } from "react-router-dom";
-import { ServiceWorkerMLCEngine } from "@neet-nestor/web-llm";
+import { ServiceWorkerMLCEngine } from "@mlc-ai/web-llm";

 import MlcIcon from "../icons/mlc.svg";
 import LoadingIcon from "../icons/three-dots.svg";

app/components/settings.tsx: 12 changes (2 additions & 10 deletions)
@@ -9,15 +9,7 @@ import ClearIcon from "../icons/clear.svg";
 import EditIcon from "../icons/edit.svg";
 import EyeIcon from "../icons/eye.svg";

-import {
-  Input,
-  List,
-  ListItem,
-  Modal,
-  Popover,
-  Select,
-  showConfirm,
-} from "./ui-lib";
+import { Input, List, ListItem, Modal, Select, showConfirm } from "./ui-lib";
 import { ModelConfigList } from "./model-config";

 import { IconButton } from "./button";
@@ -42,7 +34,7 @@ import { ErrorBoundary } from "./error";
 import { InputRange } from "./input-range";
 import { useNavigate } from "react-router-dom";
 import { nanoid } from "nanoid";
-import { LogLevel } from "@neet-nestor/web-llm";
+import { LogLevel } from "@mlc-ai/web-llm";
 import { WebLLMContext } from "../client/webllm";

 function EditPromptModal(props: { id: string; onClose: () => void }) {

app/store/chat.ts: 5 changes (1 addition & 4 deletions)
@@ -16,10 +16,7 @@ import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
 import { WebLLMApi } from "../client/webllm";
-import {
-  ChatCompletionFinishReason,
-  CompletionUsage,
-} from "@neet-nestor/web-llm";
+import { ChatCompletionFinishReason, CompletionUsage } from "@mlc-ai/web-llm";

 export type ChatMessage = RequestMessage & {
   date: string;

app/store/config.ts: 2 changes (1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-import { LogLevel } from "@neet-nestor/web-llm";
+import { LogLevel } from "@mlc-ai/web-llm";
 import { ModelRecord } from "../client/api";
 import {
   DEFAULT_INPUT_TEMPLATE,

app/worker/service-worker.ts: 9 changes (4 additions & 5 deletions)
@@ -1,12 +1,11 @@
-import { MLCEngineServiceWorkerHandler, MLCEngine } from "@neet-nestor/web-llm";
+import { ServiceWorkerMLCEngineHandler } from "@mlc-ai/web-llm";
 import { defaultCache } from "@serwist/next/worker";
 import type { PrecacheEntry, SerwistGlobalConfig } from "serwist";
 import { CacheFirst, ExpirationPlugin, Serwist } from "serwist";

 declare const self: ServiceWorkerGlobalScope;
 const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
-const engine = new MLCEngine();
-let handler: MLCEngineServiceWorkerHandler;
+let handler: ServiceWorkerMLCEngineHandler;

 async function checkGPUAvailablity() {
   if (!("gpu" in navigator)) {
@@ -23,7 +22,7 @@ async function checkGPUAvailablity() {

 self.addEventListener("message", (event) => {
   if (!handler) {
-    handler = new MLCEngineServiceWorkerHandler(engine);
+    handler = new ServiceWorkerMLCEngineHandler();
     console.log("Service Worker: Web-LLM Engine Activated");
   }

@@ -58,7 +57,7 @@ self.addEventListener("install", (event) => {

 self.addEventListener("activate", (event) => {
   if (!handler) {
-    handler = new MLCEngineServiceWorkerHandler(engine);
+    handler = new ServiceWorkerMLCEngineHandler();
     console.log("Service Worker: Web-LLM Engine Activated");
   }
 });
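
A side effect of the package change above: the old MLCEngineServiceWorkerHandler was constructed around an explicitly created MLCEngine, whereas @mlc-ai/web-llm's ServiceWorkerMLCEngineHandler takes no constructor argument here and manages the engine itself. A minimal sketch of the resulting service-worker wiring, using only the APIs visible in this diff:

import { ServiceWorkerMLCEngineHandler } from "@mlc-ai/web-llm";

declare const self: ServiceWorkerGlobalScope;

let handler: ServiceWorkerMLCEngineHandler | undefined;

// Create the handler lazily on first contact, as the "message" and
// "activate" listeners changed above now do.
self.addEventListener("message", () => {
  handler ??= new ServiceWorkerMLCEngineHandler();
});

self.addEventListener("activate", () => {
  handler ??= new ServiceWorkerMLCEngineHandler();
});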

app/worker/web-worker.ts: 7 changes (3 additions & 4 deletions)
@@ -1,14 +1,13 @@
 import log from "loglevel";
-import { MLCEngineWorkerHandler, MLCEngine } from "@neet-nestor/web-llm";
+import { WebWorkerMLCEngineHandler } from "@mlc-ai/web-llm";

-const engine = new MLCEngine();
-let handler: MLCEngineWorkerHandler;
+let handler: WebWorkerMLCEngineHandler;

 self.addEventListener("message", (event) => {});

 self.onmessage = (msg: MessageEvent) => {
   if (!handler) {
-    handler = new MLCEngineWorkerHandler(engine);
+    handler = new WebWorkerMLCEngineHandler();
     log.info("Web Worker: Web-LLM Engine Activated");
   }
   handler.onmessage(msg);
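
For completeness, the worker above is only the handler side; the UI drives it through the WebWorkerMLCEngine imported in app/client/webllm.ts earlier in this commit. A rough sketch of how a caller might connect, assuming @mlc-ai/web-llm's CreateWebWorkerMLCEngine factory and the example model id below (neither appears in this diff):

import { CreateWebWorkerMLCEngine } from "@mlc-ai/web-llm";

async function connectToWebWorkerEngine() {
  // Assumption: the bundler supports module workers via new URL(...).
  const worker = new Worker(new URL("./web-worker.ts", import.meta.url), {
    type: "module",
  });
  // Assumption: CreateWebWorkerMLCEngine(worker, modelId, config?) is the
  // factory documented for @mlc-ai/web-llm; the model id is only an example.
  const engine = await CreateWebWorkerMLCEngine(
    worker,
    "Llama-3-8B-Instruct-q4f32_1-MLC",
    { initProgressCallback: (report) => console.log(report.text) },
  );
  const completion = await engine.chat.completions.create({
    messages: [{ role: "user", content: "Hello!" }],
  });
  console.log(completion.choices[0].message.content);
}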

package.json: 2 changes (1 addition & 1 deletion)
@@ -17,7 +17,7 @@
   "dependencies": {
     "@fortaine/fetch-event-source": "^3.0.6",
     "@hello-pangea/dnd": "^16.5.0",
-    "@neet-nestor/web-llm": "^0.2.57",
+    "@mlc-ai/web-llm": "^0.2.46",
     "@serwist/next": "^9.0.2",
     "@svgr/webpack": "^6.5.1",
     "emoji-picker-react": "^4.9.2",

yarn.lock: 8 changes (4 additions & 4 deletions)
@@ -2089,10 +2089,10 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"

-"@neet-nestor/web-llm@^0.2.57":
-  version "0.2.57"
-  resolved "https://registry.yarnpkg.com/@neet-nestor/web-llm/-/web-llm-0.2.57.tgz#6360624a01828e0fe30ebf2a956d055ad00f707d"
-  integrity sha512-hZig9pAzMc//fQBxZ1lt8db6G96zuLS9snzI/iKbHgM7ajvjV0Yun/eW4EojN4oawfzuVo8lcKe78pDHLIWUqw==
+"@mlc-ai/web-llm@^0.2.46":
+  version "0.2.46"
+  resolved "https://registry.yarnpkg.com/@mlc-ai/web-llm/-/web-llm-0.2.46.tgz#bf32dab0710dba4967d49b2810b39184272d8840"
+  integrity sha512-NGf5ylTvOVwH1iN4Q2N0t/GfmrjDekFPMjWRO/WqEB3BhSC6+B2ZkLY3AZ43c2RbT2NqROp3BovgNq3Mxq3ONQ==
   dependencies:
     loglevel "^1.9.1"

