Merge remote-tracking branch 'upstream/main'
Hk-Gosuto committed Nov 16, 2023
2 parents f9d3667 + 9876a1a commit f718ca0
Showing 48 changed files with 741 additions and 428 deletions.
1 change: 0 additions & 1 deletion README.md
@@ -88,7 +88,6 @@

## What's New

- 🚀 v2.9.6 released
- 🚀 v2.9.5 stable release
- 🚀 v2.9.1-plugin-preview preview release.

20 changes: 17 additions & 3 deletions README_CN.md
@@ -90,6 +90,20 @@ OpenAI API proxy URL; if you have manually configured an OpenAI API proxy, fill in

Specify the organization ID used with OpenAI.

### `AZURE_URL` (optional)

> Format: https://{azure-resource-url}/openai/deployments/{deploy-name}

The Azure deployment URL.

### `AZURE_API_KEY` (optional)

The Azure API key.

### `AZURE_API_VERSION` (optional)

The Azure API version; you can find it here: [Azure documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions).
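
The three variables are used together; below is a minimal sketch of what they might look like in a `.env` file (every value is a placeholder, not taken from this commit):

```
AZURE_URL=https://your-resource-name.openai.azure.com/openai/deployments/your-deploy-name
AZURE_API_KEY=your-azure-api-key
AZURE_API_VERSION=2023-08-01-preview
```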

### `HIDE_USER_API_KEY` (optional)

If you do not want users to enter their own API key, set this environment variable to 1.
@@ -108,9 +122,9 @@ OpenAI API proxy URL; if you have manually configured an OpenAI API proxy, fill in

### `CUSTOM_MODELS` (optional)

> Example: `+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo` means adding `qwen-7b-chat` and `glm-6b` to the model list, and removing `gpt-3.5-turbo` from the list
> Example: `+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` means adding `qwen-7b-chat` and `glm-6b` to the model list, removing `gpt-3.5-turbo` from the list, and displaying the `gpt-4-1106-preview` model under the name `gpt-4-turbo`
Controls the model list. Use `+` to add a model and `-` to hide one, separated by commas.
Controls the model list. Use `+` to add a model, `-` to hide one, and `model-name:display-name` to customize a model's display name, separated by commas.
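
For illustration, here is a rough TypeScript sketch of how such a `CUSTOM_MODELS` string could be interpreted; this is a hypothetical helper, not the project's actual `collectModelTable` implementation:

```ts
// Hypothetical parser for a CUSTOM_MODELS-style string (sketch only).
function parseCustomModels(value: string) {
  const table: Record<string, { available: boolean; displayName: string }> = {};
  for (const item of value.split(",").map((s) => s.trim()).filter(Boolean)) {
    const available = !item.startsWith("-");              // "-model" hides the model
    const entry = item.replace(/^[+-]/, "");              // strip the leading "+" or "-"
    const [name, displayName = name] = entry.split(":");  // "name:display" renames it
    table[name] = { available, displayName };
  }
  return table;
}

// parseCustomModels("+qwen-7b-chat,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo")
// => qwen-7b-chat available, gpt-3.5-turbo hidden,
//    gpt-4-1106-preview shown with display name "gpt-4-turbo"
```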

## Development

@@ -124,7 +138,7 @@ OpenAI API proxy URL; if you have manually configured an OpenAI API proxy, fill in
OPENAI_API_KEY=<your api key here>
# Users in mainland China can use the proxy bundled with this project for development, or choose any other proxy address
BASE_URL=https://nb.nextweb.fun/api/proxy
BASE_URL=https://a.nextweb.fun/api/proxy
```

### Local Development
25 changes: 19 additions & 6 deletions app/api/auth.ts
@@ -28,7 +28,7 @@ export function auth(req: NextRequest) {
const authToken = req.headers.get("Authorization") ?? "";

// check if it is openai api key or user token
const { accessCode, apiKey: token } = parseApiKey(authToken);
const { accessCode, apiKey } = parseApiKey(authToken);

const hashedCode = md5.hash(accessCode ?? "").trim();

@@ -39,19 +39,32 @@ export function auth(req: NextRequest) {
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());

if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !token) {
if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !apiKey) {
return {
error: true,
msg: !accessCode ? "empty access code" : "wrong access code",
};
}

if (serverConfig.hideUserApiKey && !!apiKey) {
return {
error: true,
msg: "you are not allowed to access openai with your own api key",
};
}

// if user does not provide an api key, inject system api key
if (!token) {
const apiKey = serverConfig.apiKey;
if (apiKey) {
if (!apiKey) {
const serverApiKey = serverConfig.isAzure
? serverConfig.azureApiKey
: serverConfig.apiKey;

if (serverApiKey) {
console.log("[Auth] use system api key");
req.headers.set("Authorization", `Bearer ${apiKey}`);
req.headers.set(
"Authorization",
`${serverConfig.isAzure ? "" : "Bearer "}${serverApiKey}`,
);
} else {
console.log("[Auth] admin did not provide an api key");
}
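
For context, a sketch of how the forwarded credential differs between providers after this change (the key values are placeholders): requests to OpenAI keep the usual Bearer token, while Azure requests carry the raw key and are forwarded under an `api-key` header by `app/api/common.ts` below.

```ts
// Sketch only; both keys are placeholders, not real credentials.
const openaiUpstreamHeaders = { Authorization: "Bearer sk-xxxxxxxx" }; // OpenAI: Bearer token
const azureUpstreamHeaders = { "api-key": "xxxxxxxx" };                // Azure: raw key, no "Bearer " prefix
```
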
33 changes: 24 additions & 9 deletions app/api/common.ts
@@ -1,19 +1,24 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
import { collectModelTable, collectModels } from "../utils/model";
import { collectModelTable } from "../utils/model";
import { makeAzurePath } from "../azure";

const serverConfig = getServerSideConfig();

export async function requestOpenai(req: NextRequest) {
const controller = new AbortController();

const authValue = req.headers.get("Authorization") ?? "";
const openaiPath = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";

let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
"/api/openai/",
"",
);

let baseUrl = serverConfig.baseUrl ?? OPENAI_BASE_URL;
let baseUrl =
serverConfig.azureUrl || serverConfig.baseUrl || OPENAI_BASE_URL;

if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
@@ -23,7 +28,7 @@ export async function requestOpenai(req: NextRequest) {
baseUrl = baseUrl.slice(0, -1);
}

console.log("[Proxy] ", openaiPath);
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
console.log("[Org ID]", serverConfig.openaiOrgId);

@@ -34,14 +39,24 @@ export async function requestOpenai(req: NextRequest) {
10 * 60 * 1000,
);

const fetchUrl = `${baseUrl}/${openaiPath}`;
if (serverConfig.isAzure) {
if (!serverConfig.azureApiVersion) {
return NextResponse.json({
error: true,
message: `missing AZURE_API_VERSION in server env vars`,
});
}
path = makeAzurePath(path, serverConfig.azureApiVersion);
}

const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
Authorization: authValue,
...(process.env.OPENAI_ORG_ID && {
"OpenAI-Organization": process.env.OPENAI_ORG_ID,
[authHeaderName]: authValue,
...(serverConfig.openaiOrgId && {
"OpenAI-Organization": serverConfig.openaiOrgId,
}),
},
method: req.method,
Expand All @@ -66,7 +81,7 @@ export async function requestOpenai(req: NextRequest) {
const jsonBody = JSON.parse(clonedBody) as { model?: string };

// not undefined and is false
if (modelTable[jsonBody?.model ?? ""] === false) {
if (modelTable[jsonBody?.model ?? ""].available === false) {
return NextResponse.json(
{
error: true,
9 changes: 9 additions & 0 deletions app/azure.ts
@@ -0,0 +1,9 @@
export function makeAzurePath(path: string, apiVersion: string) {
// should omit /v1 prefix
path = path.replaceAll("v1/", "");

  // should append api-version to the query string
path += `${path.includes("?") ? "&" : "?"}api-version=${apiVersion}`;

return path;
}
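
A quick usage sketch of the helper above; the API version string is just a placeholder:

```ts
// Strips the "v1/" prefix and appends the api-version query parameter.
makeAzurePath("v1/chat/completions", "2023-08-01-preview");
// => "chat/completions?api-version=2023-08-01-preview"
```
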
16 changes: 10 additions & 6 deletions app/client/api.ts
@@ -1,5 +1,5 @@
import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX } from "../constant";
import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";

@@ -151,22 +151,26 @@ export const api = new ClientApi();

export function getHeaders() {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {
const headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
};

const makeBearer = (token: string) => `Bearer ${token.trim()}`;
const isAzure = accessStore.provider === ServiceProvider.Azure;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isAzure ? accessStore.azureApiKey : accessStore.openaiApiKey;

const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
const validString = (x: string) => x && x.length > 0;

// use user's api key first
if (validString(accessStore.token)) {
headers.Authorization = makeBearer(accessStore.token);
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers.Authorization = makeBearer(
headers[authHeader] = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
48 changes: 36 additions & 12 deletions app/client/platforms/openai.ts
@@ -1,8 +1,10 @@
import {
ApiPath,
DEFAULT_API_HOST,
DEFAULT_MODELS,
OpenaiPath,
REQUEST_TIMEOUT_MS,
ServiceProvider,
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

@@ -21,6 +23,7 @@ import {
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { makeAzurePath } from "@/app/azure";

export interface OpenAIListModelResponse {
object: string;
@@ -35,20 +38,35 @@ export class ChatGPTApi implements LLMApi {
private disableListModels = true;

path(path: string): string {
let openaiUrl = useAccessStore.getState().openaiUrl;
const apiPath = "/api/openai";
const accessStore = useAccessStore.getState();

if (openaiUrl.length === 0) {
const isAzure = accessStore.provider === ServiceProvider.Azure;

if (isAzure && !accessStore.isValidAzure()) {
throw Error(
"incomplete azure config, please check it in your settings page",
);
}

let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
openaiUrl = isApp ? DEFAULT_API_HOST : apiPath;
baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
}
if (openaiUrl.endsWith("/")) {
openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);

if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
}
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.OpenAI)) {
baseUrl = "https://" + baseUrl;
}
if (!openaiUrl.startsWith("http") && !openaiUrl.startsWith(apiPath)) {
openaiUrl = "https://" + openaiUrl;

if (isAzure) {
path = makeAzurePath(path, accessStore.azureApiVersion);
}
return [openaiUrl, path].join("/");

return [baseUrl, path].join("/");
}

extractMessage(res: any) {
@@ -163,14 +181,20 @@ export class ChatGPTApi implements LLMApi {
}
const text = msg.data;
try {
const json = JSON.parse(text);
const delta = json.choices[0]?.delta.content;
const json = JSON.parse(text) as {
choices: Array<{
delta: {
content: string;
};
}>;
};
const delta = json.choices[0]?.delta?.content;
if (delta) {
responseText += delta;
options.onUpdate?.(responseText, delta);
}
} catch (e) {
console.error("[Request] parse error", text, msg);
console.error("[Request] parse error", text);
}
},
onclose() {
8 changes: 4 additions & 4 deletions app/components/auth.tsx
@@ -18,7 +18,7 @@ export function AuthPage() {
const goChat = () => navigate(Path.Chat);
const resetAccessCode = () => {
accessStore.update((access) => {
access.token = "";
access.openaiApiKey = "";
access.accessCode = "";
});
}; // Reset access code to empty string
@@ -56,11 +56,11 @@ export function AuthPage() {
<input
className={styles["auth-input"]}
type="password"
placeholder={Locale.Settings.Token.Placeholder}
value={accessStore.token}
placeholder={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
value={accessStore.openaiApiKey}
onChange={(e) => {
accessStore.update(
(access) => (access.token = e.currentTarget.value),
(access) => (access.openaiApiKey = e.currentTarget.value),
);
}}
/>
5 changes: 4 additions & 1 deletion app/components/chat-list.tsx
@@ -61,7 +61,10 @@ export function ChatItem(props: {
{props.narrow ? (
<div className={styles["chat-item-narrow"]}>
<div className={styles["chat-item-avatar"] + " no-dark"}>
<MaskAvatar mask={props.mask} />
<MaskAvatar
avatar={props.mask.avatar}
model={props.mask.modelConfig.model}
/>
</div>
<div className={styles["chat-item-narrow-count"]}>
{props.count}