Skip to content

Commit bca7424

Browse files
authored
fix: fix gemini issue when using app (ChatGPTNextWeb#4013)
* chore: update path * fix: fix google auth logic * fix: not using header authorization for google api * chore: revert to allow stream
1 parent 9d5801f commit bca7424

16 files changed

+102
-59
lines changed

app/client/api.ts

+16-13
Original file line numberDiff line numberDiff line change
@@ -144,31 +144,34 @@ export function getHeaders() {
144144
const headers: Record<string, string> = {
145145
"Content-Type": "application/json",
146146
"x-requested-with": "XMLHttpRequest",
147-
"Accept": "application/json",
147+
Accept: "application/json",
148148
};
149149
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
150-
const isGoogle = modelConfig.model === "gemini-pro";
150+
const isGoogle = modelConfig.model.startsWith("gemini");
151151
const isAzure = accessStore.provider === ServiceProvider.Azure;
152152
const authHeader = isAzure ? "api-key" : "Authorization";
153153
const apiKey = isGoogle
154154
? accessStore.googleApiKey
155155
: isAzure
156156
? accessStore.azureApiKey
157157
: accessStore.openaiApiKey;
158-
158+
const clientConfig = getClientConfig();
159159
const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
160160
const validString = (x: string) => x && x.length > 0;
161161

162-
// use user's api key first
163-
if (validString(apiKey)) {
164-
headers[authHeader] = makeBearer(apiKey);
165-
} else if (
166-
accessStore.enabledAccessControl() &&
167-
validString(accessStore.accessCode)
168-
) {
169-
headers[authHeader] = makeBearer(
170-
ACCESS_CODE_PREFIX + accessStore.accessCode,
171-
);
162+
// when using google api in app, do not set auth header
163+
if (!(isGoogle && clientConfig?.isApp)) {
164+
// use user's api key first
165+
if (validString(apiKey)) {
166+
headers[authHeader] = makeBearer(apiKey);
167+
} else if (
168+
accessStore.enabledAccessControl() &&
169+
validString(accessStore.accessCode)
170+
) {
171+
headers[authHeader] = makeBearer(
172+
ACCESS_CODE_PREFIX + accessStore.accessCode,
173+
);
174+
}
172175
}
173176

174177
return headers;

app/client/platforms/google.ts

+18-14
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,8 @@
11
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
22
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
33
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
4-
import {
5-
EventStreamContentType,
6-
fetchEventSource,
7-
} from "@fortaine/fetch-event-source";
8-
import { prettyObject } from "@/app/utils/format";
94
import { getClientConfig } from "@/app/config/client";
10-
import Locale from "../../locales";
11-
import { getServerSideConfig } from "@/app/config/server";
12-
import de from "@/app/locales/de";
5+
import { DEFAULT_API_HOST } from "@/app/constant";
136
export class GeminiProApi implements LLMApi {
147
extractMessage(res: any) {
158
console.log("[Response] gemini-pro response: ", res);
@@ -21,7 +14,7 @@ export class GeminiProApi implements LLMApi {
2114
);
2215
}
2316
async chat(options: ChatOptions): Promise<void> {
24-
const apiClient = this;
17+
// const apiClient = this;
2518
const messages = options.messages.map((v) => ({
2619
role: v.role.replace("assistant", "model").replace("system", "user"),
2720
parts: [{ text: v.content }],
@@ -79,20 +72,31 @@ export class GeminiProApi implements LLMApi {
7972
],
8073
};
8174

82-
console.log("[Request] google payload: ", requestPayload);
75+
const isApp = !!getClientConfig()?.isApp;
8376

8477
const shouldStream = !!options.config.stream;
8578
const controller = new AbortController();
8679
options.onController?.(controller);
80+
const accessStore = useAccessStore.getState();
8781
try {
88-
const chatPath = this.path(Google.ChatPath);
82+
let chatPath = this.path(Google.ChatPath);
83+
84+
// let baseUrl = accessStore.googleUrl;
85+
86+
chatPath = isApp
87+
? DEFAULT_API_HOST +
88+
"/api/proxy/google/" +
89+
Google.ChatPath +
90+
`?key=${accessStore.googleApiKey}`
91+
: chatPath;
92+
8993
const chatPayload = {
9094
method: "POST",
9195
body: JSON.stringify(requestPayload),
9296
signal: controller.signal,
9397
headers: getHeaders(),
9498
};
95-
99+
console.log("[Request] google chatPath: ", chatPath, isApp);
96100
// make a fetch request
97101
const requestTimeoutId = setTimeout(
98102
() => controller.abort(),
@@ -134,6 +138,8 @@ export class GeminiProApi implements LLMApi {
134138

135139
// start animation
136140
animateResponseText();
141+
142+
console.log("[Proxy Endpoint] ", streamChatPath);
137143
fetch(streamChatPath, chatPayload)
138144
.then((response) => {
139145
const reader = response?.body?.getReader();
@@ -187,9 +193,7 @@ export class GeminiProApi implements LLMApi {
187193
} else {
188194
const res = await fetch(chatPath, chatPayload);
189195
clearTimeout(requestTimeoutId);
190-
191196
const resJson = await res.json();
192-
193197
if (resJson?.promptFeedback?.blockReason) {
194198
// being blocked
195199
options.onError?.(

app/client/platforms/openai.ts

+6-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
"use client";
12
import {
23
ApiPath,
34
DEFAULT_API_HOST,
@@ -45,7 +46,9 @@ export class ChatGPTApi implements LLMApi {
4546

4647
if (baseUrl.length === 0) {
4748
const isApp = !!getClientConfig()?.isApp;
48-
baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
49+
baseUrl = isApp
50+
? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
51+
: ApiPath.OpenAI;
4952
}
5053

5154
if (baseUrl.endsWith("/")) {
@@ -59,6 +62,8 @@ export class ChatGPTApi implements LLMApi {
5962
path = makeAzurePath(path, accessStore.azureApiVersion);
6063
}
6164

65+
console.log("[Proxy Endpoint] ", baseUrl, path);
66+
6267
return [baseUrl, path].join("/");
6368
}
6469

app/components/exporter.tsx

+1-1
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,7 @@ export function PreviewActions(props: {
307307
setShouldExport(false);
308308

309309
var api: ClientApi;
310-
if (config.modelConfig.model === "gemini-pro") {
310+
if (config.modelConfig.model.startsWith("gemini")) {
311311
api = new ClientApi(ModelProvider.GeminiPro);
312312
} else {
313313
api = new ClientApi(ModelProvider.GPT);

app/components/home.tsx

+1-1
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ export function useLoadData() {
171171
const config = useAppConfig();
172172

173173
var api: ClientApi;
174-
if (config.modelConfig.model === "gemini-pro") {
174+
if (config.modelConfig.model.startsWith("gemini")) {
175175
api = new ClientApi(ModelProvider.GeminiPro);
176176
} else {
177177
api = new ClientApi(ModelProvider.GPT);

app/components/model-config.tsx

+1-1
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
9292
></input>
9393
</ListItem>
9494

95-
{props.modelConfig.model === "gemini-pro" ? null : (
95+
{props.modelConfig.model.startsWith("gemini") ? null : (
9696
<>
9797
<ListItem
9898
title={Locale.Settings.PresencePenalty.Title}

app/constant.ts

+5-3
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,10 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
88
export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
99
export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
1010

11-
export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
12-
export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
11+
// export const DEFAULT_CORS_HOST = "https://api.nextchat.dev";
12+
// export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
13+
14+
export const DEFAULT_API_HOST = "https://api.nextchat.dev";
1315
export const OPENAI_BASE_URL = "https://api.openai.com";
1416

1517
export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
@@ -111,7 +113,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
111113
"gpt-4-1106-preview": "2023-04",
112114
"gpt-4-0125-preview": "2023-04",
113115
"gpt-4-vision-preview": "2023-04",
114-
// After improvements,
116+
// After improvements,
115117
// it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
116118
"gemini-pro": "2023-12",
117119
};

app/store/access.ts

+3-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
1212
let fetchState = 0; // 0 not fetch, 1 fetching, 2 done
1313

1414
const DEFAULT_OPENAI_URL =
15-
getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
15+
getClientConfig()?.buildMode === "export"
16+
? DEFAULT_API_HOST + "/api/proxy/openai"
17+
: ApiPath.OpenAI;
1618

1719
const DEFAULT_ACCESS_STATE = {
1820
accessCode: "",

app/store/chat.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -316,7 +316,7 @@ export const useChatStore = createPersistStore(
316316
});
317317

318318
var api: ClientApi;
319-
if (modelConfig.model === "gemini-pro") {
319+
if (modelConfig.model.startsWith("gemini")) {
320320
api = new ClientApi(ModelProvider.GeminiPro);
321321
} else {
322322
api = new ClientApi(ModelProvider.GPT);
@@ -501,7 +501,7 @@ export const useChatStore = createPersistStore(
501501
const modelConfig = session.mask.modelConfig;
502502

503503
var api: ClientApi;
504-
if (modelConfig.model === "gemini-pro") {
504+
if (modelConfig.model.startsWith("gemini")) {
505505
api = new ClientApi(ModelProvider.GeminiPro);
506506
} else {
507507
api = new ClientApi(ModelProvider.GPT);

app/utils/cors.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import { getClientConfig } from "../config/client";
2-
import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
2+
import { ApiPath, DEFAULT_API_HOST } from "../constant";
33

44
export function corsPath(path: string) {
5-
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
5+
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
66

77
if (!path.startsWith("/")) {
88
path = "/" + path;

next.config.mjs

+10-1
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,17 @@ if (mode !== "export") {
6464

6565
nextConfig.rewrites = async () => {
6666
const ret = [
67+
// adjust for previous verison directly using "/api/proxy/" as proxy base route
6768
{
68-
source: "/api/proxy/:path*",
69+
source: "/api/proxy/v1/:path*",
70+
destination: "https://api.openai.com/v1/:path*",
71+
},
72+
{
73+
source: "/api/proxy/google/:path*",
74+
destination: "https://generativelanguage.googleapis.com/:path*",
75+
},
76+
{
77+
source: "/api/proxy/openai/:path*",
6978
destination: "https://api.openai.com/:path*",
7079
},
7180
{

package.json

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
{
2-
"name": "chatgpt-next-web",
2+
"name": "nextchat",
33
"private": false,
44
"license": "mit",
55
"scripts": {
@@ -64,4 +64,4 @@
6464
"resolutions": {
6565
"lint-staged/yaml": "^2.2.2"
6666
}
67-
}
67+
}

scripts/setup.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
5454
fi
5555

5656
# Clone the repository and install dependencies
57-
git clone https://github.com/Yidadaa/ChatGPT-Next-Web
57+
git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
5858
cd ChatGPT-Next-Web
5959
yarn install
6060

src-tauri/Cargo.lock

+11-11
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src-tauri/Cargo.toml

+22-4
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
[package]
2-
name = "chatgpt-next-web"
2+
name = "nextchat"
33
version = "0.1.0"
44
description = "A cross platform app for LLM ChatBot."
55
authors = ["Yidadaa"]
66
license = "mit"
77
repository = ""
8-
default-run = "chatgpt-next-web"
8+
default-run = "nextchat"
99
edition = "2021"
1010
rust-version = "1.60"
1111

@@ -17,11 +17,29 @@ tauri-build = { version = "1.3.0", features = [] }
1717
[dependencies]
1818
serde_json = "1.0"
1919
serde = { version = "1.0", features = ["derive"] }
20-
tauri = { version = "1.3.0", features = ["notification-all", "fs-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] }
20+
tauri = { version = "1.3.0", features = [
21+
"notification-all",
22+
"fs-all",
23+
"clipboard-all",
24+
"dialog-all",
25+
"shell-open",
26+
"updater",
27+
"window-close",
28+
"window-hide",
29+
"window-maximize",
30+
"window-minimize",
31+
"window-set-icon",
32+
"window-set-ignore-cursor-events",
33+
"window-set-resizable",
34+
"window-show",
35+
"window-start-dragging",
36+
"window-unmaximize",
37+
"window-unminimize",
38+
] }
2139
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
2240

2341
[features]
2442
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
2543
# If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
2644
# DO NOT REMOVE!!
27-
custom-protocol = [ "tauri/custom-protocol" ]
45+
custom-protocol = ["tauri/custom-protocol"]

src-tauri/tauri.conf.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@
9191
"updater": {
9292
"active": true,
9393
"endpoints": [
94-
"https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json"
94+
"https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
9595
],
9696
"dialog": false,
9797
"windows": {

0 commit comments

Comments
 (0)