Skip to content

Commit

Permalink
fix: 修复消息条数过多发送失败的问题 pljhonglu#2
Browse files Browse the repository at this point in the history
  • Loading branch information
jianghonglu.neo committed Mar 24, 2023
1 parent f32cf52 commit 5d97e20
Show file tree
Hide file tree
Showing 12 changed files with 172 additions and 77 deletions.
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
"@traptitech/markdown-it-katex": "^3.6.0",
"@types/lodash": "^4.14.191",
"@vueuse/core": "^9.13.0",
"gpt3-tokenizer": "^1.1.5",
"highlight.js": "^11.7.0",
"html2canvas": "^1.4.1",
"katex": "^0.16.4",
Expand Down
13 changes: 13 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

46 changes: 31 additions & 15 deletions src-tauri/src/app/cmd/gpt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use eventsource_stream::{Eventsource, EventStreamError};
use serde_json::{json, Value};
use serde::{ser::Serializer, Serialize, Deserialize};
use futures::{TryStreamExt};
use std::{collections::HashMap, time::Duration};
use std::{ time::Duration };
use log::{error, info};

type Result<T> = std::result::Result<T, Error>;
Expand Down Expand Up @@ -46,26 +46,39 @@ impl ProgressPayload {
}
}

/// One chat message in the OpenAI chat-completions payload format
/// (serialized into the `messages` array of the request body).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Message {
    /// Speaker role — presumably "system" / "user" / "assistant" per the
    /// OpenAI chat API; TODO confirm against the frontend callers.
    pub role: String,
    /// The message text.
    pub content: String
}

/// Per-request configuration passed from the frontend to `fetch_chat_api`.
#[derive(Serialize, Deserialize, Debug, Clone)]
// camelCase field names are kept as-is, presumably so serde matches the JSON
// object sent from the TypeScript side without rename attributes — verify.
#[allow(non_snake_case)]
pub struct FetchOption {
    /// Optional HTTP proxy URL; `None` means a direct connection.
    pub proxy: Option<String>,
    /// OpenAI API key, sent as the `Authorization: Bearer` credential.
    pub apiKey: String,
    /// Model identifier forwarded in the request body (e.g. "gpt-3.5-turbo").
    pub model: String,
    /// Sampling temperature forwarded to the API.
    pub temperature: f32,
}

#[tauri::command]
pub async fn fetch_chat_api(
handle: AppHandle,
id: u64,
proxy: Option<String>,
token: String,
model: String,
messages: Vec<HashMap<String, String>>,
temperature: f32
messages: Vec<Message>,
option: FetchOption,
) -> Result<u64> {
// https://platform.openai.com/docs/guides/chat/introduction
// "https://api.openai.com/v1/chat/completions";
let url = "https://api.openai.com/v1/chat/completions";
let data = json!({
"model": model,
"model": option.model,
"messages": messages,
"temperature": temperature,
"stream": true
"temperature": option.temperature,
"stream": true,
});
let proxy_str = proxy.unwrap_or(String::from(""));
log::info!("> send message: length: {}, option: {:?},", messages.len(), option);
let proxy_str = option.proxy.unwrap_or(String::from(""));

let client : reqwest::Client = {
log::info!("proxy is: {}", proxy_str);
Expand All @@ -78,15 +91,18 @@ pub async fn fetch_chat_api(
};
let res = client.post(url)
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", token))
.timeout(Duration::from_secs(300))
.header("Authorization", format!("Bearer {}", option.apiKey))
.timeout(Duration::from_secs(600))
.body(data.to_string())
.send()
.await?;
info!("send message: {}", json!(messages));
info!("> receive message: {}", id);

if res.status().as_u16() != 200 {
return Err(Error::Custom {code: res.status().as_u16(), msg:String::from("openai api request error!")})
let status_code = res.status().as_u16();
if status_code != 200 {
let error_msg = res.text().await?;
log::error!("{}", error_msg);
return Err(Error::Custom {code: status_code, msg:String::from(error_msg)})
}

let mut stream = res.bytes_stream().eventsource();
Expand Down
2 changes: 1 addition & 1 deletion src-tauri/tauri.conf.json
Original file line number Diff line number Diff line change
Expand Up @@ -96,4 +96,4 @@
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDM2OUUyQUQ5QjE1Q0FEMTEKUldRUnJWeXgyU3FlTmxOS0N0aVBhNGUwL3c3QlBIY29uMHFUdmhUZS9YNmpKNE83L1BKZ3dER2QK"
}
}
}
}
9 changes: 2 additions & 7 deletions src/api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,8 @@ async function listenToEventIfNeeded(): Promise<void> {
}

export async function fetchChatAPIProcess(
apiKey: string,
proxy: string | null,
modelName: string,
messages: Chat.RequestMessage[],
option: Chat.ChatOptions,
progressHandler?: (detail: string, role: string) => void,
errorHandle?: (err: Error) => void,
signal?: GenericAbortSignal,
Expand All @@ -55,11 +53,8 @@ export async function fetchChatAPIProcess(
}
await invoke('fetch_chat_api', {
id,
proxy,
token: apiKey,
model: modelName,
messages,
temperature: 0.6,
option,
}).catch((error) => {
handlers.delete(id)
if (errorHandle)
Expand Down
14 changes: 7 additions & 7 deletions src/components/common/Setting/User.vue
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,14 @@ const userStore = useUserStore()
const ms = useMessage()
const formRef = ref<FormInst | null>(null)
const userInfo = computed(() => userStore.userInfo)
const userConfig = computed(() => userStore.userConfig)
const model = ref({
name: userInfo.value.name,
avatar: userInfo.value.avatar,
apiKey: userInfo.value.apiKey,
modelName: userInfo.value.modelName,
proxy: userInfo.value.proxy,
apiKey: userConfig.value.apiKey,
modelName: userConfig.value.modelName,
proxy: userConfig.value.proxy,
})
const models = userStore.allModels().map(v => ({
Expand Down Expand Up @@ -71,13 +72,12 @@ function saveUserInfo() {
if (!errors) {
userInfo.value.name = model.value.name
userInfo.value.avatar = model.value.avatar
userInfo.value.apiKey = model.value.apiKey
userInfo.value.modelName = model.value.modelName
userInfo.value.proxy = model.value.proxy
userConfig.value.apiKey = model.value.apiKey
userConfig.value.modelName = model.value.modelName
userConfig.value.proxy = model.value.proxy
userStore.recordState()
ms.success(t('common.success'))
// window.location.reload()
}
})
}
Expand Down
2 changes: 1 addition & 1 deletion src/store/modules/chat/helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ const LOCAL_NAME = 'chatStorage'

// Initial chat-store state: one empty conversation so the UI always has an
// active session to render. `opt` holds per-session option overrides and
// starts empty. (The scrape had left the pre-change return statement in place
// above its replacement, producing two returns; the stale line is removed.)
export function defaultState(): Chat.ChatState {
  const uuid = 1002
  return { active: uuid, history: [{ uuid, title: 'New Chat', isEdit: false }], chat: [{ uuid, data: [], opt: {} }] }
}

export function getLocalState(): Chat.ChatState {
Expand Down
13 changes: 10 additions & 3 deletions src/store/modules/chat/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,14 @@ export const useChatStore = defineStore('chat-store', {
return null
},

// Returns a lookup function resolving a full chat session: by explicit uuid
// when one is given (and truthy), otherwise the currently active session.
// (The pre-rename header `getChatByUuid` left behind by the scrape is removed;
// only the post-commit getter remains.)
getChatSessionByUuid(state: Chat.ChatState) {
  return (uuid?: number) => {
    if (uuid)
      return state.chat.find(item => item.uuid === uuid)
    return state.chat.find(item => item.uuid === state.active)
  }
},
getChatDataByUuid(state: Chat.ChatState) {
return (uuid?: number) => {
if (uuid)
return state.chat.find(item => item.uuid === uuid)?.data ?? []
Expand All @@ -25,7 +32,7 @@ export const useChatStore = defineStore('chat-store', {
actions: {
// Registers a new conversation: prepends it to both the history list and the
// chat list (with an empty per-session `opt` override object), makes it the
// active session, and navigates to it. (The scrape had kept the pre-change
// `unshift` line next to its replacement, which would insert the session
// twice; the stale duplicate is removed.)
addHistory(history: Chat.History, chatData: Chat.Chat[] = []) {
  this.history.unshift(history)
  this.chat.unshift({ uuid: history.uuid, data: chatData, opt: {} })
  this.active = history.uuid
  this.reloadRoute(history.uuid)
},
Expand Down Expand Up @@ -92,7 +99,7 @@ export const useChatStore = defineStore('chat-store', {
if (this.history.length === 0) {
const uuid = Date.now()
this.history.push({ uuid, title: chat.text, isEdit: false })
this.chat.push({ uuid, data: [chat] })
this.chat.push({ uuid, data: [chat], opt: {} })
this.active = uuid
this.recordState()
}
Expand Down
8 changes: 8 additions & 0 deletions src/store/modules/user/helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,31 @@ const LOCAL_NAME = 'userStorage'
// Display identity of the local user (profile fields only; API-related
// settings were split out into UserConfig by this commit).
export interface UserInfo {
  avatar: string
  name: string | null
}

// API-related user configuration, persisted separately from profile info.
export interface UserConfig {
  modelName: string
  apiKey: string
  // HTTP proxy URL, or null for a direct connection.
  proxy: string | null
  // Token budget for a request context — presumably used to trim the message
  // history before sending (this commit adds gpt3-tokenizer); TODO confirm.
  maxTokenNum: number
}

// Root shape of the persisted user store: profile + API configuration.
export interface UserState {
  userInfo: UserInfo
  userConfig: UserConfig
}

// Builds the default user state: an anonymous profile plus the baseline API
// configuration (model, build-time API key, no proxy, 4096-token budget).
export function defaultSetting(): UserState {
  const userInfo = {
    avatar: '',
    name: null,
  }
  const userConfig = {
    modelName: 'gpt-3.5-turbo',
    apiKey: import.meta.env.VITE_GLOB_OPENAI_KEY,
    proxy: null,
    maxTokenNum: 4096,
  }
  return { userInfo, userConfig }
}
Expand Down
33 changes: 19 additions & 14 deletions src/typings/chat.d.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
declare namespace Chat {

// 一条聊天消息
interface Chat {
dateTime: string
text: string
tokenNum?: number
inversion?: boolean
error?: boolean
loading?: boolean
Expand All @@ -14,26 +16,29 @@ declare namespace Chat {
uuid: number
}

// Per-request options forwarded to the backend `fetch_chat_api` command.
interface ChatOptions {
  apiKey: string
  // HTTP proxy URL, or null for a direct connection.
  proxy: string | null
  model: string
  systemMessage: string
  temperature: number
}

// A chat session: one conversation's messages plus its per-session
// option overrides (partial — unset fields fall back elsewhere; TODO confirm
// the fallback source against the store code).
interface ChatSession {
  uuid: number
  data: Chat[]
  opt: Partial<ChatOptions>
}

// Root shape of the chat store: the active session's uuid, the history
// list, and the per-session data.
interface ChatState {
  active: number | null
  history: History[]
  chat: ChatSession[]
}

// One message as sent over the wire; mirrors the Rust `Message` struct
// ({ role, content }) in src-tauri/src/app/cmd/gpt.rs.
interface RequestMessage {
  role: string
  content: string
}

// interface ConversationRequest {
// conversationId?: string
// parentMessageId?: string
// }

// interface ConversationResponse {
// options: ConversationRequest,
// detail: string,
// role: string,
// finish_reason: string,
// }
}
}
Loading

0 comments on commit 5d97e20

Please sign in to comment.