v2.20.10
Dooy committed Sep 13, 2024
1 parent b6d160f commit b0f71b8
Showing 2 changed files with 71 additions and 29 deletions.
81 changes: 54 additions & 27 deletions src/api/openapi.ts
@@ -269,7 +269,7 @@ export const isDallImageModel =(model:string|undefined)=>{
 
 interface subModelType{
     message:any[]
-    onMessage:(d:{text:string,isFinish:boolean})=>void
+    onMessage:(d:{text:string,isFinish:boolean,isAll?:boolean})=>void
     onError?:(d?:any)=>void
     signal?:AbortSignal
     model?:string
@@ -324,6 +324,10 @@ Latex block: $$e=mc^2$$`;
     return DEFAULT_SYSTEM_TEMPLATE;
 
 }
+
+export const isNewModel=(model:string)=>{
+    return model.startsWith('o1-')
+}
 export const subModel= async (opt: subModelType)=>{
     //
     let model= opt.model?? ( gptConfigStore.myData.model?gptConfigStore.myData.model: "gpt-3.5-turbo");
@@ -347,7 +351,7 @@ export const subModel= async (opt: subModelType)=>{
         model= model.replace('gpt-4-gizmo-','')
     }
 
-    let body ={
+    let body:any ={
         max_tokens ,
         model ,
         temperature,
@@ -356,8 +360,18 @@ export const subModel= async (opt: subModelType)=>{
         "messages": opt.message
         ,stream:true
     }
-    //
-
+    if(isNewModel(model)){
+        body ={
+            max_completion_tokens:max_tokens ,
+            model ,
+            //temperature,
+            top_p,
+            presence_penalty ,frequency_penalty,
+            "messages": opt.message
+            ,stream:false
+        }
+    }
+    if(body.stream){
     let headers ={
         'Content-Type': 'application/json'
         //,'Authorization': 'Bearer ' +gptServerStore.myData.OPENAI_API_KEY
@@ -366,29 +380,42 @@ export const subModel= async (opt: subModelType)=>{
     headers={...headers,...getHeaderAuthorization()}
 
-    try {
-        await fetchSSE( gptGetUrl('/v1/chat/completions'),{
-            method: 'POST',
-            headers: headers,
-            signal:opt.signal,
-            onMessage: async (data:string)=> {
-                //mlog('🐞test' , data ) ;
-                if(data=='[DONE]') opt.onMessage({text:'',isFinish:true})
-                else {
-                    const obj= JSON.parse(data );
-                    opt.onMessage({text:obj.choices[0].delta?.content??'' ,isFinish:obj.choices[0].finish_reason!=null })
-                }
-            },
-            onError(e ){
-                //console.log('eee>>', e )
-                mlog('❌unknown error',e )
-                opt.onError && opt.onError(e)
-            },
-            body:JSON.stringify(body)
-        });
-    } catch (error ) {
-        mlog('❌unknown error 2',error )
-        opt.onError && opt.onError(error)
-    }
+        try {
+            await fetchSSE( gptGetUrl('/v1/chat/completions'),{
+                method: 'POST',
+                headers: headers,
+                signal:opt.signal,
+                onMessage: async (data:string)=> {
+                    //mlog('🐞test' , data ) ;
+                    if(data=='[DONE]') opt.onMessage({text:'',isFinish:true})
+                    else {
+                        const obj= JSON.parse(data );
+                        opt.onMessage({text:obj.choices[0].delta?.content??'' ,isFinish:obj.choices[0].finish_reason!=null })
+                    }
+                },
+                onError(e ){
+                    //console.log('eee>>', e )
+                    mlog('❌unknown error',e )
+                    opt.onError && opt.onError(e)
+                },
+                body:JSON.stringify(body)
+            });
+        } catch (error ) {
+            mlog('❌unknown error 2',error )
+            opt.onError && opt.onError(error)
+        }
+    }else{
+        try {
+            mlog('🐞non-stream output',body )
+            opt.onMessage({text: t('mj.thinking') ,isFinish: false })
+            let obj :any= await gptFetch( gptGetUrl('/v1/chat/completions'),body )
+            //mlog('result >>',obj )
+            opt.onMessage({text:obj.choices[0].message.content??'' ,isFinish: true ,isAll:true})
+
+        } catch (error ) {
+            mlog('❌unknown error 2',error )
+            opt.onError && opt.onError(error)
+        }
+    }
 }
 
 export const getInitChat = (txt:string )=>{
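
Note on the openapi.ts change above: o1-style models are detected by prefix and get a different request shape — max_completion_tokens instead of max_tokens, no custom temperature, and stream:false with a plain fetch instead of SSE. A minimal standalone TypeScript sketch of that switch (buildBody and ChatMessage are illustrative names, not part of this repo):

interface ChatMessage { role: string; content: string }

const isNewModel = (model: string) => model.startsWith('o1-')

// Hypothetical helper mirroring the body-building logic in subModel above.
function buildBody(model: string, messages: ChatMessage[], maxTokens: number) {
  if (isNewModel(model)) {
    // o1-style models: max_completion_tokens, default temperature, no streaming.
    return { model, messages, max_completion_tokens: maxTokens, stream: false }
  }
  // Every other model keeps the streaming body with max_tokens.
  return { model, messages, max_tokens: maxTokens, stream: true }
}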
19 changes: 17 additions & 2 deletions src/views/mj/aiGpt.vue
@@ -6,7 +6,8 @@ import { homeStore, useChatStore } from '@/store'
 import { getInitChat, mlog, subModel,getSystemMessage , localSaveAny, canVisionModel
 ,isTTS, subTTS, file2blob, whisperUpload, getHistoryMessage, checkDisableGpt4, chatSetting,
 canBase64Model,
-isCanBase64Model} from '@/api'
+isCanBase64Model,
+isNewModel} from '@/api'
 //import { isNumber } from '@/utils/is'
 import { useMessage } from "naive-ui";
 import { t } from "@/locales";
@@ -37,6 +38,8 @@ const goFinish= ( )=>{
 const getMessage= async (start=1000,loadingCnt=3)=>{
     return getHistoryMessage(dataSources.value,loadingCnt,start);
 }
 watch( ()=>textRz.value, (n)=>{
     //mlog('🐞 textRz',n);
     if(n.length==0) return ;
@@ -155,6 +158,10 @@ watch(()=>homeStore.myData.act, async (n)=>{
     //return ;
     let message= [ { "role": "system", "content": getSystemMessage( +uuid2) },
                    ...historyMesg ];
+    if ( isNewModel( model ) ) {
+        message= [ ...historyMesg ];
+    }
+
     if( dd.fileBase64 && dd.fileBase64.length>0 ){
         //if( model=='gpt-4-vision-preview' || model=='gemini-pro-1.5'){
         if( isCanBase64Model(model) ){
@@ -221,6 +228,9 @@ watch(()=>homeStore.myData.act, async (n)=>{
     controller.value = new AbortController();
     let message= [ { "role": "system", "content": getSystemMessage(+st.value.uuid ) },
                    ...historyMesg ];
+    if ( isNewModel( model ) ) {
+        message= [ ...historyMesg ];
+    }
 
     textRz.value=[];
     submit(model, message );
@@ -311,7 +321,12 @@ const submit= (model:string, message:any[] , opt?:any )=>{
         ,uuid:st.value.uuid // current session
         ,onMessage:(d)=>{
             mlog('🐞message',d);
-            textRz.value.push(d.text);
+            if(d.isAll){
+                textRz.value= [d.text];
+            }else{
+                textRz.value.push(d.text);
+            }
         }
         ,onError:(e:any)=>{
             mlog('onError',e)
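
Note on the aiGpt.vue changes above: for o1-style models the leading system message is dropped before submitting, and because their replies arrive in one piece, onMessage fires with isAll set, so the full text replaces the accumulated chunk buffer (including the t('mj.thinking') placeholder) instead of being appended. A hedged TypeScript sketch of both behaviors (buildMessages and applyChunk are illustrative names, not part of this repo):

const isNewModel = (m: string) => m.startsWith('o1-')   // same check exported from '@/api'

// Drop the system role for o1-style models, keep it for everything else.
const buildMessages = (systemPrompt: string, history: any[], model: string) =>
  isNewModel(model)
    ? [...history]
    : [{ role: 'system', content: systemPrompt }, ...history]

// Streamed deltas append; a full non-stream reply (isAll) replaces the buffer.
function applyChunk(buffer: string[], d: { text: string; isAll?: boolean }): string[] {
  if (d.isAll) return [d.text]
  buffer.push(d.text)
  return buffer
}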
