diff --git a/src/api/openapi.ts b/src/api/openapi.ts
index f289e956b..0a446c74b 100644
--- a/src/api/openapi.ts
+++ b/src/api/openapi.ts
@@ -269,7 +269,7 @@ export const isDallImageModel =(model:string|undefined)=>{
 interface subModelType{
     message:any[]
-    onMessage:(d:{text:string,isFinish:boolean})=>void
+    onMessage:(d:{text:string,isFinish:boolean,isAll?:boolean})=>void
     onError?:(d?:any)=>void
     signal?:AbortSignal
     model?:string
@@ -324,6 +324,10 @@ Latex block: $$e=mc^2$$`;
     return DEFAULT_SYSTEM_TEMPLATE;
 }
 
+
+export const isNewModel=(model:string)=>{ // o1-series models need a non-stream request shape
+    return model.startsWith('o1-')
+}
 export const subModel= async (opt: subModelType)=>{
 
     // let model= opt.model?? ( gptConfigStore.myData.model?gptConfigStore.myData.model: "gpt-3.5-turbo");
@@ -347,7 +351,7 @@ export const subModel= async (opt: subModelType)=>{
         model= model.replace('gpt-4-gizmo-','')
     }
 
-    let body ={
+    let body:any ={
         max_tokens
         , model
         , temperature,
@@ -356,8 +360,18 @@ export const subModel= async (opt: subModelType)=>{
         "messages": opt.message
         ,stream:true
     }
-    //
-
+    if(isNewModel(model)){
+        body ={
+            max_completion_tokens:max_tokens ,
+            model ,
+            //temperature, // o1-series models only accept the default temperature
+            top_p,
+            presence_penalty ,frequency_penalty,
+            "messages": opt.message
+            ,stream:false // o1-series models do not support streaming
+        }
+    }
+    if(body.stream){
     let headers ={
         'Content-Type': 'application/json'
         //,'Authorization': 'Bearer ' +gptServerStore.myData.OPENAI_API_KEY
@@ -366,29 +380,42 @@ export const subModel= async (opt: subModelType)=>{
     headers={...headers,...getHeaderAuthorization()}
 
     try {
-        await fetchSSE( gptGetUrl('/v1/chat/completions'),{
-            method: 'POST',
-            headers: headers,
-            signal:opt.signal,
-            onMessage: async (data:string)=> {
-                //mlog('🐞 test' , data ) ;
-                if(data=='[DONE]') opt.onMessage({text:'',isFinish:true})
-                else {
-                    const obj= JSON.parse(data );
-                    opt.onMessage({text:obj.choices[0].delta?.content??'' ,isFinish:obj.choices[0].finish_reason!=null })
-                }
-            },
-            onError(e ){
-                //console.log('eee>>', e )
-                mlog('❌ unknown error',e )
-                opt.onError && opt.onError(e)
-            },
-            body:JSON.stringify(body)
-        });
-    } catch (error ) {
-        mlog('❌ unknown error 2',error )
-        opt.onError && opt.onError(error)
-    }
+            await fetchSSE( gptGetUrl('/v1/chat/completions'),{
+                method: 'POST',
+                headers: headers,
+                signal:opt.signal,
+                onMessage: async (data:string)=> {
+                    //mlog('🐞 test' , data ) ;
+                    if(data=='[DONE]') opt.onMessage({text:'',isFinish:true})
+                    else {
+                        const obj= JSON.parse(data );
+                        opt.onMessage({text:obj.choices[0].delta?.content??'' ,isFinish:obj.choices[0].finish_reason!=null })
+                    }
+                },
+                onError(e ){
+                    //console.log('eee>>', e )
+                    mlog('❌ unknown error',e )
+                    opt.onError && opt.onError(e)
+                },
+                body:JSON.stringify(body)
+            });
+        } catch (error ) {
+            mlog('❌ unknown error 2',error )
+            opt.onError && opt.onError(error)
+        }
+    }else{
+        try {
+            mlog('🐞 non-stream output',body )
+            opt.onMessage({text: t('mj.thinking') ,isFinish: false })
+            let obj :any= await gptFetch( gptGetUrl('/v1/chat/completions'),body )
+            //mlog('result >>',obj )
+            opt.onMessage({text:obj.choices[0].message.content??'' ,isFinish: true ,isAll:true})
+
+        } catch (error ) {
+            mlog('❌ unknown error 2',error )
+            opt.onError && opt.onError(error)
+        }
+    }
 }
 
 export const getInitChat = (txt:string )=>{
diff --git a/src/views/mj/aiGpt.vue b/src/views/mj/aiGpt.vue
index e8597311e..6154d9f79 100644
--- a/src/views/mj/aiGpt.vue
+++ b/src/views/mj/aiGpt.vue
@@ -6,7 +6,8 @@ import { homeStore, useChatStore } from '@/store'
 import { getInitChat, mlog, subModel,getSystemMessage
     , localSaveAny, canVisionModel ,isTTS, subTTS, file2blob, whisperUpload,
     getHistoryMessage, checkDisableGpt4, chatSetting, canBase64Model,
-    isCanBase64Model} from '@/api'
+    isCanBase64Model,
+    isNewModel} from '@/api'
 //import { isNumber } from '@/utils/is'
 import { useMessage } from "naive-ui";
 import { t } from "@/locales";
@@ -37,6 +38,8 @@ const goFinish= ( )=>{
 const getMessage= async (start=1000,loadingCnt=3)=>{
     return getHistoryMessage(dataSources.value,loadingCnt,start);
 }
+
+
 watch( ()=>textRz.value, (n)=>{
     //mlog('🐞 textRz',n);
     if(n.length==0) return ;
@@ -155,6 +158,10 @@ watch(()=>homeStore.myData.act, async (n)=>{
         //return ;
         let message= [ { "role": "system", "content": getSystemMessage( +uuid2) }, ...historyMesg ];
+
+        if ( isNewModel( model ) ) {
+            message= [ ...historyMesg ]; // o1-series models reject the system role
+        }
         if( dd.fileBase64 && dd.fileBase64.length>0 ){
             //if( model=='gpt-4-vision-preview' || model=='gemini-pro-1.5'){
             if( isCanBase64Model(model) ){
@@ -221,6 +228,9 @@ watch(()=>homeStore.myData.act, async (n)=>{
         controller.value = new AbortController();
         let message= [ { "role": "system", "content": getSystemMessage(+st.value.uuid ) }, ...historyMesg ];
+        if ( isNewModel( model ) ) {
+            message= [ ...historyMesg ]; // o1-series models reject the system role
+        }
         textRz.value=[];
         submit(model, message );
@@ -311,7 +321,12 @@ const submit= (model:string, message:any[] , opt?:any )=>{
         ,uuid:st.value.uuid //current conversation
         ,onMessage:(d)=>{
             mlog('🐞 message',d);
-            textRz.value.push(d.text);
+
+            if(d.isAll){
+                textRz.value= [d.text]; // non-stream answer arrives whole: replace, don't append
+            }else{
+                textRz.value.push(d.text);
+            }
         }
         ,onError:(e:any)=>{
             mlog('onError',e)
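
Review note on the openapi.ts change: the `isNewModel` branch exists because OpenAI's o1-series models (o1-preview, o1-mini) launched without support for `stream:true`, the `system` role, non-default `temperature`, or the `max_tokens` field, which they replace with `max_completion_tokens`. A minimal sketch of the request-shape split this patch implements; `buildBody` and `ChatParams` are illustrative names, not part of the codebase:

```ts
// Sketch only: mirrors the body construction inside subModel above.
interface ChatParams {
  model: string
  messages: { role: string; content: string }[]
  maxTokens: number
  temperature?: number
}

const isNewModel = (model: string) => model.startsWith('o1-')

function buildBody(p: ChatParams): Record<string, unknown> {
  if (isNewModel(p.model)) {
    // o1-series: token cap renamed, no sampling knobs, no streaming
    return {
      model: p.model,
      messages: p.messages,
      max_completion_tokens: p.maxTokens,
      stream: false,
    }
  }
  // Every other chat model keeps the original request shape
  return {
    model: p.model,
    messages: p.messages,
    max_tokens: p.maxTokens,
    temperature: p.temperature ?? 1,
    stream: true,
  }
}
```

Two caveats worth flagging: the prefix check misses a bare `o1` model id (a `/^o1(-|$)/` test would be safer if such ids ship), and the o1 body still sends `top_p`, `presence_penalty` and `frequency_penalty`, which the o1 API may reject for non-default values just as it does `temperature`.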
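
The two aiGpt.vue hunks that strip the system message follow from the same constraint: o1-series models initially rejected any message with the `system` role. A sketch of that gating, with `buildMessages` as a hypothetical name and `history` standing in for `historyMesg`:

```ts
// Sketch only: the message-list gating added in both aiGpt.vue hunks.
type ChatMessage = { role: string; content: string }

function buildMessages(
  model: string,
  systemPrompt: string,
  history: ChatMessage[],
): ChatMessage[] {
  // o1-series models rejected the "system" role at launch,
  // so the system prompt is dropped entirely for them.
  if (model.startsWith('o1-')) return [...history]
  return [{ role: 'system', content: systemPrompt }, ...history]
}
```

Dropping the prompt silently loses whatever instructions `getSystemMessage` carried; an alternative (not done here) would be folding them into the first `user` message.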
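
Finally, the new `isAll` flag on `onMessage` tells consumers whether `text` is a streamed delta (append) or the complete answer (replace), which is why `submit` moves from an unconditional `push` to the `isAll` branch. A minimal consumer sketch; `render` is a hypothetical UI hook, not part of the patch:

```ts
// Sketch only: how a consumer of subModel's onMessage contract
// should treat the two delivery modes.
declare function render(text: string): void // hypothetical UI hook

let chunks: string[] = []

const onMessage = (d: { text: string; isFinish: boolean; isAll?: boolean }) => {
  if (d.isAll) {
    // Non-stream path: the full answer arrives at once and must
    // replace the interim t('mj.thinking') placeholder.
    chunks = [d.text]
  } else {
    // Stream path: accumulate deltas as they arrive.
    chunks.push(d.text)
  }
  render(chunks.join(''))
}
```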