// openai.js — streaming client for OpenAI-compatible completion APIs
import { fetchEventSource } from '@microsoft/fetch-event-source';
import Prompt from './prompt.js'

class OpenAI {

    /**
     * Streaming client for an OpenAI-compatible completion API.
     *
     * @param {Object} config - provider settings: api_url (required),
     *     token, mode ('chat'|'completion'), model, temperature, top_p,
     *     api_max_token, stop (stream terminator marker), stop_key
     *     (';;'-separated stop sequences).
     */
    constructor(config) {
        this.config = config
        this.abortController = null
        this.callback = null
        // Fall back to 0.7 when no temperature is configured.
        this.temperature = parseFloat(config?.temperature??0.7)
    }

    /**
     * Start a streaming completion request (SSE). Tokens are delivered
     * incrementally through callback.onmessage(text, true); onclose fires
     * when the stream ends, onerror on transport/parse failures.
     *
     * @param {string} prompt - user prompt (completion mode only)
     * @param {Array} history - chat history (chat mode only)
     * @param {*} context - extra context forwarded to the Prompt builder
     * @param {{onmessage: Function, onclose?: Function, onerror?: Function}} callback
     */
    createCompletion (prompt, history, context, callback) {
        const config = this.config

        const abortController = new AbortController();
        const signal = abortController.signal;
        this.abortController = abortController

        this.callback = callback

        const mode = config?.mode??'chat'
        const token = config?.token??'empty'
        const url = config.api_url + (mode === 'chat' ? '/chat/completions' : '/completions')
        const stop = config?.stop??'[DONE]'
        const max_tokens = config?.api_max_token??512
        const model = config?.model??'vicuna-13b-all-v1.1'
        const temperature = this.temperature
        const top_p = config?.top_p??1.0
        let stop_key = config?.stop_key??null
        if (stop_key !== null && stop_key !== '') {
            // Multiple stop sequences are configured as a ';;'-separated string.
            stop_key = stop_key.split(';;')
        }
        const data = {
            model: model,
            max_tokens: parseInt(max_tokens, 10),
            temperature: parseFloat(temperature),
            top_p: parseFloat(top_p),
            stream: true,
            stop: stop_key
        }
        if (mode === 'chat') {
            data.messages = Prompt.getPromptByChatMode(config, context, history)
        } else {
            data.prompt = Prompt.getPromptByTemplate(config, context, prompt)
        }
        fetchEventSource(url, {
            method: 'POST',
            signal: signal,
            headers: {
                'Content-Type': 'application/json',
                // FIX: `token` was read from config but never sent; any
                // authenticated OpenAI-compatible endpoint needs this header.
                'Authorization': `Bearer ${token}`,
            },
            body: JSON.stringify(data),
            onmessage(msg) {
                if (!msg?.data) {
                    return
                }
                // The server signals end-of-stream with a sentinel payload
                // (default '[DONE]').
                if (msg.data === stop) {
                    callback?.onclose?.()
                    abortController.abort();
                    return
                }
                // FIX: guard JSON.parse — a single malformed chunk must not
                // throw out of onmessage and kill the whole stream.
                let jsonData = null
                try {
                    jsonData = JSON.parse(msg.data)
                } catch (err) {
                    callback?.onerror?.(err)
                    return
                }
                let message = null
                if (mode === 'chat') {
                    // Non-streaming replies carry message.content; streaming
                    // deltas carry delta.content. Default to '' so the UI
                    // never receives null/undefined.
                    message = jsonData?.choices?.[0]?.message?.content
                        ?? jsonData?.choices?.[0]?.delta?.content
                        ?? ''
                } else {
                    message = jsonData?.choices?.[0]?.text
                }
                callback?.onmessage(message, true)
            },
            onclose() {
                callback?.onclose?.()
            },
            onerror(err) {
                callback?.onerror?.(err)
            }
        }).catch((err) => {
            // FIX: the returned promise was previously ignored; surface a
            // rejected stream instead of an unhandled promise rejection.
            callback?.onerror?.(err)
        });
    }

    /**
     * Abort the in-flight request, if any, and notify the caller once.
     * Safe to call repeatedly.
     */
    close () {
        if (this.abortController) {
            this.abortController.abort()
            // FIX: clear state so a second close() cannot re-fire onclose.
            this.abortController = null
            this.callback?.onclose?.()
            this.callback = null
        }
    }

}

export default OpenAI