import {Chat, ChatOptions, Request, Response, ResponseStream} from "../base";
import {AxiosInstance, AxiosRequestConfig, CreateAxiosDefaults} from "axios";
import {CreateAxiosProxy} from "../../utils/proxyAgent";
import es from "event-stream";
import {parseJSON} from "../../utils";
import {Stream} from "stream";

/**
 * One chat message in the OpenAI chat-completion wire format.
 * Field names must stay exactly as the remote API expects them.
 */
interface Message {
    // e.g. "system" | "user" | "assistant" — kept as plain string here;
    // NOTE(review): the API presumably rejects other roles — confirm upstream.
    role: string;
    content: string;
}

/**
 * Request body actually sent to the /openai/v1/chat/completions endpoint.
 * Mirrors the OpenAI chat-completion request schema; field names are part
 * of the wire format and must not be renamed.
 */
interface RealReq {
    messages: Message[];
    // Always true in this client — responses are consumed as an SSE stream.
    stream: boolean;
    model: string;
    temperature: number;
    presence_penalty: number;
}

/**
 * Request accepted by the Mcbbs chat adapter.
 * `options.messages` is a JSON-encoded Message[] (parsed in askStream);
 * `options.parse` is the string 'true'/'false' — when 'false' the raw SSE
 * stream is returned instead of the extracted delta text.
 */
export interface McbbsReq extends Request {
    options: {
        parse: string;
        messages: string;
        temperature: number;
    }
}

/**
 * Chat adapter for the mcbbs OpenAI-compatible endpoint
 * (https://ai.mcbbs.gq/api/openai/v1/chat/completions), consumed as a
 * server-sent-event stream.
 */
export class Mcbbs extends Chat {
    // Proxy-aware axios instance pre-configured for the SSE endpoint.
    private client: AxiosInstance;

    constructor(options?: ChatOptions) {
        super(options);
        this.client = CreateAxiosProxy({
            baseURL: 'https://ai.mcbbs.gq/api',
            headers: {
                'Content-Type': 'application/json',
                "accept": "text/event-stream",
                "Cache-Control": "no-cache",
                "Proxy-Connection": "keep-alive"
            }
        } as CreateAxiosDefaults);
    }

    /**
     * Non-streaming ask: accumulates the streamed delta chunks into a single
     * Response. Resolves when the stream closes, and also resolves (with the
     * text collected so far) if the stream errors — previously an 'error'
     * event left the promise pending forever.
     */
    public async ask(req: McbbsReq): Promise<Response> {
        const res = await this.askStream(req)
        const result: Response = {
            text: '', other: {}
        }
        return new Promise(resolve => {
            res.text.on('data', (data) => {
                result.text += data;
            }).on('close', () => {
                resolve(result);
            }).on('error', () => {
                // Don't hang on transport failure; return the partial text.
                resolve(result);
            })
        })

    }

    /**
     * Streaming ask: POSTs an OpenAI-style chat-completion request and
     * returns the response stream. When options.parse !== 'false' the raw
     * SSE stream is mapped down to just the delta text chunks.
     */
    public async askStream(req: McbbsReq): Promise<ResponseStream> {
        const {
            messages,
            temperature = 1,
            parse = 'true'
        } = req.options;
        const data: RealReq = {
            stream: true,
            messages: JSON.parse(messages),
            temperature,
            presence_penalty: 2,
            model: 'gpt-3.5-turbo'
        };
        const res = await this.client.post('/openai/v1/chat/completions', data, {
            responseType: 'stream',
        } as AxiosRequestConfig);
        if (parse === 'false') {
            return {text: res.data}
        }
        return {
            text: this.parseData(res.data)
        };
    }

    /**
     * Splits the SSE stream on blank lines and maps each "data: {...}" event
     * to its choices[0].delta.content text; terminator and malformed events
     * map to ''.
     */
    parseData(v: Stream): Stream {
        return v.pipe(es.split(/\r?\n\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
            const dataStr = chunk.replace('data: ', '');
            // OpenAI-compatible endpoints terminate with "data: [DONE]"
            // (upper-case). The old check compared against '[Done]', which
            // never matched; compare case-insensitively and ignore whitespace.
            if (dataStr.trim().toUpperCase() === '[DONE]') {
                cb(null, '');
                return;
            }
            const data = parseJSON(dataStr, {} as any);
            if (!data?.choices) {
                // Not a well-formed completion event — emit nothing.
                cb(null, '');
                return;
            }
            const [{delta: {content = ""}}] = data.choices;
            cb(null, content);
        }))
    }
}