Remove unused templates
- apiGPT4/index.ts +2 -2
- apiGPT4/model/aidream/index.ts +0 -122
- apiGPT4/model/index.ts +0 -11
- apiGPT4/model/mcbbs/index.ts +0 -100
- apiGPT4/model/you/index.ts +0 -159
apiGPT4/index.ts
CHANGED
@@ -27,7 +27,7 @@ interface AskReq {
 }

 router.get('/ask', async (ctx) => {
-    const {prompt, model = Model.
+    const {prompt, model = Model.Forefront, ...options} = ctx.query as unknown as AskReq;
     if (!prompt) {
         ctx.body = 'please input prompt';
         return;
@@ -42,7 +42,7 @@ router.get('/ask', async (ctx) => {
 });

 router.get('/ask/stream', async (ctx) => {
-    const {prompt, model = Model.
+    const {prompt, model = Model.Forefront, ...options} = ctx.query as unknown as AskReq;
     if (!prompt) {
         ctx.body = 'please input prompt';
         return;
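After this commit only the forefront adapter remains registered, so callers can omit `model` and rely on the new default. A minimal client sketch for the route above (the host, port, and the `askOnce` helper are assumptions for illustration, not part of this repo; Node 18+ is assumed for the global fetch):

// Hypothetical caller for GET /ask; /ask/stream is called the same way but answers
// with a streamed body. Host and port are placeholders, not shown in this diff.
async function askOnce(prompt: string, model = 'forefront'): Promise<string> {
    const url = `http://localhost:3000/ask?prompt=${encodeURIComponent(prompt)}&model=${model}`;
    const res = await fetch(url);
    // The handler replies with 'please input prompt' when prompt is missing.
    return res.text();
}

askOnce('hello').then(console.log).catch(console.error);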
apiGPT4/model/aidream/index.ts
DELETED
@@ -1,122 +0,0 @@
-import {Chat, ChatOptions, Request, Response, ResponseStream} from "../base";
-import {CreateAxiosProxy} from "../../utils/proxyAgent";
-import {AxiosInstance, AxiosRequestConfig, CreateAxiosDefaults} from "axios";
-import {Stream} from "stream";
-import es from "event-stream";
-import {parseJSON} from "../../utils";
-
-export interface AiDreamReq extends Request {
-    options: {
-        parentMessageId: string
-        systemMessage: string
-        temperature: number;
-        top_p: number
-        parse: boolean;
-    };
-}
-
-interface RealReq {
-    options: {
-        parentMessageId?: string;
-    };
-    prompt: string;
-    systemMessage: string;
-    temperature: number;
-    top_p: number;
-}
-
-interface RealRes {
-    role: string;
-    id: string;
-    parentMessageId: string;
-    text: string;
-    delta: string;
-    detail: {
-        id: string;
-        object: string;
-        created: number;
-        model: string;
-        choices: {
-            delta: {
-                content: string;
-            };
-            index: number;
-            finish_reason: any;
-        }[];
-    };
-}
-
-export class AiDream extends Chat {
-    private client: AxiosInstance;
-
-    constructor(options?: ChatOptions) {
-        super(options);
-        this.client = CreateAxiosProxy({
-            baseURL: 'http://aidream.cloud/api/',
-            headers: {
-                "Cache-Control": "no-cache",
-                "Proxy-Connection": "keep-alive"
-            }
-        } as CreateAxiosDefaults);
-    }
-
-    public async ask(req: AiDreamReq): Promise<Response> {
-        req.options.parse = false;
-        const res = await this.askStream(req)
-        const result: Response = {
-            text: '', other: {}
-        }
-        return new Promise(resolve => {
-            res.text.pipe(es.split(/\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
-                const data = parseJSON(chunk, {}) as RealRes;
-                if (!data?.detail?.choices) {
-                    cb(null, '');
-                    return;
-                }
-                const [{delta: {content}}] = data.detail.choices;
-                result.other.parentMessageId = data.parentMessageId;
-                cb(null, content);
-            })).on('data', (data) => {
-                result.text += data;
-            }).on('close', () => {
-                resolve(result);
-            })
-        })
-
-    }
-
-    public async askStream(req: AiDreamReq): Promise<ResponseStream> {
-        const {prompt = ''} = req;
-        const {
-            systemMessage = 'You are ChatGPT, a large language model trained by OpenAI. Follow the user\'s instructions carefully. Respond using markdown.',
-            temperature = 1.0,
-            top_p = 1,
-            parentMessageId,
-            parse = true,
-        } = req.options;
-        const data: RealReq = {
-            options: {parentMessageId}, prompt, systemMessage, temperature, top_p
-        };
-        const res = await this.client.post('/chat-process', data, {
-            responseType: 'stream'
-        } as AxiosRequestConfig);
-        if (parse) {
-            return {
-                text: this.parseData(res.data)
-            }
-        }
-        return {text: res.data};
-    }
-
-    parseData(v: Stream): Stream {
-        return v.pipe(es.split(/\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
-            const data = parseJSON(chunk, {}) as RealRes;
-            if (!data?.detail?.choices) {
-                cb(null, '');
-                return;
-            }
-            const [{delta: {content}}] = data.detail.choices;
-            cb(null, content);
-        }))
-    }
-}
apiGPT4/model/index.ts
CHANGED
@@ -1,15 +1,8 @@
 import {Chat, ChatOptions} from "./base";
-import {You} from "./you";
-import {AiDream} from "./aidream";
 import {Forefrontnew} from "./forefront";
-import {Mcbbs} from "./mcbbs";

 export enum Model {
-    // define new model here
-    You = 'you',
     Forefront = 'forefront',
-    AiDream = 'aidream',
-    Mcbbs = 'mcbbs',
 }

 export class ChatModelFactory {
@@ -23,11 +16,7 @@ export class ChatModelFactory {
     }

     init() {
-        // register new model here
-        this.modelMap.set(Model.You, new You(this.options))
         this.modelMap.set(Model.Forefront, new Forefrontnew(this.options))
-        this.modelMap.set(Model.AiDream, new AiDream(this.options))
-        this.modelMap.set(Model.Mcbbs, new Mcbbs(this.options))
     }

     get(model: Model): Chat | undefined {
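The removed "define new model here" / "register new model here" comments marked the two extension points, and those still apply to the code that survives this commit. A sketch of what re-adding an adapter would look like (the `MyChat` class and the 'mychat' key are hypothetical placeholders, not part of this repo):

// Sketch only: a new adapter plugs in the same way the deleted You/AiDream/Mcbbs
// entries did, with an enum key plus a registration in ChatModelFactory.init().
export enum Model {
    Forefront = 'forefront',
    MyChat = 'mychat', // hypothetical new key
}

// inside ChatModelFactory.init():
//     this.modelMap.set(Model.MyChat, new MyChat(this.options)) // MyChat would extend Chat, like Forefrontnew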
apiGPT4/model/mcbbs/index.ts
DELETED
@@ -1,100 +0,0 @@
-import {Chat, ChatOptions, Request, Response, ResponseStream} from "../base";
-import {AxiosInstance, AxiosRequestConfig, CreateAxiosDefaults} from "axios";
-import {CreateAxiosProxy} from "../../utils/proxyAgent";
-import es from "event-stream";
-import {parseJSON} from "../../utils";
-import {Stream} from "stream";
-
-interface Message {
-    role: string;
-    content: string;
-}
-
-interface RealReq {
-    messages: Message[];
-    stream: boolean;
-    model: string;
-    temperature: number;
-    presence_penalty: number;
-}
-
-export interface McbbsReq extends Request {
-    options: {
-        parse: string;
-        messages: string;
-        temperature: number;
-    }
-}
-
-export class Mcbbs extends Chat {
-    private client: AxiosInstance;
-
-    constructor(options?: ChatOptions) {
-        super(options);
-        this.client = CreateAxiosProxy({
-            baseURL: 'https://ai.mcbbs.gq/api',
-            headers: {
-                'Content-Type': 'application/json',
-                "accept": "text/event-stream",
-                "Cache-Control": "no-cache",
-                "Proxy-Connection": "keep-alive"
-            }
-        } as CreateAxiosDefaults);
-    }
-
-    public async ask(req: McbbsReq): Promise<Response> {
-        const res = await this.askStream(req)
-        const result: Response = {
-            text: '', other: {}
-        }
-        return new Promise(resolve => {
-            res.text.on('data', (data) => {
-                result.text += data;
-            }).on('close', () => {
-                resolve(result);
-            })
-        })
-
-    }
-
-    public async askStream(req: McbbsReq): Promise<ResponseStream> {
-        const {
-            messages,
-            temperature = 1,
-            parse = 'true'
-        } = req.options;
-        const data: RealReq = {
-            stream: true,
-            messages: JSON.parse(messages),
-            temperature,
-            presence_penalty: 2,
-            model: 'gpt-3.5-turbo'
-        };
-        const res = await this.client.post('/openai/v1/chat/completions', data, {
-            responseType: 'stream',
-        } as AxiosRequestConfig);
-        if (parse === 'false') {
-            return {text: res.data}
-        }
-        return {
-            text: this.parseData(res.data)
-        };
-    }
-
-    parseData(v: Stream): Stream {
-        return v.pipe(es.split(/\r?\n\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
-            const dataStr = chunk.replace('data: ', '');
-            if (dataStr === '[Done]') {
-                cb(null, '');
-                return;
-            }
-            const data = parseJSON(dataStr, {} as any);
-            if (!data?.choices) {
-                cb(null, '');
-                return;
-            }
-            const [{delta: {content = ""}}] = data.choices;
-            cb(null, content);
-        }))
-    }
-}
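For reference, the removed Mcbbs adapter took `messages` as a JSON-encoded string (it ran JSON.parse on it) and read `parse` as the string 'true' or 'false'. A sketch of how it was driven before this commit, assuming a pre-commit ChatModelFactory instance named `factory` (an assumption for illustration):

// Sketch under pre-commit assumptions: Model.Mcbbs and the Mcbbs class no longer exist after this change.
(async () => {
    const chat = factory.get(Model.Mcbbs);
    const res = await chat?.ask({
        prompt: 'hi',
        options: {
            messages: JSON.stringify([{role: 'user', content: 'hi'}]), // JSON string, parsed inside askStream
            temperature: 1,
            parse: 'true', // askStream returned the raw event stream when this was 'false'
        },
    } as any);
    console.log(res?.text);
})();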
apiGPT4/model/you/index.ts
DELETED
@@ -1,159 +0,0 @@
-import {v4 as uuidv4} from 'uuid';
-//@ts-ignore
-import UserAgent from 'user-agents';
-import {Session} from "tls-client/dist/esm/sessions";
-import {Params} from "tls-client/dist/esm/types";
-import {parseJSON, toEventCB, toEventStream} from "../../utils";
-import {Chat, ChatOptions, Request, Response, ResponseStream} from "../base";
-import {CreateTlsProxy} from "../../utils/proxyAgent";
-
-const userAgent = new UserAgent();
-
-interface IRequestOptions {
-    page?: number;
-    count?: number;
-    safeSearch?: string;
-    onShoppingPage?: string;
-    mkt?: string;
-    responseFilter?: string;
-    domain?: string;
-    queryTraceId?: string | null;
-    chat?: any[] | null;
-    includeLinks?: string;
-    detailed?: string;
-    debug?: string;
-    proxy?: string | null;
-}
-
-interface SearchResult {
-    search: {
-        third_party_search_results: {
-            name: string,
-            url: string,
-            displayUrl: string,
-            snippet: string,
-            language: null | string,
-            thumbnailUrl: string,
-            isFamilyFriendly: null | boolean,
-            isNavigational: null | boolean,
-            snmix_link: null | string
-        }[],
-        rankings: {
-            pole: null,
-            sidebar: null,
-            mainline: {
-                answerType: string,
-                resultIndex: number,
-                value: {
-                    id: string
-                }
-            }[]
-        },
-        query_context: {
-            spelling: null,
-            originalQuery: string
-        },
-        third_party_web_results_source: number
-    },
-    time: number,
-    query: string,
-    exactAbTestSlices: {
-        abUseQueryRewriter: string
-    }
-}
-
-export class You extends Chat {
-    private session: Session;
-
-    constructor(props?: ChatOptions) {
-        super(props);
-        this.session = CreateTlsProxy({clientIdentifier: 'chrome_108'});
-        this.session.headers = this.getHeaders();
-    }
-
-    private async request(req: Request) {
-        let {
-            page = 1,
-            count = 10,
-            safeSearch = 'Moderate',
-            onShoppingPage = 'False',
-            mkt = '',
-            responseFilter = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
-            domain = 'youchat',
-            queryTraceId = null,
-            chat = null,
-            includeLinks = "False",
-            detailed = "False",
-            debug = "False",
-        } = req.options || {};
-        if (!chat) {
-            chat = [];
-        }
-        return await this.session.get(
-            'https://you.com/api/streamingSearch', {
-                params: {
-                    q: req.prompt,
-                    page: page + '',
-                    count: count + '',
-                    safeSearch: safeSearch + '',
-                    onShoppingPage: onShoppingPage + '',
-                    mkt: mkt + '',
-                    responseFilter: responseFilter + '',
-                    domain: domain + '',
-                    queryTraceId: queryTraceId || uuidv4(),
-                    chat: JSON.stringify(chat),
-                } as Params,
-            }
-        );
-    }
-
-    public async askStream(req: Request): Promise<ResponseStream> {
-        const response = await this.request(req);
-        return {text: toEventStream(response.content), other: {}}
-    }
-
-    public async ask(
-        req: Request): Promise<Response> {
-        const response = await this.request(req);
-        return new Promise(resolve => {
-            const res: Response = {
-                text: '',
-                other: {},
-            };
-            toEventCB(response.content, (eventName, data) => {
-                let obj: any;
-                switch (eventName) {
-                    case 'youChatToken':
-                        obj = parseJSON(data, {}) as any;
-                        res.text += obj.youChatToken;
-                        break;
-                    case 'done':
-                        resolve(res);
-                        return;
-                    default:
-                        obj = parseJSON(data, {}) as any;
-                        res.other[eventName] = obj;
-                        return;
-                }
-            });
-        })
-    }
-
-    getHeaders(): { [key: string]: string } {
-        return {
-            authority: 'you.com',
-            accept: 'text/event-stream',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'cache-control': 'no-cache',
-            referer: 'https://you.com/search?q=who+are+you&tbm=youchat',
-            'sec-ch-ua': '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"Windows"',
-            'sec-fetch-dest': 'empty',
-            'sec-fetch-mode': 'cors',
-            'sec-fetch-site': 'same-origin',
-            cookie: `safesearch_guest=Moderate; uuid_guest=${uuidv4()}`,
-            'user-agent': userAgent.toString(),
-        };
-    }
-}