feat: minify template before sending it to OpenAI; add signal control to be able to cancel the request
src/services/api/index.ts  +32 -3  (CHANGED)
```diff
@@ -2,6 +2,26 @@ import { ChatCompletionRequestMessage, OpenAIApi } from "openai";
 import { nanoid } from "nanoid";
 import { extractCode, miniPrompt } from "@/utils/prompt";
 import { systemMessage } from "@/constants";
+import { OpenAIError } from "./openai";
+import { minify } from "terser";
+
+const minifyConfig = {
+	compress: {
+		dead_code: true,
+		drop_console: true,
+		drop_debugger: true,
+		keep_classnames: false,
+		keep_fargs: true,
+		keep_fnames: false,
+		keep_infinity: false,
+	},
+	mangle: false,
+	module: false,
+	sourceMap: false,
+	output: {
+		comments: true,
+	},
+};
 
 interface ToOpenAIProps {
 	command: string;
@@ -22,7 +42,7 @@ export async function toOpenAI({
 	model = "gpt-3.5-turbo",
 	maxTokens = "2048",
 	client = null,
-	signal,
+	signal = new AbortController().signal,
 }: ToOpenAIProps) {
 	if (client === null) {
 		throw new Error("OpenAI client is not defined");
@@ -30,13 +50,16 @@ export async function toOpenAI({
 
 	const prompt_ = prompt.trim();
 
+	const minifiedCode = await minify(template, minifyConfig);
+
+	// // ${template.trim().replace(/^\s+/gm, "").replace(/^\n+/g, "").replace(/\s+/, " ")}
 	const nextMessage: ChatCompletionRequestMessage = {
 		role: "user",
 		content: miniPrompt`
 			"${command}": ${prompt_}. Return the full source code of the game.
 			TEMPLATE:
 			\`\`\`javascript
-			${template.trim().replace(/^\s+/gm, "").replace(/^\n+/g, "").replace(/\s+/, " ")}
+			${minifiedCode.code}
 			\`\`\`
 			`,
 	};
@@ -84,6 +107,12 @@ export async function toOpenAI({
 			id: nanoid(),
 		};
 	} catch (error) {
-
+		const err = error as OpenAIError;
+
+		if (err.response) {
+			throw err.response;
+		} else {
+			throw error;
+		}
 	}
 }
```
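Notes on the change, for reviewers: the game template is now run through terser before it is interpolated into the prompt, which trims whitespace, console noise, and dead code while `output.comments: true` keeps comments and `mangle: false` keeps identifiers readable for the model. Below is a standalone sketch of that step, assuming terser ^5; `exampleTemplate` is a made-up stand-in for the real template.

```typescript
// Standalone sketch of the template-minification step (terser ^5 assumed).
// `exampleTemplate` is hypothetical; the real template lives elsewhere in the app.
import { minify } from "terser";

const minifyConfig = {
	compress: {
		dead_code: true,     // drop unreachable code
		drop_console: true,  // strip console.* calls from the template
		drop_debugger: true, // strip debugger statements
		keep_classnames: false,
		keep_fargs: true,    // keep unused function arguments
		keep_fnames: false,
		keep_infinity: false,
	},
	mangle: false,   // keep identifiers readable so the model can work with them
	module: false,
	sourceMap: false,
	output: {
		comments: true, // keep comments; they can carry hints for the model
	},
};

async function demo() {
	const exampleTemplate = `
		// game bootstrap
		function setup() {
			console.log("debug: setup");
			if (false) { alert("never runs"); }
			return { width: 800, height: 600 };
		}
	`;

	const minified = await minify(exampleTemplate, minifyConfig);
	// minified.code is the compact source that gets interpolated into the TEMPLATE block
	console.log(minified.code);
}

demo().catch(console.error);
```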
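The second part of the commit is cancellation: `signal` now defaults to `new AbortController().signal`, so it is always defined. A hypothetical caller could cancel an in-flight request as in the sketch below. This assumes `toOpenAI` forwards the signal to the underlying HTTP request (for example via the axios options of `client.createChatCompletion`), which is wiring not visible in this diff, and it only passes the parameters that appear here; the real `ToOpenAIProps` may require more (such as the template), and the command and prompt values are made up.

```typescript
// Hypothetical caller-side sketch: cancelling a pending toOpenAI call.
// Assumes toOpenAI passes `signal` through to the HTTP layer.
import { Configuration, OpenAIApi } from "openai";
import { toOpenAI } from "@/services/api";

const client = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY }));
const controller = new AbortController();

const pending = toOpenAI({
	command: "ADD FEATURE",                // illustrative value
	prompt: "add a pause menu to the game", // illustrative value
	client,
	signal: controller.signal,             // omit it and the new default signal is used
});

// e.g. wired to a "Stop" button: aborts the request and rejects `pending`
controller.abort();

pending.catch((reason) => {
	console.error("request cancelled or failed:", reason);
});
```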
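Finally, the reworked catch block casts the error to `OpenAIError` (imported from "./openai", not shown in this diff) and rethrows `err.response` when one exists, otherwise the original error. A caller could distinguish the two roughly as below, continuing from the sketch above; the `status`/`data` fields are an assumption based on the axios-style responses the openai v3 client returns.

```typescript
// Continues the sketch above (same `client` and toOpenAI import).
// The status/data shape is assumed, not taken from "./openai".
async function requestWithHandling() {
	try {
		return await toOpenAI({ command: "ADD FEATURE", prompt: "add scoring", client });
	} catch (failure) {
		const response = failure as { status?: number; data?: unknown };
		if (response.status) {
			// toOpenAI rethrew err.response: an HTTP response with status and body
			console.error("OpenAI responded with", response.status, response.data);
		} else {
			// abort, network failure, or a non-HTTP error rethrown as-is
			console.error("request failed:", failure);
		}
		return null;
	}
}
```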