File size: 2,441 Bytes
35d05d3
a86df80
6c2bcb4
2f65818
1c782e2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a86df80
35d05d3
 
 
 
 
 
 
 
2d85080
35d05d3
 
a86df80
6c2bcb4
65567a2
6c2bcb4
a86df80
65567a2
 
65cfba9
1c782e2
35d05d3
65cfba9
 
2f65818
 
65cfba9
 
1c782e2
 
 
a86df80
 
 
6c2bcb4
90d9359
6c2bcb4
1c782e2
90d9359
 
a86df80
90d9359
2aa79c0
 
 
 
 
2f65818
2aa79c0
 
 
a86df80
 
2d85080
 
 
 
 
 
 
 
 
a86df80
 
 
 
90d9359
 
 
6c2bcb4
90d9359
 
 
 
 
a86df80
 
 
 
 
 
 
 
 
 
1c782e2
 
 
 
 
 
 
a86df80
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import { ChatCompletionRequestMessage, OpenAIApi } from "openai";
import { nanoid } from "nanoid";
import { extractCode, miniPrompt } from "@/utils/prompt";
import { systemMessage } from "@/constants";
import { OpenAIError } from "./openai";
import { minify } from "terser";

// Terser options used to shrink the user's game template before it is
// embedded in the chat prompt (fewer tokens = more room for the reply).
const minifyConfig = {
	compress: {
		dead_code: true,
		drop_console: true,
		drop_debugger: true,
		keep_classnames: false,
		keep_fargs: true,
		keep_infinity: false,
		keep_fnames: false,
	},
	// Mangling is disabled so identifiers stay readable for the model.
	mangle: false,
	module: false,
	sourceMap: false,
	// NOTE(review): terser v5 renamed `output` to `format`; `output` is still
	// accepted as an alias — confirm the installed terser version before renaming.
	output: {
		// Comments are kept — they may carry intent the model should see.
		comments: true,
	},
};

/** Arguments for {@link toOpenAI}. All numeric settings arrive as strings (form values). */
interface ToOpenAIProps {
	/** Instruction tag prefixed to the prompt, e.g. "CREATE_GAME" or "ADD_FEATURE". */
	command: string;
	/** The user's natural-language request. */
	prompt: string;
	/** Sampling temperature as a string; parsed with parseFloat. */
	temperature: string;
	/** Current game source code to minify and embed in the prompt. */
	template: string;
	/** OpenAI model id, e.g. "gpt-3.5-turbo". */
	model: string;
	/** Completion token limit as a string; parsed with parseInt. */
	maxTokens: string;
	/** Configured OpenAI client; the call throws if null. */
	client: OpenAIApi | null;
	/** Optional abort signal to cancel the in-flight request. */
	signal?: AbortSignal;
}

/**
 * Sends the user's command plus a minified copy of the current game template
 * to the OpenAI chat-completion API and returns the extracted code reply.
 *
 * @returns A message-like object: `content` (code with command keywords
 *   stripped), the original `task` prompt, and a fresh nanoid `id`.
 * @throws Error when no client is configured; rethrows the API error
 *   (`err.response` when present) on request failure.
 */
export async function toOpenAI({
	command = "CREATE_GAME",
	prompt = "extend the code",
	temperature = "0.2",
	template = "",
	model = "gpt-3.5-turbo",
	maxTokens = "2048",
	client = null,
	signal = new AbortController().signal,
}: ToOpenAIProps) {
	if (client === null) {
		throw new Error("OpenAI client is not defined");
	}

	const task = prompt.trim();

	// Minify the template to save prompt tokens. Terser's MinifyOutput.code is
	// `string | undefined`, and minify() rejects on unparsable input — in both
	// cases fall back to the raw template instead of embedding "undefined" or
	// failing the whole request.
	let templateCode = template.trim();
	try {
		const minified = await minify(template, minifyConfig);
		templateCode = minified.code ?? templateCode;
	} catch {
		// Template isn't valid JS (yet); send it as-is.
	}

	const nextMessage: ChatCompletionRequestMessage = {
		role: "user",
		content: miniPrompt`
			"${command}": ${task}. Return the full source code of the game.
			TEMPLATE:
			\`\`\`javascript
			${templateCode}
			\`\`\`
			`,
	};

	const messages: ChatCompletionRequestMessage[] = [
		{
			role: "system",
			content: miniPrompt`${systemMessage}`,
		},
		nextMessage,
	];

	try {
		const response = await client.createChatCompletion(
			{
				model,
				messages,
				max_tokens: Number.parseInt(maxTokens, 10),
				temperature: Number.parseFloat(temperature),
			},
			{ signal }
		);

		// Guard against an empty choices array as well as a missing message.
		const message = response.data.choices[0]?.message;

		if (message) {
			return {
				...message,
				// Strip any echoed command/section keywords from the reply.
				content: extractCode(message.content).replace(
					/(COMMANDS|CREATE_GAME|ADD_FEATURE|REMOVE_FEATURE|UPDATE_FEATURE|FIX_BUG|TEMPLATE|OUTPUT FORMAT).*\n/,
					""
				),
				task,
				id: nanoid(),
			};
		}

		// The API answered but without a usable message.
		// ToDo: surface a proper error to the caller instead of a placeholder.
		return {
			content: "/* BROKEN */",
			task,
			id: nanoid(),
		};
	} catch (error) {
		const err = error as OpenAIError;

		// Prefer the structured API response (status, data) when available.
		if (err.response) {
			throw err.response;
		} else {
			throw error;
		}
	}
}