// ai-comic-factory/src/app/queries/predictWithOpenAI.ts
// Note (from the commit message for 6463491): a module-level dynamic import was breaking the
// build, but that's not a problem — the env vars and the OpenAI client can simply be
// initialized inside the function instead.
"use server"
import type { ChatCompletionMessage } from "openai/resources/chat"
import OpenAI from "openai"
export async function predict(inputs: string): Promise<string> {
  // Read the configuration and create the client at call time (see the note above).
  const openaiApiKey = `${process.env.AUTH_OPENAI_API_KEY || ""}`
  const openaiApiBaseUrl = `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
  const openaiApiModel = `${process.env.LLM_OPENAI_API_MODEL || "gpt-3.5-turbo"}`

  const openai = new OpenAI({
    apiKey: openaiApiKey,
    baseURL: openaiApiBaseUrl,
  })

  // Pass the caller's prompt as a single system message.
  const messages: ChatCompletionMessage[] = [
    { role: "system", content: inputs },
  ]

  try {
    const res = await openai.chat.completions.create({
      messages: messages,
      stream: false,
      model: openaiApiModel,
      temperature: 0.8
    })
    return res.choices[0].message.content || ""
  } catch (err) {
    // Fail soft: log the error and return an empty string so callers can fall back.
    console.error(`error during generation: ${err}`)
    return ""
  }
}
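
/*
  Usage sketch (illustrative, not part of the original file): because this module is marked
  "use server", predict() can be awaited from a Next.js server component or another server
  action. The prompt string below is a made-up example; on failure the function returns "".

    const story = await predict(
      "You are a comic book author. Write a short four-panel story about a time-traveling cat."
    )
    console.log(story || "generation failed or returned nothing")
*/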