import { MISTRAL_API_KEY } from "../api.js";

export class LLM {
  constructor() {
    this.apiKey = MISTRAL_API_KEY;
  }
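
  /**
   * Sends a plain chat completion request to the Mistral chat completions
   * endpoint and returns the assistant's reply as a string.
   */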
  async getChatCompletion(systemPrompt, userInput) {
    const messages = [
      {
        role: "system",
        content: systemPrompt,
      },
      {
        role: "user",
        content: userInput,
      },
    ];

    try {
      const response = await fetch("https://api.mistral.ai/v1/chat/completions", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${this.apiKey}`,
        },
        body: JSON.stringify({
          model: "mistral-large-latest",
          messages: messages,
          temperature: 0.7,
          max_tokens: 150,
        }),
      });

      const data = await response.json();
      return data.choices[0].message.content;
    } catch (error) {
      console.error("LLM Error:", error);
      throw error;
    }
  }
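
  /**
   * Private helper: sends a tool-calling request (tool_choice "any" forces
   * the model to use a tool) and returns the first tool call's function
   * name, parsed arguments, and id.
   */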
  async #getFunctionCall(systemPrompt, userInput, tools) {
    const messages = [
      {
        role: "system",
        content: systemPrompt,
      },
      {
        role: "user",
        content: userInput,
      },
    ];

    try {
      const response = await fetch("https://api.mistral.ai/v1/chat/completions", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${this.apiKey}`,
        },
        body: JSON.stringify({
          model: "mistral-large-latest",
          messages: messages,
          tools: tools,
          tool_choice: "any", // Forces tool use
        }),
      });

      const data = await response.json();

      // Extract function call details from the response
      const toolCall = data.choices[0].message.tool_calls[0];
      return {
        functionName: toolCall.function.name,
        arguments: JSON.parse(toolCall.function.arguments),
        toolCallId: toolCall.id,
      };
    } catch (error) {
      console.error("Function Call Error:", error);
      throw error;
    }
  }
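
  /**
   * Converts an array of { key, description, parameters } entries into the
   * tools schema expected by the API, asks the model to pick the most
   * appropriate function for the prompt, and returns its name and arguments.
   * All declared parameters are marked as required.
   */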
  async getFunctionKey(functionDescriptions, prompt) {
    // Convert the key-value pairs into the tools format required by the API
    const tools = functionDescriptions.map(({ key, description, parameters = {} }) => ({
      type: "function",
      function: {
        name: key,
        description: description,
        parameters: {
          type: "object",
          properties: {
            ...Object.fromEntries(
              Object.entries(parameters).map(([paramName, paramConfig]) => [
                paramName,
                {
                  type: paramConfig.type || "string", // Use provided type or default to "string"
                  description: paramConfig.description,
                },
              ])
            ),
          },
          required: Object.keys(parameters), // Make all parameters required
        },
      },
    }));

    // Use the private getFunctionCall method to make the API call
    const result = await this.#getFunctionCall(
      "You are a helpful assistant. Based on the user's input, choose the most appropriate function to call.",
      prompt,
      tools
    );

    return {
      functionName: result.functionName,
      parameters: result.arguments,
    };
  }
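
  /**
   * Requests a JSON-mode completion (response_format: json_object) and
   * returns the parsed object.
   */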
  async getJsonCompletion(systemPrompt, userInput) {
    const messages = [
      {
        role: "system",
        content: systemPrompt,
      },
      {
        role: "user",
        content: userInput,
      },
    ];

    try {
      const response = await fetch("https://api.mistral.ai/v1/chat/completions", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${this.apiKey}`,
        },
        body: JSON.stringify({
          model: "mistral-large-latest",
          messages: messages,
          temperature: 0.7,
          max_tokens: 256,
          response_format: { type: "json_object" },
        }),
      });

      const data = await response.json();
      console.log(data);
      return JSON.parse(data.choices[0].message.content);
    } catch (error) {
      console.error("JSON LLM Error:", error);
      throw error;
    }
  }
}
export default LLM;
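
// Chat completion usage example (illustrative sketch; the prompts below are hypothetical):
// const llm = new LLM();
// const reply = await llm.getChatCompletion(
//   "You are a concise assistant.",
//   "Summarize the benefits of unit testing in one sentence."
// );
// console.log(reply);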

// Function call usage example:
// const functionDescriptions = [
//   {
//     key: "searchProducts",
//     description: "Search for products in the catalog",
//     parameters: {
//       query: {
//         type: "string",
//         description: "Search query"
//       },
//       maxPrice: {
//         type: "number",
//         description: "Maximum price filter"
//       },
//       inStock: {
//         type: "boolean",
//         description: "Filter for in-stock items only"
//       }
//     }
//   }
// ];
// const result = await llm.getFunctionKey(functionDescriptions, "Find red shoes in footwear");
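
// JSON completion usage example (illustrative sketch; the prompt and fields below are hypothetical):
// const profile = await llm.getJsonCompletion(
//   "Return a JSON object with the fields name and mood describing the character the user mentions.",
//   "A cheerful robot named Beep"
// );
// console.log(profile.name, profile.mood);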