code
stringlengths 419
47.9k
| apis
sequencelengths 1
7
| extract_api
stringlengths 67
6.13k
|
---|---|---|
package dev.langchain4j.rag.query.transformer;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.query.Metadata;
import dev.langchain4j.rag.query.Query;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.Collection;
import java.util.List;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
class CompressingQueryTransformerIT {

    @ParameterizedTest
    @MethodSource("models")
    void should_compress_query_and_chat_memory_into_single_query(ChatLanguageModel model) {
        // given: a short conversation where the follow-up question ("he") refers back to Klaus
        List<ChatMessage> previousMessages = asList(
                UserMessage.from("Tell me about Klaus Heisler"),
                AiMessage.from("He is a cool guy")
        );
        UserMessage followUp = UserMessage.from("How old is he?");
        Metadata queryMetadata = Metadata.from(followUp, "default", previousMessages);
        Query originalQuery = Query.from(followUp.text(), queryMetadata);

        CompressingQueryTransformer transformer = new CompressingQueryTransformer(model);

        // when
        Collection<Query> transformed = transformer.transform(originalQuery);
        System.out.println(transformed);

        // then: memory + follow-up should collapse into a single standalone query
        // that resolves the pronoun, without any "role:" style prefixes
        assertThat(transformed).hasSize(1);
        Query compressed = transformed.iterator().next();
        assertThat(compressed.text()).contains("Klaus");
        assertThat(compressed.text()).doesNotContain(":");
    }

    // Models under test; credentials are taken from the environment.
    static Stream<Arguments> models() {
        return Stream.of(
                Arguments.of(
                        OpenAiChatModel.builder()
                                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                                .apiKey(System.getenv("OPENAI_API_KEY"))
                                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                                .logRequests(true)
                                .logResponses(true)
                                .build()
                )
                // TODO add more models
        );
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1894, 2300), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1894, 2259), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1894, 2207), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1894, 2156), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1894, 2067), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1894, 1994), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package org.acme.examples.aiservices;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.data.message.ChatMessageType.AI;
import static dev.langchain4j.data.message.ChatMessageType.SYSTEM;
import static dev.langchain4j.data.message.ChatMessageType.USER;
import static io.quarkiverse.langchain4j.openai.test.WiremockUtils.DEFAULT_TOKEN;
import static java.time.Month.JULY;
import static org.acme.examples.aiservices.MessageAssertUtils.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.tuple;
import java.io.IOException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import jakarta.validation.constraints.NotNull;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.stubbing.Scenario;
import com.github.tomakehurst.wiremock.stubbing.ServeEvent;
import com.github.tomakehurst.wiremock.verification.LoggedRequest;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiModerationModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.Moderate;
import dev.langchain4j.service.ModerationException;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.V;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.opentelemetry.instrumentation.annotations.SpanAttribute;
import io.quarkiverse.langchain4j.openai.test.WiremockUtils;
import io.quarkus.test.QuarkusUnitTest;
public class AiServicesTest {
// Quarkus test archive containing the WireMock helpers used by every test below.
@RegisterExtension
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClasses(WiremockUtils.class, MessageAssertUtils.class));
// Fake OpenAI endpoint (started in beforeAll) and JSON mapper for request inspection.
static WireMockServer wireMockServer;
static ObjectMapper mapper;
/**
 * Builds a chat model pointed at the local WireMock server instead of the real OpenAI API.
 * The API key is a dummy value since WireMock does not validate it.
 */
private static OpenAiChatModel createChatModel() {
    return OpenAiChatModel.builder()
            .baseUrl("http://localhost:8089/v1")
            .logRequests(true)
            .logResponses(true)
            .apiKey("whatever")
            .build();
}
/**
 * Builds a moderation model pointed at the local WireMock server instead of the real OpenAI API.
 * The API key is a dummy value since WireMock does not validate it.
 */
private static OpenAiModerationModel createModerationModel() {
    return OpenAiModerationModel.builder()
            .baseUrl("http://localhost:8089/v1")
            .logRequests(true)
            .logResponses(true)
            .apiKey("whatever")
            .build();
}
// Fresh sliding-window memory holding at most the 10 most recent messages.
private static MessageWindowChatMemory createChatMemory() {
return MessageWindowChatMemory.withMaxMessages(10);
}
// Starts the fake OpenAI server on port 8089 (matching createChatModel's baseUrl)
// and prepares the mapper used to parse captured requests.
@BeforeAll
static void beforeAll() {
    mapper = new ObjectMapper();
    wireMockServer = new WireMockServer(options().port(8089));
    wireMockServer.start();
}
// Shuts the fake OpenAI server down once all tests in the class have run.
@AfterAll
static void afterAll() {
wireMockServer.stop();
}
// Resets stubs and recorded requests before each test, then installs the
// default chat-completions stub; individual tests override it as needed.
@BeforeEach
void setup() {
wireMockServer.resetAll();
wireMockServer.stubFor(WiremockUtils.defaultChatCompletionsStub());
}
// Minimal AI service: the raw message is sent as-is (no @UserMessage template).
interface Assistant {
String chat(String message);
}
// Verifies that an un-annotated single-argument method forwards the argument
// verbatim as the sole user message.
@Test
public void test_simple_instruction_with_single_argument_and_no_annotations() throws IOException {
    Assistant assistant = AiServices.create(Assistant.class, createChatModel());
    String answer = assistant.chat("Tell me a joke about developers");

    assertThat(answer).isNotBlank();
    assertSingleRequestMessage(getRequestAsMap(), "Tell me a joke about developers");
}
// Exercises template resolution through a nested property ({{wrapper.topic}}).
interface Humorist {
@UserMessage("Tell me a joke about {{wrapper.topic}}")
String joke(@SpanAttribute @NotNull Wrapper wrapper);
}
// Simple holder whose 'topic' accessor is resolved by the {{wrapper.topic}} template.
public record Wrapper(String topic) {
}
// Verifies that {{wrapper.topic}} in the @UserMessage template is replaced
// with the record's accessor value.
@Test
public void test_simple_instruction_with_single_argument() throws IOException {
    Humorist humorist = AiServices.create(Humorist.class, createChatModel());
    String joke = humorist.joke(new Wrapper("programmers"));

    assertThat(joke).isNotBlank();
    assertSingleRequestMessage(getRequestAsMap(), "Tell me a joke about programmers");
}
// Return types (LocalDate/LocalTime/LocalDateTime) drive the format instruction
// that AiServices appends to each prompt — see the assertions in the tests below.
interface DateTimeExtractor {
@UserMessage("Extract date from {{it}}")
LocalDate extractDateFrom(String text);
@UserMessage("Extract time from {{it}}")
LocalTime extractTimeFrom(String text);
@UserMessage("Extract date and time from {{it}}")
LocalDateTime extractDateTimeFrom(String text);
}
// LocalDate return type: response "1968-07-04" must parse, and the outgoing
// prompt must carry the appended "yyyy-MM-dd" format instruction.
@Test
void test_extract_date() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(), "1968-07-04"));
DateTimeExtractor dateTimeExtractor = AiServices.create(DateTimeExtractor.class, createChatModel());
LocalDate result = dateTimeExtractor.extractDateFrom(
"The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.");
assertThat(result).isEqualTo(LocalDate.of(1968, JULY, 4));
// expected string is byte-exact: template + "\n" + format instruction
assertSingleRequestMessage(getRequestAsMap(),
"Extract date from The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.\nYou must answer strictly in the following format: yyyy-MM-dd");
}
// LocalTime return type: response "23:45:00" must parse, and the outgoing
// prompt must carry the appended "HH:mm:ss" format instruction.
@Test
void test_extract_time() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(), "23:45:00"));
DateTimeExtractor dateTimeExtractor = AiServices.create(DateTimeExtractor.class, createChatModel());
LocalTime result = dateTimeExtractor.extractTimeFrom(
"The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.");
assertThat(result).isEqualTo(LocalTime.of(23, 45, 0));
assertSingleRequestMessage(getRequestAsMap(),
"Extract time from The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.\nYou must answer strictly in the following format: HH:mm:ss");
}
// LocalDateTime return type: ISO-like response must parse, and the outgoing
// prompt must carry the appended "yyyy-MM-ddTHH:mm:ss" format instruction.
@Test
void test_extract_date_time() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(), "1968-07-04T23:45:00"));
DateTimeExtractor dateTimeExtractor = AiServices.create(DateTimeExtractor.class, createChatModel());
LocalDateTime result = dateTimeExtractor.extractDateTimeFrom(
"The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.");
assertThat(result).isEqualTo(LocalDateTime.of(1968, JULY, 4, 23, 45, 0));
assertSingleRequestMessage(getRequestAsMap(),
"Extract date and time from The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight, following the celebrations of Independence Day.\nYou must answer strictly in the following format: yyyy-MM-ddTHH:mm:ss");
}
// Enum return type: the constant names are listed verbatim in the prompt's
// format instruction (see test_extract_enum).
enum Sentiment {
POSITIVE,
NEUTRAL,
NEGATIVE
}
// AI service returning an enum value parsed from the model's text response.
interface SentimentAnalyzer {
@UserMessage("Analyze sentiment of {{it}}")
Sentiment analyzeSentimentOf(String text);
}
// Enum return type: "POSITIVE" from the stub must map to Sentiment.POSITIVE,
// and the prompt must enumerate the allowed constants.
@Test
void test_extract_enum() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(), "POSITIVE"));
SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, createChatModel());
Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf(
"This LaptopPro X15 is wicked fast and that 4K screen is a dream.");
assertThat(sentiment).isEqualTo(Sentiment.POSITIVE);
assertSingleRequestMessage(getRequestAsMap(),
"Analyze sentiment of This LaptopPro X15 is wicked fast and that 4K screen is a dream.\nYou must answer strictly in the following format: one of [POSITIVE, NEUTRAL, NEGATIVE]");
}
// Target type for JSON extraction; @JsonCreator on the compact constructor lets
// Jackson deserialize the model's JSON response into this record.
record Person(String firstName, String lastName, LocalDate birthDate) {
@JsonCreator
public Person {
}
}
// AI service returning a POJO parsed from the model's JSON response.
interface PersonExtractor {
@UserMessage("Extract information about a person from {{it}}")
Person extractPersonFrom(String text);
}
// POJO return type: the stubbed JSON response must deserialize into Person,
// and the prompt must carry the generated JSON-schema-style format instruction.
@Test
void test_extract_custom_POJO() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
// this is supposed to be a string inside a json string hence all the escaping...
"{\\n\\\"firstName\\\": \\\"John\\\",\\n\\\"lastName\\\": \\\"Doe\\\",\\n\\\"birthDate\\\": \\\"1968-07-04\\\"\\n}"));
PersonExtractor personExtractor = AiServices.create(PersonExtractor.class, createChatModel());
String text = "In 1968, amidst the fading echoes of Independence Day, "
+ "a child named John arrived under the calm evening sky. "
+ "This newborn, bearing the surname Doe, marked the start of a new journey.";
Person result = personExtractor.extractPersonFrom(text);
assertThat(result.firstName).isEqualTo("John");
assertThat(result.lastName).isEqualTo("Doe");
assertThat(result.birthDate).isEqualTo(LocalDate.of(1968, JULY, 4));
assertSingleRequestMessage(getRequestAsMap(),
"Extract information about a person from In 1968, amidst the fading echoes of Independence Day, a child named John arrived under the calm evening sky. This newborn, bearing the surname Doe, marked the start of a new journey.\nYou must answer strictly in the following JSON format: {\n\"firstName\": (type: string),\n\"lastName\": (type: string),\n\"birthDate\": (type: date string (2023-12-31)),\n}");
}
// Target type for recipe extraction; @Description text is embedded verbatim in
// the generated JSON format instruction (see the recipe tests' assertions).
static class Recipe {
private String title;
private String description;
@Description("each step should be described in 4 words, steps should rhyme")
private String[] steps;
private Integer preparationTimeMinutes;
}
// Structured prompt: {{dish}} and {{ingredients}} in the template are resolved
// from this class's fields, so the field names are part of the contract.
@StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
static class CreateRecipePrompt {
private final String dish;
private final List<String> ingredients;
public CreateRecipePrompt(String dish, List<String> ingredients) {
this.dish = dish;
this.ingredients = ingredients;
}
public String getDish() {
return dish;
}
public List<String> getIngredients() {
return ingredients;
}
}
// Three prompt styles for the same return type: varargs template, structured
// prompt object, and structured prompt combined with a templated system message.
interface Chef {
@UserMessage("Create recipe using only {{it}}")
Recipe createRecipeFrom(String... ingredients);
Recipe createRecipeFrom(CreateRecipePrompt prompt);
@SystemMessage("You are a very {{character}} chef")
Recipe createRecipeFrom(@UserMessage CreateRecipePrompt prompt, String character);
}
// Varargs {{it}} template: ingredients are rendered as a bracketed list, and the
// stubbed JSON response must deserialize into Recipe.
@Test
void test_create_recipe_from_list_of_ingredients() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
// this is supposed to be a string inside a json string hence all the escaping...
"{\\n\\\"title\\\": \\\"Greek Salad\\\",\\n\\\"description\\\": \\\"A refreshing and tangy salad with Mediterranean flavors.\\\",\\n\\\"steps\\\": [\\n\\\"Chop, dice, and slice.\\\",\\n\\\"Mix veggies with feta.\\\",\\n\\\"Drizzle with olive oil.\\\",\\n\\\"Toss gently, then serve.\\\"\\n],\\n\\\"preparationTimeMinutes\\\": 15\\n}"));
Chef chef = AiServices.create(Chef.class, createChatModel());
Recipe result = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives");
assertThat(result.title).isNotBlank();
assertThat(result.description).isNotBlank();
assertThat(result.steps).isNotEmpty();
assertThat(result.preparationTimeMinutes).isPositive();
// format instruction includes the @Description text from Recipe.steps
assertSingleRequestMessage(getRequestAsMap(),
"Create recipe using only [cucumber, tomato, feta, onion, olives]\nYou must answer strictly in the following JSON format: {\n\"title\": (type: string),\n\"description\": (type: string),\n\"steps\": (each step should be described in 4 words, steps should rhyme; type: array of string),\n\"preparationTimeMinutes\": (type: integer),\n}");
}
// @StructuredPrompt template: dish and ingredient list come from the prompt
// object's fields instead of method parameters.
@Test
void test_create_recipe_using_structured_prompt() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
// this is supposed to be a string inside a json string hence all the escaping...
"{\\n\\\"title\\\": \\\"Greek Salad\\\",\\n\\\"description\\\": \\\"A refreshing and tangy salad with Mediterranean flavors.\\\",\\n\\\"steps\\\": [\\n\\\"Chop, dice, and slice.\\\",\\n\\\"Mix veggies with feta.\\\",\\n\\\"Drizzle with olive oil.\\\",\\n\\\"Toss gently, then serve.\\\"\\n],\\n\\\"preparationTimeMinutes\\\": 15\\n}"));
Chef chef = AiServices.create(Chef.class, createChatModel());
Recipe result = chef
.createRecipeFrom(new CreateRecipePrompt("salad", List.of("cucumber", "tomato", "feta", "onion", "olives")));
assertThat(result.title).isNotBlank();
assertThat(result.description).isNotBlank();
assertThat(result.steps).isNotEmpty();
assertThat(result.preparationTimeMinutes).isPositive();
assertSingleRequestMessage(getRequestAsMap(),
"Create a recipe of a salad that can be prepared using only [cucumber, tomato, feta, onion, olives]\nYou must answer strictly in the following JSON format: {\n\"title\": (type: string),\n\"description\": (type: string),\n\"steps\": (each step should be described in 4 words, steps should rhyme; type: array of string),\n\"preparationTimeMinutes\": (type: integer),\n}");
}
// Structured prompt plus templated @SystemMessage: {{character}} is filled from
// the second argument, producing a system + user message pair.
@Test
void test_create_recipe_using_structured_prompt_and_system_message() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
// this is supposed to be a string inside a json string hence all the escaping...
"{\\n\\\"title\\\": \\\"Greek Medley Salad\\\",\\n\\\"description\\\": \\\"A refreshing and tangy salad with a Mediterranean twist.\\\",\\n\\\"steps\\\": [\\n\\\"Slice and dice, precise!\\\",\\n\\\"Mix and toss, no loss!\\\",\\n\\\"Sprinkle feta, get betta!\\\",\\n\\\"Garnish with olives, no jives!\\\"\\n],\\n\\\"preparationTimeMinutes\\\": 15\\n}"));
Chef chef = AiServices.create(Chef.class, createChatModel());
Recipe result = chef
.createRecipeFrom(new CreateRecipePrompt("salad", List.of("cucumber", "tomato", "feta", "onion", "olives")),
"funny");
assertThat(result.title).isEqualTo("Greek Medley Salad");
assertThat(result.description).isNotBlank();
assertThat(result.steps).hasSize(4).satisfies(strings -> {
assertThat(strings[0]).contains("Slice and dice");
assertThat(strings[3]).contains("jives");
});
assertThat(result.preparationTimeMinutes).isEqualTo(15);
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", "You are a very funny chef"),
new MessageContent("user",
"Create a recipe of a salad that can be prepared using only [cucumber, tomato, feta, onion, olives]\nYou must answer strictly in the following JSON format: {\n\"title\": (type: string),\n\"description\": (type: string),\n\"steps\": (each step should be described in 4 words, steps should rhyme; type: array of string),\n\"preparationTimeMinutes\": (type: integer),\n}")));
}
// Interface-level @SystemMessage is the default; a method-level one (answer2)
// overrides it — both cases are asserted by the two tests below.
@SystemMessage("You are a professional chef. You are friendly, polite and concise.")
interface ProfessionalChef {
String answer(String question);
@SystemMessage("You are an amateur.")
String answer2(String question);
}
// answer() has no method-level @SystemMessage, so the interface-level one is used.
@Test
void test_with_system_message_of_first_method() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Grilling chicken typically takes around 10-15 minutes per side, depending on the thickness of the chicken. It's important to ensure the internal temperature reaches 165°F (74°C) for safe consumption."));
ProfessionalChef chef = AiServices.create(ProfessionalChef.class, createChatModel());
String result = chef.answer("How long should I grill chicken?");
assertThat(result).contains("Grilling chicken typically");
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", "You are a professional chef. You are friendly, polite and concise."),
new MessageContent("user",
"How long should I grill chicken?")));
}
// answer2()'s method-level @SystemMessage overrides the interface-level one.
@Test
void test_with_system_message_of_second_method() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Grilling chicken typically takes around 10-15 minutes per side, depending on the thickness of the chicken. It's important to ensure the internal temperature reaches 165°F (74°C) for safe consumption."));
ProfessionalChef chef = AiServices.create(ProfessionalChef.class, createChatModel());
String result = chef.answer2("How long should I grill chicken?");
assertThat(result).contains("Grilling chicken typically");
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", "You are an amateur."),
new MessageContent("user",
"How long should I grill chicken?")));
}
// Named template variables via @V: {{text}} and {{lang}} are bound explicitly
// to the annotated parameters in both the system and user message templates.
interface Translator {
@SystemMessage("You are a professional translator into {{lang}}")
@UserMessage("Translate the following text: {{text}}")
String translate(@V("text") String text, @V("lang") String language);
}
// Both templates are filled from the same call: {{lang}} in the system message
// and {{text}} in the user message.
@Test
void test_with_system_and_user_messages() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Hallo, wie geht es dir?"));
Translator translator = AiServices.create(Translator.class, createChatModel());
String translation = translator.translate("Hello, how are you?", "german");
assertThat(translation).isEqualTo("Hallo, wie geht es dir?");
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", "You are a professional translator into german"),
new MessageContent("user",
"Translate the following text: Hello, how are you?")));
}
// @UserMessage on a parameter sends that argument as the user message;
// the int parameter fills {{n}} in the system template (and acts as @MemoryId).
interface Summarizer {
@SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
List<String> summarize(@UserMessage String text, @MemoryId int n);
}
// List<String> return type: the bullet-point response is split into list items,
// and a "one item per line" instruction is appended to the user message.
@Test
void test_with_system_message_and_user_message_as_argument() throws IOException {
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"- AI is a branch of computer science\\n- AI aims to create machines that mimic human intelligence\\n- AI can perform tasks like recognizing patterns, making decisions, and predictions"));
Summarizer summarizer = AiServices.create(Summarizer.class, createChatModel());
String text = "AI, or artificial intelligence, is a branch of computer science that aims to create " +
"machines that mimic human intelligence. This can range from simple tasks such as recognizing " +
"patterns or speech to more complex tasks like making decisions or predictions.";
List<String> bulletPoints = summarizer.summarize(text, 3);
assertThat(bulletPoints).hasSize(3).satisfies(list -> {
assertThat(list.get(0)).contains("branch");
assertThat(list.get(2)).contains("predictions");
});
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system",
"Summarize every message from user in 3 bullet points. Provide only bullet points."),
new MessageContent("user", text + "\nYou must put every item on a separate line.")));
}
// @Moderate routes each request through the moderation model; flagged content
// makes the call throw ModerationException (see tests below).
interface ChatWithModeration {
@Moderate
String chat(String message);
}
// Stubs the moderation endpoint with "flagged": true and asserts that @Moderate
// turns that into a ModerationException with the exact message text.
@Test
void should_throw_when_text_is_flagged() {
wireMockServer.stubFor(WiremockUtils.moderationMapping(DEFAULT_TOKEN)
.willReturn(aResponse()
.withHeader("Content-Type", "application/json")
.withBody(
"""
{
"id": "modr-8Bmx2bYNsgzuAsSuxaQRDCMKHgJbC",
"model": "text-moderation-006",
"results": [
{
"flagged": true,
"categories": {
"sexual": false,
"hate": true,
"harassment": false,
"self-harm": false,
"sexual/minors": false,
"hate/threatening": true,
"violence/graphic": false,
"self-harm/intent": false,
"self-harm/instructions": false,
"harassment/threatening": false,
"violence": false
},
"category_scores": {
"sexual": 0.0001485530665377155,
"hate": 0.00004570276360027492,
"harassment": 0.00006113418203312904,
"self-harm": 5.4490744361146426e-8,
"sexual/minors": 6.557503979820467e-7,
"hate/threatening": 7.536454127432535e-9,
"violence/graphic": 2.776141343474592e-7,
"self-harm/intent": 9.653235544249128e-9,
"self-harm/instructions": 1.2119762970996817e-9,
"harassment/threatening": 5.06949959344638e-7,
"violence": 0.000026839805286726914
}
}
]
}
""")));
ChatWithModeration chatWithModeration = AiServices.builder(ChatWithModeration.class)
.chatLanguageModel(createChatModel())
.moderationModel(createModerationModel())
.build();
assertThatThrownBy(() -> chatWithModeration.chat("I WILL KILL YOU!!!"))
.isExactlyInstanceOf(ModerationException.class)
.hasMessage("Text \"" + "I WILL KILL YOU!!!" + "\" violates content policy");
}
// Same moderation stub but with "flagged": false: the chat call must complete
// normally and return the (default-stubbed) chat completion.
@Test
void should_not_throw_when_text_is_not_flagged() {
wireMockServer.stubFor(WiremockUtils.moderationMapping(DEFAULT_TOKEN)
.willReturn(aResponse()
.withHeader("Content-Type", "application/json")
.withBody(
"""
{
"id": "modr-8Bmx2bYNsgzuAsSuxaQRDCMKHgJbC",
"model": "text-moderation-006",
"results": [
{
"flagged": false,
"categories": {
"sexual": false,
"hate": true,
"harassment": false,
"self-harm": false,
"sexual/minors": false,
"hate/threatening": false,
"violence/graphic": false,
"self-harm/intent": false,
"self-harm/instructions": false,
"harassment/threatening": false,
"violence": false
},
"category_scores": {
"sexual": 0.0001485530665377155,
"hate": 0.00004570276360027492,
"harassment": 0.00006113418203312904,
"self-harm": 5.4490744361146426e-8,
"sexual/minors": 6.557503979820467e-7,
"hate/threatening": 7.536454127432535e-9,
"violence/graphic": 2.776141343474592e-7,
"self-harm/intent": 9.653235544249128e-9,
"self-harm/instructions": 1.2119762970996817e-9,
"harassment/threatening": 5.06949959344638e-7,
"violence": 0.000026839805286726914
}
}
]
}
""")));
ChatWithModeration chatWithModeration = AiServices.builder(ChatWithModeration.class)
.chatLanguageModel(createChatModel())
.moderationModel(createModerationModel())
.build();
String result = chatWithModeration.chat("I will hug you");
assertThat(result).isNotBlank();
}
// Three variants used by the memory tests: no system message, one system
// message, and a different system message (to test memory + system interplay).
interface ChatWithMemory {
String chatWithoutSystemMessage(String userMessage);
@SystemMessage("You are helpful assistant")
String chatWithSystemMessage(String userMessage);
@SystemMessage("You are funny assistant")
String chatWithAnotherSystemMessage(String userMessage);
}
// Two sequential calls on a service with chat memory: the second request must
// replay the first exchange, and the memory must accumulate all four messages.
@Test
void should_keep_chat_memory() throws IOException {
MessageWindowChatMemory chatMemory = createChatMemory();
ChatWithMemory chatWithMemory = AiServices.builder(ChatWithMemory.class)
.chatLanguageModel(createChatModel())
.chatMemory(chatMemory)
.build();
/* **** First request **** */
String firstUserMessage = "Hello, my name is Klaus";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Nice to meet you Klaus"));
String firstAiMessage = chatWithMemory.chatWithoutSystemMessage(firstUserMessage);
// assert response
assertThat(firstAiMessage).isEqualTo("Nice to meet you Klaus");
// assert request
assertSingleRequestMessage(getRequestAsMap(), firstUserMessage)
// assert chat memory
assertThat(chatMemory.messages()).hasSize(2)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(USER, firstUserMessage), tuple(AI, firstAiMessage));
/* **** Second request **** */
wireMockServer.resetRequests();
String secondUserMessage = "What is my name?";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Your name is Klaus"));
String secondAiMessage = chatWithMemory.chatWithoutSystemMessage(secondUserMessage);
// assert response
assertThat(secondAiMessage).contains("Klaus");
// assert request: prior exchange must precede the new user message
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("user", firstUserMessage),
new MessageContent("assistant", firstAiMessage),
new MessageContent("user", secondUserMessage)));
// assert chat memory
assertThat(chatMemory.messages()).hasSize(4)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(USER, firstUserMessage), tuple(AI, firstAiMessage), tuple(USER, secondUserMessage),
tuple(AI, secondAiMessage));
}
// Two calls with the SAME system message: the system message must appear once
// (at the front) in both the second request and the accumulated memory.
@Test
void should_keep_chat_memory_and_not_duplicate_system_message() throws IOException {
MessageWindowChatMemory chatMemory = createChatMemory();
ChatWithMemory chatWithMemory = AiServices.builder(ChatWithMemory.class)
.chatLanguageModel(createChatModel())
.chatMemory(chatMemory)
.build();
/* **** First request **** */
String systemMessage = "You are helpful assistant";
String firstUserMessage = "Hello, my name is Klaus";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Nice to meet you Klaus"));
String firstAiMessage = chatWithMemory.chatWithSystemMessage(firstUserMessage);
// assert response
assertThat(firstAiMessage).isEqualTo("Nice to meet you Klaus");
// assert request
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", systemMessage),
new MessageContent("user", firstUserMessage)));
// assert chat memory
assertThat(chatMemory.messages()).hasSize(3)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(SYSTEM, systemMessage), tuple(USER, firstUserMessage), tuple(AI, firstAiMessage));
/* **** Second request **** */
wireMockServer.resetRequests();
String secondUserMessage = "What is my name?";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Your name is Klaus"));
String secondAiMessage = chatWithMemory.chatWithSystemMessage(secondUserMessage);
// assert response
assertThat(secondAiMessage).contains("Klaus");
// assert request: single system message, not duplicated
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", systemMessage),
new MessageContent("user", firstUserMessage),
new MessageContent("assistant", firstAiMessage),
new MessageContent("user", secondUserMessage)));
// assert chat memory
assertThat(chatMemory.messages()).hasSize(5)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(SYSTEM, systemMessage), tuple(USER, firstUserMessage), tuple(AI, firstAiMessage),
tuple(USER, secondUserMessage),
tuple(AI, secondAiMessage));
}
// Two calls with DIFFERENT system messages: the new system message replaces the
// old one and is inserted after the prior exchange in request order and memory.
@Test
void should_keep_chat_memory_and_add_new_system_message() throws IOException {
MessageWindowChatMemory chatMemory = createChatMemory();
ChatWithMemory chatWithMemory = AiServices.builder(ChatWithMemory.class)
.chatLanguageModel(createChatModel())
.chatMemory(chatMemory)
.build();
/* **** First request **** */
String firstSystemMessage = "You are helpful assistant";
String firstUserMessage = "Hello, my name is Klaus";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Nice to meet you Klaus"));
String firstAiMessage = chatWithMemory.chatWithSystemMessage(firstUserMessage);
// assert response
assertThat(firstAiMessage).isEqualTo("Nice to meet you Klaus");
// assert request
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("system", firstSystemMessage),
new MessageContent("user", firstUserMessage)));
// assert chat memory
assertThat(chatMemory.messages()).hasSize(3)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(SYSTEM, firstSystemMessage), tuple(USER, firstUserMessage), tuple(AI, firstAiMessage));
/* **** Second request **** */
wireMockServer.resetRequests();
String secondSystemMessage = "You are funny assistant";
String secondUserMessage = "What is my name?";
wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
"Your name is Klaus"));
String secondAiMessage = chatWithMemory.chatWithAnotherSystemMessage(secondUserMessage);
// assert response
assertThat(secondAiMessage).contains("Klaus");
// assert request: old system message dropped, new one placed before new user message
assertMultipleRequestMessage(getRequestAsMap(),
List.of(
new MessageContent("user", firstUserMessage),
new MessageContent("assistant", firstAiMessage),
new MessageContent("system", secondSystemMessage),
new MessageContent("user", secondUserMessage)));
// assert chat memory
assertThat(chatMemory.messages()).hasSize(5)
.extracting(ChatMessage::type, ChatMessage::text)
.containsExactly(tuple(USER, firstUserMessage), tuple(AI, firstAiMessage),
tuple(SYSTEM, secondSystemMessage), tuple(USER, secondUserMessage), tuple(AI, secondAiMessage));
}
// @MemoryId selects a per-user chat memory from the configured ChatMemoryProvider.
interface ChatWithSeparateMemoryForEachUser {
String chat(@MemoryId int memoryId, @UserMessage String userMessage);
}
    /**
     * Verifies that an AI service configured with a {@link ChatMemoryProvider} keeps a
     * fully isolated conversation per {@code @MemoryId}, and that each conversation is
     * round-tripped through a custom JSON-serializing {@link ChatMemoryStore} (emulating
     * persistent storage with an in-memory map).
     */
    @Test
    void should_keep_separate_chat_memory_for_each_user_in_store() throws IOException {
        // emulating persistent storage
        Map</* memoryId */ Object, String> persistentStorage = new HashMap<>();
        // store that (de)serializes messages to/from JSON, like a real persistent store would
        ChatMemoryStore store = new ChatMemoryStore() {
            @Override
            public List<ChatMessage> getMessages(Object memoryId) {
                return messagesFromJson(persistentStorage.get(memoryId));
            }
            @Override
            public void updateMessages(Object memoryId, List<ChatMessage> messages) {
                persistentStorage.put(memoryId, messagesToJson(messages));
            }
            @Override
            public void deleteMessages(Object memoryId) {
                persistentStorage.remove(memoryId);
            }
        };
        // every distinct memoryId gets its own 10-message window backed by the shared store
        ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(10)
                .chatMemoryStore(store)
                .build();
        int firstMemoryId = 1;
        int secondMemoryId = 2;
        ChatWithSeparateMemoryForEachUser chatWithMemory = AiServices.builder(ChatWithSeparateMemoryForEachUser.class)
                .chatLanguageModel(createChatModel())
                .chatMemoryProvider(chatMemoryProvider)
                .build();
        /* **** First request for user 1 **** */
        String firstMessageFromFirstUser = "Hello, my name is Klaus";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Nice to meet you Klaus"));
        String firstAiResponseToFirstUser = chatWithMemory.chat(firstMemoryId, firstMessageFromFirstUser);
        // assert response
        assertThat(firstAiResponseToFirstUser).isEqualTo("Nice to meet you Klaus");
        // assert request
        assertSingleRequestMessage(getRequestAsMap(), firstMessageFromFirstUser);
        // assert chat memory
        assertThat(store.getMessages(firstMemoryId)).hasSize(2)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromFirstUser), tuple(AI, firstAiResponseToFirstUser));
        /* **** First request for user 2 **** */
        // reset so getRequestAsMap() below sees exactly one captured request
        wireMockServer.resetRequests();
        String firstMessageFromSecondUser = "Hello, my name is Francine";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Nice to meet you Francine"));
        String firstAiResponseToSecondUser = chatWithMemory.chat(secondMemoryId, firstMessageFromSecondUser);
        // assert response
        assertThat(firstAiResponseToSecondUser).isEqualTo("Nice to meet you Francine");
        // assert request
        assertSingleRequestMessage(getRequestAsMap(), firstMessageFromSecondUser);
        // assert chat memory
        assertThat(store.getMessages(secondMemoryId)).hasSize(2)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromSecondUser), tuple(AI, firstAiResponseToSecondUser));
        /* **** Second request for user 1 **** */
        wireMockServer.resetRequests();
        String secondsMessageFromFirstUser = "What is my name?";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Your name is Klaus"));
        String secondAiMessageToFirstUser = chatWithMemory.chat(firstMemoryId, secondsMessageFromFirstUser);
        // assert response
        assertThat(secondAiMessageToFirstUser).contains("Klaus");
        // assert request: only user 1's history must have been sent, none of user 2's
        assertMultipleRequestMessage(getRequestAsMap(),
                List.of(
                        new MessageContent("user", firstMessageFromFirstUser),
                        new MessageContent("assistant", firstAiResponseToFirstUser),
                        new MessageContent("user", secondsMessageFromFirstUser)));
        // assert chat memory
        assertThat(store.getMessages(firstMemoryId)).hasSize(4)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromFirstUser), tuple(AI, firstAiResponseToFirstUser),
                        tuple(USER, secondsMessageFromFirstUser), tuple(AI, secondAiMessageToFirstUser));
        /* **** Second request for user 2 **** */
        wireMockServer.resetRequests();
        String secondsMessageFromSecondUser = "What is my name?";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Your name is Francine"));
        String secondAiMessageToSecondUser = chatWithMemory.chat(secondMemoryId, secondsMessageFromSecondUser);
        // assert response
        assertThat(secondAiMessageToSecondUser).contains("Francine");
        // assert request: symmetrically, only user 2's history is sent
        assertMultipleRequestMessage(getRequestAsMap(),
                List.of(
                        new MessageContent("user", firstMessageFromSecondUser),
                        new MessageContent("assistant", firstAiResponseToSecondUser),
                        new MessageContent("user", secondsMessageFromSecondUser)));
        // assert chat memory
        assertThat(store.getMessages(secondMemoryId)).hasSize(4)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromSecondUser), tuple(AI, firstAiResponseToSecondUser),
                        tuple(USER, secondsMessageFromSecondUser), tuple(AI, secondAiMessageToSecondUser));
    }
static class Calculator {
private final Runnable after;
Calculator(Runnable after) {
this.after = after;
}
@Tool("calculates the square root of the provided number")
double squareRoot(double number) {
var result = Math.sqrt(number);
after.run();
return result;
}
}
    /**
     * Verifies the two-phase tool-calling flow: the first stubbed completion asks for a
     * {@code squareRoot} function call, the tool executes (flipping the WireMock scenario
     * to its second state), and the second stubbed completion returns the final answer.
     */
    @Test
    void should_execute_tool_then_answer() throws IOException {
        // first model turn: a function_call requesting squareRoot(485906798473894056)
        var firstResponse = """
                {
                  "id": "chatcmpl-8D88Dag1gAKnOPP9Ed4bos7vSpaNz",
                  "object": "chat.completion",
                  "created": 1698140213,
                  "model": "gpt-3.5-turbo-0613",
                  "choices": [
                    {
                      "index": 0,
                      "message": {
                        "role": "assistant",
                        "content": null,
                        "function_call": {
                          "name": "squareRoot",
                          "arguments": "{\\n  \\"number\\": 485906798473894056\\n}"
                        }
                      },
                      "finish_reason": "function_call"
                    }
                  ],
                  "usage": {
                    "prompt_tokens": 65,
                    "completion_tokens": 20,
                    "total_tokens": 85
                  }
                }
                """;
        // second model turn: the final natural-language answer
        var secondResponse = """
                {
                  "id": "chatcmpl-8D88FIAUWSpwLaShFr0w8G1SWuVdl",
                  "object": "chat.completion",
                  "created": 1698140215,
                  "model": "gpt-3.5-turbo-0613",
                  "choices": [
                    {
                      "index": 0,
                      "message": {
                        "role": "assistant",
                        "content": "The square root of 485,906,798,473,894,056 in scientific notation is approximately 6.97070153193991E8."
                      },
                      "finish_reason": "stop"
                    }
                  ],
                  "usage": {
                    "prompt_tokens": 102,
                    "completion_tokens": 33,
                    "total_tokens": 135
                  }
                }
                """;
        // WireMock scenario: STARTED -> firstResponse; "second" -> secondResponse
        String scenario = "tools";
        String secondState = "second";
        wireMockServer.stubFor(
                WiremockUtils.chatCompletionMapping(DEFAULT_TOKEN)
                        .inScenario(scenario)
                        .whenScenarioStateIs(Scenario.STARTED)
                        .willReturn(WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody(firstResponse)));
        wireMockServer.stubFor(
                WiremockUtils.chatCompletionMapping(DEFAULT_TOKEN)
                        .inScenario(scenario)
                        .whenScenarioStateIs(secondState)
                        .willReturn(WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody(secondResponse)));
        wireMockServer.setScenarioState(scenario, Scenario.STARTED);
        // the Calculator's callback flips the scenario once the tool has actually run
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(createChatModel())
                .chatMemory(createChatMemory())
                .tools(new Calculator(() -> wireMockServer.setScenarioState(scenario, secondState)))
                .build();
        String userMessage = "What is the square root of 485906798473894056 in scientific notation?";
        String answer = assistant.chat(userMessage);
        assertThat(answer).isEqualTo(
                "The square root of 485,906,798,473,894,056 in scientific notation is approximately 6.97070153193991E8.");
        // two HTTP round-trips: initial request, then the follow-up carrying the tool result
        assertThat(wireMockServer.getAllServeEvents()).hasSize(2);
        assertSingleRequestMessage(getRequestAsMap(getRequestBody(wireMockServer.getAllServeEvents().get(1))),
                "What is the square root of 485906798473894056 in scientific notation?");
        assertMultipleRequestMessage(getRequestAsMap(getRequestBody(wireMockServer.getAllServeEvents().get(0))),
                List.of(
                        new MessageContent("user", "What is the square root of 485906798473894056 in scientific notation?"),
                        new MessageContent("assistant", null),
                        new MessageContent("function", "6.97070153193991E8")));
    }
private Map<String, Object> getRequestAsMap() throws IOException {
return getRequestAsMap(getRequestBody());
}
private Map<String, Object> getRequestAsMap(byte[] body) throws IOException {
return mapper.readValue(body, MAP_TYPE_REF);
}
private byte[] getRequestBody() {
assertThat(wireMockServer.getAllServeEvents()).hasSize(1);
ServeEvent serveEvent = wireMockServer.getAllServeEvents().get(0); // this works because we reset requests for Wiremock before each test
return getRequestBody(serveEvent);
}
private byte[] getRequestBody(ServeEvent serveEvent) {
LoggedRequest request = serveEvent.getRequest();
assertThat(request.getBody()).isNotEmpty();
return request.getBody();
}
}
| [
"dev.langchain4j.service.AiServices.create",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiModerationModel.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2980, 3074), 'org.jboss.shrinkwrap.api.ShrinkWrap.create'), ((3224, 3400), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3224, 3392), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3224, 3356), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3224, 3320), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3224, 3285), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3491, 3673), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((3491, 3665), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((3491, 3629), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((3491, 3593), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((3491, 3558), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((4444, 4537), 'dev.langchain4j.service.AiServices.create'), ((5003, 5088), 'dev.langchain4j.service.AiServices.create'), ((22023, 25036), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.moderationMapping'), ((25087, 25268), 'dev.langchain4j.service.AiServices.builder'), ((25087, 25243), 'dev.langchain4j.service.AiServices.builder'), ((25087, 25185), 'dev.langchain4j.service.AiServices.builder'), ((25612, 28627), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.moderationMapping'), ((28678, 28859), 'dev.langchain4j.service.AiServices.builder'), ((28678, 28834), 'dev.langchain4j.service.AiServices.builder'), ((28678, 28776), 'dev.langchain4j.service.AiServices.builder'), ((29475, 29634), 'dev.langchain4j.service.AiServices.builder'), ((29475, 29609), 'dev.langchain4j.service.AiServices.builder'), ((29475, 29569), 'dev.langchain4j.service.AiServices.builder'), ((31712, 31871), 'dev.langchain4j.service.AiServices.builder'), ((31712, 31846), 'dev.langchain4j.service.AiServices.builder'), ((31712, 31806), 'dev.langchain4j.service.AiServices.builder'), ((34302, 34461), 'dev.langchain4j.service.AiServices.builder'), ((34302, 34436), 
'dev.langchain4j.service.AiServices.builder'), ((34302, 34396), 'dev.langchain4j.service.AiServices.builder'), ((37776, 37937), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((37776, 37912), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((37776, 37872), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((37776, 37839), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((38063, 38257), 'dev.langchain4j.service.AiServices.builder'), ((38063, 38232), 'dev.langchain4j.service.AiServices.builder'), ((38063, 38176), 'dev.langchain4j.service.AiServices.builder'), ((45144, 45405), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45144, 45303), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45144, 45240), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45340, 45404), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody'), ((45456, 45713), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45456, 45610), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45456, 45552), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((45647, 45712), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody'), ((45817, 46080), 'dev.langchain4j.service.AiServices.builder'), ((45817, 46055), 'dev.langchain4j.service.AiServices.builder'), ((45817, 45954), 'dev.langchain4j.service.AiServices.builder'), ((45817, 45906), 'dev.langchain4j.service.AiServices.builder')] |
package dev.langchain4j.agent.tool.graalvm;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
class GraalVmJavaScriptExecutionToolIT {
OpenAiChatModel model = OpenAiChatModel.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.apiKey(System.getenv("OPENAI_API_KEY"))
.organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
.build();
interface Assistant {
String chat(String userMessage);
}
@Test
public void should_execute_tool() {
GraalVmJavaScriptExecutionTool tool = spy(new GraalVmJavaScriptExecutionTool());
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(model)
.tools(tool)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
String answer = assistant.chat("What is the square root of 485906798473894056 in scientific notation?");
assertThat(answer).contains("6.97");
verify(tool).executeJavaScriptCode(contains("485906798473894056"));
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((403, 626), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((403, 605), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((403, 536), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((403, 483), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((875, 1079), 'dev.langchain4j.service.AiServices.builder'), ((875, 1054), 'dev.langchain4j.service.AiServices.builder'), ((875, 981), 'dev.langchain4j.service.AiServices.builder'), ((875, 952), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.code.Judge0JavaScriptExecutionTool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import static java.time.Duration.ofSeconds;
public class ServiceWithDynamicToolsExample {

    interface Assistant {

        String chat(String message);
    }

    public static void main(String[] args) {

        // tool that lets the model execute JavaScript snippets via the Judge0 API
        Judge0JavaScriptExecutionTool codeExecutionTool = new Judge0JavaScriptExecutionTool(ApiKeys.RAPID_API_KEY);

        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .temperature(0.0)
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(codeExecutionTool)
                .build();

        interact(assistant, "What is the square root of 49506838032859?");
        interact(assistant, "Capitalize every third letter: abcabc");
        interact(assistant, "What is the number of hours between 17:00 on 21 Feb 1988 and 04:00 on 12 Apr 2014?");
    }

    // prints one question/answer exchange followed by two blank separator lines
    private static void interact(Assistant assistant, String userMessage) {
        System.out.println("[User]: " + userMessage);
        System.out.println("[Assistant]: " + assistant.chat(userMessage));
        System.out.println();
        System.out.println();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((634, 806), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((634, 781), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((634, 741), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((634, 707), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((839, 1061), 'dev.langchain4j.service.AiServices.builder'), ((839, 1036), 'dev.langchain4j.service.AiServices.builder'), ((839, 1001), 'dev.langchain4j.service.AiServices.builder'), ((839, 928), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
public class ServiceWithMemoryForEachUserExample {
interface Assistant {
String chat(@MemoryId int memoryId, @UserMessage String userMessage);
}
public static void main(String[] args) {
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
.chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
.build();
System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
// Hi Klaus! How can I assist you today?
System.out.println(assistant.chat(2, "Hello, my name is Francine"));
// Hello Francine! How can I assist you today?
System.out.println(assistant.chat(1, "What is my name?"));
// Your name is Klaus.
System.out.println(assistant.chat(2, "What is my name?"));
// Your name is Francine.
}
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((482, 722), 'dev.langchain4j.service.AiServices.builder'), ((482, 697), 'dev.langchain4j.service.AiServices.builder'), ((482, 604), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static org.mapdb.Serializer.STRING;
public class ServiceWithPersistentMemoryExample {

    interface Assistant {

        String chat(String message);
    }

    public static void main(String[] args) {

        // chat memory whose contents survive JVM restarts via the MapDB-backed store below
        ChatMemory chatMemory = MessageWindowChatMemory.builder()
                .maxMessages(10)
                .chatMemoryStore(new PersistentChatMemoryStore())
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
                .chatMemory(chatMemory)
                .build();

        String answer = assistant.chat("Hello! My name is Klaus.");
        System.out.println(answer); // Hello Klaus! How can I assist you today?

        // Now, comment out the two lines above, uncomment the two lines below, and run again.

        // String answerWithName = assistant.chat("What is my name?");
        // System.out.println(answerWithName); // Your name is Klaus.
    }

    // Example ChatMemoryStore backed by MapDB; you can plug in any persistence mechanism.
    static class PersistentChatMemoryStore implements ChatMemoryStore {

        private final DB db = DBMaker.fileDB("chat-memory.db").transactionEnable().make();
        private final Map<String, String> map = db.hashMap("messages", STRING, STRING).createOrOpen();

        @Override
        public List<ChatMessage> getMessages(Object memoryId) {
            return messagesFromJson(map.get((String) memoryId));
        }

        @Override
        public void updateMessages(Object memoryId, List<ChatMessage> messages) {
            map.put((String) memoryId, messagesToJson(messages));
            db.commit();
        }

        @Override
        public void deleteMessages(Object memoryId) {
            map.remove((String) memoryId);
            db.commit();
        }
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((809, 966), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((809, 941), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((809, 875), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((999, 1186), 'dev.langchain4j.service.AiServices.builder'), ((999, 1161), 'dev.langchain4j.service.AiServices.builder'), ((999, 1121), 'dev.langchain4j.service.AiServices.builder'), ((1792, 1851), 'org.mapdb.DBMaker.fileDB'), ((1792, 1844), 'org.mapdb.DBMaker.fileDB')] |
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.List;
import static java.time.Duration.ofSeconds;
import static java.util.Arrays.asList;
public class StructuredPromptTemplateExamples {

    // model shared by both examples below
    static ChatLanguageModel model = OpenAiChatModel.builder()
            .apiKey(ApiKeys.OPENAI_API_KEY)
            .timeout(ofSeconds(60))
            .build();

    static class Simple_Structured_Prompt_Example {

        @StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
        static class CreateRecipePrompt {

            private String dish;
            private List<String> ingredients;
        }

        public static void main(String[] args) {

            CreateRecipePrompt recipe = new CreateRecipePrompt();
            recipe.dish = "salad";
            recipe.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives");

            // render the annotated object into a prompt and send it to the model
            Prompt prompt = StructuredPromptProcessor.toPrompt(recipe);
            AiMessage answer = model.generate(prompt.toUserMessage()).content();

            System.out.println(answer.text());
        }
    }

    static class Multi_Line_Structured_Prompt_Example {

        @StructuredPrompt({
                "Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}.",
                "Structure your answer in the following way:",
                "Recipe name: ...",
                "Description: ...",
                "Preparation time: ...",
                "Required ingredients:",
                "- ...",
                "- ...",
                "Instructions:",
                "- ...",
                "- ..."
        })
        static class CreateRecipePrompt {

            private String dish;
            private List<String> ingredients;
        }

        public static void main(String[] args) {

            CreateRecipePrompt recipe = new CreateRecipePrompt();
            recipe.dish = "salad";
            recipe.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives");

            Prompt prompt = StructuredPromptProcessor.toPrompt(recipe);
            AiMessage answer = model.generate(prompt.toUserMessage()).content();

            System.out.println(answer.text());
        }
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((528, 654), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((528, 633), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((528, 597), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.net.InetSocketAddress;
import java.net.Proxy;
import static java.net.Proxy.Type.HTTP;
public class ProxyExample {

    public static void main(String[] args) {

        // route all OpenAI traffic through an HTTP proxy
        Proxy proxy = new Proxy(HTTP, new InetSocketAddress("39.175.77.7", 30001));

        OpenAiChatModel model = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .proxy(proxy)
                .build();

        String answer = model.generate("hello");
        System.out.println(answer);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((261, 444), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((261, 419), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((261, 334), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import java.io.IOException;
import static dev.langchain4j.data.message.UserMessage.userMessage;
public class ChatMemoryExamples {

    // See also ServiceWithMemoryExample and ServiceWithMemoryForEachUserExample

    public static class ConversationalChain_Example {

        public static void main(String[] args) throws IOException {

            ConversationalChain chain = ConversationalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
                    // .chatMemory() // you can override default chat memory
                    .build();

            String greetingAnswer = chain.execute("Hello, my name is Klaus");
            System.out.println(greetingAnswer); // Hello Klaus! How can I assist you today?

            String nameAnswer = chain.execute("What is my name?");
            System.out.println(nameAnswer); // Your name is Klaus.
        }
    }

    public static class If_You_Need_More_Control {

        public static void main(String[] args) {

            ChatLanguageModel chatModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);
            ChatMemory memory = TokenWindowChatMemory.withMaxTokens(300, new OpenAiTokenizer("gpt-3.5-turbo"));

            // With direct access to the memory you decide which messages enter it
            // (e.g. you could skip few-shot examples to save tokens) and you may
            // process/modify each message before storing it.
            memory.add(userMessage("Hello, my name is Klaus"));
            AiMessage aiGreeting = chatModel.generate(memory.messages()).content();
            System.out.println(aiGreeting.text()); // Hello Klaus! How can I assist you today?
            memory.add(aiGreeting);

            memory.add(userMessage("What is my name?"));
            AiMessage aiNameAnswer = chatModel.generate(memory.messages()).content();
            System.out.println(aiNameAnswer.text()); // Your name is Klaus.
            memory.add(aiNameAnswer);
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((736, 962), 'dev.langchain4j.chain.ConversationalChain.builder'), ((736, 856), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package com.google.cloud.devrel.docchat;
import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension;
import com.vladsch.flexmark.ext.tables.TablesExtension;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.ast.Node;
import com.vladsch.flexmark.util.data.MutableDataSet;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.vertexai.VertexAiEmbeddingModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;
import io.micronaut.core.io.IOUtils;
import io.micronaut.core.io.Readable;
import io.micronaut.core.io.ResourceResolver;
import jakarta.inject.Singleton;
import java.io.BufferedReader;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Optional;
/**
 * Answers Groovy documentation questions with a Vertex AI Gemini chat model,
 * using retrieval-augmented generation over a pre-computed in-memory embedding
 * store and a per-conversation chat memory.
 */
@Singleton
public class LLMQueryService {
    private final PromptTemplate promptTemplate;
    private final VertexAiEmbeddingModel embeddingModel;
    private final VertexAiGeminiChatModel geminiChatModel;
    private final InMemoryEmbeddingStore<TextSegment> embeddingStore;
    private final InMemoryChatMemoryStore chatMemoryStore;
    private final EmbeddingStoreRetriever retriever;
    public LLMQueryService() {
        // embedding model used to embed the user's question for similarity search
        this.embeddingModel = VertexAiEmbeddingModel.builder()
            .endpoint("us-central1-aiplatform.googleapis.com:443")
            .project("genai-java-demos")
            .location("us-central1")
            .publisher("google")
            .modelName("textembedding-gecko@latest")
            .maxRetries(3)
            .build();
        this.geminiChatModel = VertexAiGeminiChatModel.builder()
            .project("genai-java-demos")
            .location("us-central1")
            .modelName("gemini-pro")
            .temperature(0.3f)
            .maxRetries(3)
            .build();
        // Fail fast with a descriptive error when the serialized embedding store is
        // absent from the classpath (previously an unchecked Optional.get() that would
        // throw a bare NoSuchElementException with no context).
        URL resource = new ResourceResolver()
                .getResource("classpath:saved-embedding-store.json")
                .orElseThrow(() -> new IllegalStateException(
                        "Missing classpath resource: saved-embedding-store.json"));
        Readable savedEmbeddingStore = Readable.of(resource);
        String storeJson = "{}";
        try {
            storeJson = IOUtils.readText(new BufferedReader(savedEmbeddingStore.asReader()));
            System.out.println("Read " + storeJson.length() + " bytes of saved embeddings");
        } catch (IOException ioe) {
            // best-effort: fall back to an empty store rather than aborting startup
            System.err.println("Impossible to read saved embeddings");
            ioe.printStackTrace();
        }
        this.embeddingStore = InMemoryEmbeddingStore.fromJson(storeJson);
        System.out.println("In-memory embedding store loaded");
        this.retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel);
        this.promptTemplate = PromptTemplate.from("""
                You are an expert in the Apache Groovy programming language.
                You are also knowledgeable in the Java language, but be sure to write idiomatic Groovy code in your answers.
                You excel at teaching and explaining concepts of the language.
                If you don't know the answer to the question, say that you don't know the answer, and that the user should refer to the Groovy documentation.
                Answer the following question to the best of your ability:
                Using the Groovy language, {{question}}
                Base your answer exclusively on the following information from the Groovy documentation:
                {{information}}
                """);
        this.chatMemoryStore = new InMemoryChatMemoryStore();
    }
    /**
     * Runs one RAG turn: retrieves relevant documentation segments, queries the
     * chat model with the chat history identified by {@code chatId}, and renders
     * the model's Markdown response to HTML.
     *
     * @param query  the user's question
     * @param chatId identifier of the conversation whose memory should be used
     * @return the model's answer rendered as HTML
     */
    public String executeWithMemory(String query, String chatId) {
        // window of 11 messages per conversation, persisted in the in-memory store
        MessageWindowChatMemory chatMemory = MessageWindowChatMemory.builder()
            .chatMemoryStore(chatMemoryStore)
            .id(chatId)
            .maxMessages(11)
            .build();
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
            .chatLanguageModel(geminiChatModel)
            .chatMemory(chatMemory)
            .promptTemplate(promptTemplate)
            .retriever(retriever)
            .build();
        System.out.println("query = " + query);
        String response = chain.execute(query);
        System.out.println("response = " + response);
        return renderMarkdownToHtml(response);
    }
    // Converts the model's Markdown (with GFM tables and strikethrough) to HTML.
    private static String renderMarkdownToHtml(String markdown) {
        MutableDataSet options = new MutableDataSet();
        options.set(Parser.EXTENSIONS, Arrays.asList(TablesExtension.create(), StrikethroughExtension.create()));
        Parser parser = Parser.builder(options).build();
        HtmlRenderer renderer = HtmlRenderer.builder(options).build();
        Node document = parser.parse(markdown);
        return renderer.render(document);
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder",
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((1644, 1955), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1934), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1907), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1854), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1821), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1784), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1644, 1743), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((1989, 2216), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1989, 2195), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1989, 2168), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1989, 2137), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1989, 2100), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1989, 2063), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((4003, 4156), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((4003, 4135), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((4003, 4106), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((4003, 4082), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((4204, 4425), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((4204, 4404), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((4204, 4370), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((4204, 4326), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((4204, 4290), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((4893, 4924), 'com.vladsch.flexmark.parser.Parser.builder'), ((4958, 4995), 'com.vladsch.flexmark.html.HtmlRenderer.builder')] |
package org.open4goods.ui.config;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import org.open4goods.dao.ProductRepository;
import org.open4goods.helper.DevModeService;
import org.open4goods.model.constants.CacheConstants;
import org.open4goods.model.constants.Currency;
import org.open4goods.model.data.Price;
import org.open4goods.model.product.Product;
import org.open4goods.services.BarcodeValidationService;
import org.open4goods.services.BrandService;
import org.open4goods.services.DataSourceConfigService;
import org.open4goods.services.EvaluationService;
import org.open4goods.services.FeedbackService;
import org.open4goods.services.GoogleTaxonomyService;
import org.open4goods.services.ImageMagickService;
import org.open4goods.services.MailService;
import org.open4goods.services.RecaptchaService;
import org.open4goods.services.RemoteFileCachingService;
import org.open4goods.services.ResourceService;
import org.open4goods.services.SearchService;
import org.open4goods.services.SerialisationService;
import org.open4goods.services.StandardiserService;
import org.open4goods.services.VerticalsConfigService;
import org.open4goods.services.XwikiService;
import org.open4goods.services.ai.AiAgent;
import org.open4goods.services.ai.AiService;
import org.open4goods.ui.config.yml.UiConfig;
import org.open4goods.ui.services.BlogService;
import org.open4goods.ui.services.GtinService;
import org.open4goods.ui.services.ImageService;
import org.open4goods.ui.services.OpenDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.CacheManager;
import org.springframework.cache.caffeine.CaffeineCache;
import org.springframework.cache.support.SimpleCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.web.firewall.HttpFirewall;
import org.springframework.security.web.firewall.StrictHttpFirewall;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.i18n.AcceptHeaderLocaleResolver;
import org.springframework.web.servlet.i18n.LocaleChangeInterceptor;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.Ticker;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
@Configuration
public class AppConfig {

    // TODO : Cache period pageNumber conf
    /** Default cache period: one week, expressed in seconds. */
    public static final int CACHE_PERIOD_SECONDS = 3600 * 24 * 7;

    /** Application-wide UI configuration, injected once at startup. */
    private final UiConfig config;

    public AppConfig(UiConfig config) {
        this.config = config;
    }

    /** Validates barcodes (GTIN/EAN/UPC style codes). */
    @Bean
    BarcodeValidationService barcodeValidationService() {
        return new BarcodeValidationService();
    }

    /** Dev-mode helper service, driven by the devModeConfig section of the UI config. */
    @Bean
    @Autowired
    public DevModeService devModeService(ProductRepository repository, SerialisationService serialisationService, VerticalsConfigService verticalsConfigService) {
        return new DevModeService(config.getDevModeConfig(), repository, serialisationService, verticalsConfigService);
    }

    /** Redis template for Product values: String keys, JSON-serialized values. */
    @Bean
    public RedisTemplate<String, Product> redisTemplate(RedisConnectionFactory connectionFactory) {
        RedisTemplate<String, Product> template = new RedisTemplate<>();
        template.setConnectionFactory(connectionFactory);
        // Configure serialization
        template.setKeySerializer(new StringRedisSerializer());
        template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
        // Add some specific configuration here. Key serializers, etc.
        return template;
    }

    /** Blog service backed by the XWiki instance. */
    @Bean
    public BlogService blogService(@Autowired XwikiService xwikiService, @Autowired UiConfig config) {
        return new BlogService(xwikiService, config.getBlogConfig(), config.getNamings().getBaseUrls());
    }

    /** User feedback handling, configured from the feedback section. */
    @Bean
    FeedbackService feedbackService(@Autowired UiConfig config) {
        return new FeedbackService(config.getFeedbackConfig());
    }

    /** Image manipulation service (resize/convert via ImageMagick). */
    @Bean
    ImageService imageService(@Autowired ImageMagickService imageMagickService, @Autowired ResourceService resourceService) {
        return new ImageService(imageMagickService, resourceService);
    }

    /** AI facade combining the LLM agent, verticals config and SpEL evaluation. */
    @Bean
    @Autowired
    AiService aiService(AiAgent nudgerAgent, VerticalsConfigService verticalService, EvaluationService spelEvaluationService) {
        return new AiService(nudgerAgent, verticalService, spelEvaluationService);
    }

    /** langchain4j-generated implementation of the AiAgent interface. */
    @Bean
    AiAgent nudgerAgent(@Autowired ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(AiAgent.class)
                .chatLanguageModel(chatLanguageModel)
                // .retriever(retriever)
                .build();
    }

    /** Brand resolution service; brand lists are fetched through the remote-file cache. */
    @Bean
    BrandService brandService(@Autowired RemoteFileCachingService rfc, @Autowired UiConfig properties) {
        return new BrandService(properties.getBrandConfig(), rfc);
    }

    /** Open-data export service over the aggregated product repository. */
    @Bean
    OpenDataService openDataService(@Autowired ProductRepository aggregatedDataRepository, @Autowired UiConfig props) {
        return new OpenDataService(aggregatedDataRepository, props);
    }

    // TODO(note) : DISABLING SITE MAP GENERATION
    // @Bean
    // public SitemapGenerationService sitemapGenerationService (@Autowired ProductRepository aggregatedDataRepository, @Autowired UiConfig props ) {
    // return new SitemapGenerationService(aggregatedDataRepository, props);
    // }
    //

    /** Authentication against the XWiki user base. */
    @Bean
    AuthenticationProvider capsuleAuthenticationProvider(@Autowired XwikiService xwikiService) {
        return new XwikiAuthenticationProvider(xwikiService);
    }

    @Bean
    XwikiService xwikiService(@Autowired UiConfig props) {
        return new XwikiService(props.getWikiConfig());
    }

    /** The bean providing datasource configurations **/
    @Bean
    DataSourceConfigService datasourceConfigService(@Autowired final UiConfig config) {
        return new DataSourceConfigService(config.getDatasourcesfolder());
    }

    @Bean
    RecaptchaService recaptchaService() {
        return new RecaptchaService();
    }

    @Bean
    MailService mailService(@Autowired final JavaMailSender sender) {
        return new MailService(sender);
    }

    @Bean
    ImageMagickService imageMagickService() {
        return new ImageMagickService();
    }

    /** Local caching of remote resources, rooted at the configured cache folder. */
    @Bean
    ResourceService resourceService() {
        return new ResourceService(config.getRemoteCachingFolder());
    }

    @Bean
    GtinService gtinService(@Autowired ResourceService resourceService) {
        return new GtinService(resourceService);
    }

    @Bean
    ProductRepository aggregatedDataRepo() {
        return new ProductRepository();
    }

    /** Product search over the aggregated repository; query logs go to the UI logs folder. */
    @Bean
    SearchService searchService(@Autowired ProductRepository aggregatedDataRepository, @Autowired final UiConfig uiconfig) {
        return new SearchService(aggregatedDataRepository, uiconfig.logsFolder());
    }

    /** No-op standardiser: prices/currencies are left untouched in the UI module. */
    @Bean
    StandardiserService standardiserService() {
        return new StandardiserService() {
            @Override
            public void standarise(final Price price, final Currency currency) {
            }
        };
    }

    @Bean
    EvaluationService evaluationService() {
        return new EvaluationService();
    }

    /** Verticals (category) configuration loader, reading YAML definitions from the configured folder. */
    @Bean
    @Autowired
    VerticalsConfigService verticalConfigsService(ResourcePatternResolver resourceResolver, SerialisationService serialisationService, GoogleTaxonomyService googleTaxonomyService, ProductRepository productRepository) throws IOException {
        return new VerticalsConfigService(serialisationService, config.getVerticalsFolder(), googleTaxonomyService, productRepository, resourceResolver);
    }

    ////////////////////////////////////
    // Locale resolution
    ////////////////////////////////////

    @Bean
    LocaleResolver localeResolver() {
        return new AcceptHeaderLocaleResolver();
    }

    /** Allows switching locale through the "lang" request parameter. */
    static LocaleChangeInterceptor localeChangeInterceptor() {
        final LocaleChangeInterceptor lci = new LocaleChangeInterceptor();
        lci.setParamName("lang");
        return lci;
    }

    /**
     * Permissive HTTP firewall.
     * NOTE(review): this relaxes most StrictHttpFirewall protections (encoded
     * slashes/percent, backslashes, null bytes, semicolons, any HTTP method).
     * Confirm this level of permissiveness is really required.
     */
    @Bean
    HttpFirewall allowUrlEncodedSlashHttpFirewall() {
        StrictHttpFirewall firewall = new StrictHttpFirewall();
        firewall.setAllowUrlEncodedSlash(true);
        firewall.setAllowBackSlash(true);
        firewall.setAllowNull(true);
        firewall.setAllowSemicolon(true);
        // Was called twice in a row in the original; one call is sufficient.
        firewall.setUnsafeAllowAnyHttpMethod(true);
        firewall.setAllowUrlEncodedPercent(true);
        return firewall;
    }

    //////////////////////////////////////////////
    // The uidMap managers
    //////////////////////////////////////////////

    /** Caffeine-backed cache manager exposing the four standard cache regions. */
    @Bean
    CacheManager cacheManager(@Autowired final Ticker ticker) {
        final CaffeineCache fCache = buildCache(CacheConstants.FOREVER_LOCAL_CACHE_NAME, ticker, 30000000);
        final CaffeineCache hCache = buildCache(CacheConstants.ONE_HOUR_LOCAL_CACHE_NAME, ticker, 60);
        final CaffeineCache mCache = buildCache(CacheConstants.ONE_MINUTE_LOCAL_CACHE_NAME, ticker, 1);
        final CaffeineCache dCache = buildCache(CacheConstants.ONE_DAY_LOCAL_CACHE_NAME, ticker, 60 * 24);
        final SimpleCacheManager manager = new SimpleCacheManager();
        manager.setCaches(Arrays.asList(fCache, dCache, hCache, mCache));
        return manager;
    }

    /** Builds a named Caffeine cache whose entries expire after the given number of minutes. */
    private CaffeineCache buildCache(final String name, final Ticker ticker, final int minutesToExpire) {
        return new CaffeineCache(name,
                Caffeine.newBuilder().expireAfterWrite(minutesToExpire, TimeUnit.MINUTES).ticker(ticker).build());
    }

    @Bean
    Ticker ticker() {
        return Ticker.systemTicker();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((4958, 5130), 'dev.langchain4j.service.AiServices.builder'), ((4958, 5046), 'dev.langchain4j.service.AiServices.builder'), ((9475, 9571), 'com.github.benmanes.caffeine.cache.Caffeine.newBuilder'), ((9475, 9563), 'com.github.benmanes.caffeine.cache.Caffeine.newBuilder'), ((9475, 9548), 'com.github.benmanes.caffeine.cache.Caffeine.newBuilder')] |
package com.example.demo.configuration;
import dev.langchain4j.model.azure.AzureOpenAiChatModel;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ChatModelConfiguration {

    /**
     * Chat model backed by Azure OpenAI, registered as the application's
     * ChatLanguageModel bean. Endpoint and key come from the environment.
     */
    @Bean
    ChatLanguageModel openAIChatLanguageModel() {
        String endpoint = System.getenv("AZURE_OPENAI_ENDPOINT");
        String apiKey = System.getenv("AZURE_OPENAI_KEY");
        return AzureOpenAiChatModel.builder()
                .endpoint(endpoint)
                .apiKey(apiKey)
                .deploymentName("gpt-4")
                .logRequestsAndResponses(true)
                .build();
    }

    /**
     * Chat model backed by Mistral AI.
     * NOTE(review): this method carries no @Bean annotation, so it is not
     * registered in the Spring context — kept as-is to preserve behavior;
     * confirm whether that is intentional.
     */
    ChatLanguageModel mistralAIChatLanguageModel() {
        String baseUrl = System.getenv("MISTRAL_AI_BASE_URL");
        String apiKey = System.getenv("MISTRAL_AI_KEY");
        return MistralAiChatModel.builder()
                .baseUrl(baseUrl)
                .apiKey(apiKey)
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder",
"dev.langchain4j.model.azure.AzureOpenAiChatModel.builder"
] | [((453, 721), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((453, 696), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((453, 649), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((453, 608), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((453, 549), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((798, 1042), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((798, 1017), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((798, 981), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((798, 946), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((798, 889), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
package io.github.btarg.javaOpenAI.openai;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.UserName;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.github.btarg.javaOpenAI.openai.memory.PersistentChatMemoryStore;
import io.github.btarg.javaOpenAI.openai.tools.Calculator;
import io.github.btarg.javaOpenAI.openai.tools.CommandTool;
import io.github.btarg.javaOpenAI.openai.tools.ItemTool;
import io.github.btarg.javaOpenAI.openai.tools.PlayerTool;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
public class ChatGPTAPI {

    ChatMemoryStore chatMemoryStore;

    public ChatGPTAPI(ChatMemoryStore chatMemoryStore) {
        this.chatMemoryStore = chatMemoryStore;
    }

    /**
     * Sends a player's message to the OpenAI-backed assistant and returns its reply.
     * Conversation history is keyed by the player's UUID and held in a
     * persistent, windowed memory (last 10 messages).
     */
    public String GetResponse(Player sender, String message) {
        String memoryKey = sender.getUniqueId().toString();

        // In-game tools the model may invoke while composing an answer.
        Object[] availableTools = {
                new Calculator(),
                new CommandTool(),
                new PlayerTool(),
                new ItemTool()
        };

        ChatMemory memory = MessageWindowChatMemory.builder()
                .maxMessages(10)
                .chatMemoryStore(new PersistentChatMemoryStore())
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(System.getenv("OPENAI_API_KEY")))
                .tools(availableTools)
                .chatMemory(memory)
                .build();

        return assistant.chat(memoryKey, message);
    }

    interface Assistant {
        String chat(@UserName String userUUID, @UserMessage String userMessage);
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((1290, 1447), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1290, 1422), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1290, 1356), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1480, 1706), 'dev.langchain4j.service.AiServices.builder'), ((1480, 1681), 'dev.langchain4j.service.AiServices.builder'), ((1480, 1641), 'dev.langchain4j.service.AiServices.builder'), ((1480, 1611), 'dev.langchain4j.service.AiServices.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
@Service
@Slf4j
/**
 * One-pass text-to-SQL generation with auto-CoT and self-consistency voting:
 * builds several few-shot prompts from retrieved SQL examples, queries the
 * chat model in parallel, then votes on the most consistent schema linking
 * and SQL among the responses.
 */
public class OnePassSCSqlGeneration implements SqlGeneration, InitializingBean {
// Dedicated logger for the key prompt/response pipeline.
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@Autowired
private SqlExamplarLoader sqlExamplarLoader;
@Autowired
private OptimizationConfig optimizationConfig;
@Autowired
private SqlPromptGenerator sqlPromptGenerator;
/**
 * Generates SQL for the given LLM request and data set.
 * Returns an LLMResp carrying the original query text and a map of
 * candidate SQL responses weighted by self-consistency votes.
 */
@Override
public LLMResp generation(LLMReq llmReq, Long dataSetId) {
//1.retriever sqlExamples and generate exampleListPool
keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlExampleNum());
List<List<Map<String, String>>> exampleListPool = sqlPromptGenerator.getExampleCombos(sqlExamples,
optimizationConfig.getText2sqlFewShotsNum(), optimizationConfig.getText2sqlSelfConsistencyNum());
//2.generator linking and sql prompt by sqlExamples,and parallel generate response.
List<String> linkingSqlPromptPool = sqlPromptGenerator.generatePromptPool(llmReq, exampleListPool, true);
// Thread-safe accumulator: prompts are evaluated concurrently below.
List<String> llmResults = new CopyOnWriteArrayList<>();
linkingSqlPromptPool.parallelStream().forEach(linkingSqlPrompt -> {
Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingSqlPrompt))
.apply(new HashMap<>());
keyPipelineLog.info("request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
String result = response.content().text();
llmResults.add(result);
keyPipelineLog.info("model response:{}", result);
}
);
//3.format response.
// Majority vote across responses for the schema-linking part...
List<String> schemaLinkingResults = llmResults.stream()
.map(llmResult -> OutputFormat.getSchemaLinks(llmResult)).collect(Collectors.toList());
List<String> candidateSortedList = OutputFormat.formatList(schemaLinkingResults);
Pair<String, Map<String, Double>> linkingMap = OutputFormat.selfConsistencyVote(candidateSortedList);
// ...and independently for the generated SQL statements.
List<String> sqlList = llmResults.stream()
.map(llmResult -> OutputFormat.getSql(llmResult)).collect(Collectors.toList());
Pair<String, Map<String, Double>> sqlMapPair = OutputFormat.selfConsistencyVote(sqlList);
keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMapPair.getRight());
LLMResp result = new LLMResp();
result.setQuery(llmReq.getQueryText());
result.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMapPair.getRight()));
return result;
}
// Registers this strategy in the factory under its SqlGenerationMode key.
@Override
public void afterPropertiesSet() {
SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.ONE_PASS_AUTO_COT_SELF_CONSISTENCY, this);
}
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((2569, 2677), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package org.agoncal.fascicle.langchain4j.firstlook;
// tag::adocSnippet[]
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import java.net.Proxy;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class AuthorService {

    private static final String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");

    // private String[] scifiAuthors = {"Isaac Asimov", "Nora Jemisin", "Douglas Adams"};

    public static void main(String[] args) {
        AuthorService authorService = new AuthorService();
        System.out.println(authorService.getIsaacAsimovBiography());
    }

    /**
     * Asks the OpenAI chat model for a short biography of Isaac Asimov.
     * All traffic is routed through a local HTTP proxy on localhost:8089.
     */
    public String getIsaacAsimovBiography() {
        //.baseUrl("http://localhost:8089")
        Proxy localProxy = new Proxy(Proxy.Type.HTTP, new java.net.InetSocketAddress("localhost", 8089));

        ChatLanguageModel model = OpenAiChatModel.builder()
                .proxy(localProxy)
                .apiKey(OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.3)
                .timeout(ofSeconds(60))
                .logRequests(true)
                .logResponses(true)
                .build();

        return model.generate("Write a short biography about Isaac Asimov");
    }

    //  @GET
    //  @Path("/{index}")
    //  public String getAuthorBiography(@PathParam("index") int index) {
    //
    //    ChatLanguageModel model = OpenAiChatModel.builder()
    //      .apiKey(OPENAI_API_KEY)
    //      .modelName(GPT_3_5_TURBO)
    //      .temperature(0.3)
    //      .timeout(ofSeconds(60))
    //      .logRequests(true)
    //      .logResponses(true)
    //      .build();
    //
    //    String prompt = "Write a short biography about " + scifiAuthors[index];
    //
    //    String biography = model.generate(prompt);
    //
    //    return biography;
    //  }
}
// end::adocSnippet[]
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((884, 1225), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1210), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1184), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1159), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1129), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1105), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1073), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((884, 1043), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.github.gabrielpadilh4.services;
import java.util.HashMap;
import java.util.Map;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiModelName;
/**
* @author pedro-hos@outlook.com
*/
public class LLMService {
public static String askChatGPT(String error, String apiKey) {
PromptTemplate promptTemplate = PromptTemplate.from("How to fix {{error}}");
Map<String, Object> variables = new HashMap<>();
variables.put("error", error);
Prompt prompt = promptTemplate.apply(variables);
return withModel(apiKey).generate(prompt.text());
}
private static ChatLanguageModel withModel(String apiKey) {
return OpenAiChatModel.builder()
.apiKey(apiKey)
.modelName(OpenAiModelName.GPT_3_5_TURBO)
.temperature(0.3)
.build();
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((821, 955), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((821, 940), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((821, 916), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((821, 868), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.adrianbadarau.langchainjava.config;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.BertTokenizer;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
@Configuration
public class LLMConfig {

    /** Local all-MiniLM-L6-v2 embedding model. */
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * In-memory embedding store, pre-loaded at startup with the terms and
     * conditions document from the classpath.
     * I'm going to use the in memory store for now but in the future we
     * should switch to a vector DB.
     */
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
        Resource tcResource = resourceLoader.getResource("classpath:t&c.text");
        Document tcDocument = loadDocument(tcResource.getFile().toPath());

        // Split into ~100-token segments, no overlap, using a BERT tokenizer.
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new BertTokenizer());

        InMemoryEmbeddingStore<TextSegment> store = new InMemoryEmbeddingStore<>();
        EmbeddingStoreIngestor.builder()
                .documentSplitter(splitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(store)
                .build()
                .ingest(tcDocument);

        return store;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1758, 1946), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1758, 1921), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1758, 1882), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1758, 1834), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package ru.vzotov.ai.application;
import com.google.common.collect.Lists;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import lombok.Builder;
import ru.vzotov.purchase.domain.model.Purchase;
import java.util.List;
import java.util.Objects;
import static java.util.Collections.singletonList;
/**
 * Ingests purchases into an embedding store in fixed-size partitions, so they
 * can later be retrieved for AI-assisted category assignment.
 */
public class PurchaseCategoryProcessor {
private final EmbeddingStoreIngestor ingestor;
// Number of purchases ingested per batch; must be strictly positive.
private final int partitionSize;
@Builder
public PurchaseCategoryProcessor(
EmbeddingModel embeddingModel,
EmbeddingStore<TextSegment> embeddingStore,
int partitionSize
) {
if(partitionSize <= 0)
throw new IllegalArgumentException("partitionSize must be > 0");
this.partitionSize = partitionSize;
Objects.requireNonNull(embeddingModel);
Objects.requireNonNull(embeddingStore);
// Each document becomes exactly one text segment (no further splitting).
this.ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(doc -> singletonList(doc.toTextSegment()))
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
}
/** Splits the purchases into partitions and ingests each batch of documents. */
public void process(List<Purchase> purchases) {
Lists.partition(purchases, partitionSize)
.stream()
.map(this::transform)
.forEach(ingestor::ingest);
}
/** Maps each purchase to a Document carrying its description text and metadata. */
List<Document> transform(List<Purchase> purchases) {
return purchases.stream()
.map(ItemAction::new)
.map(action -> new Document(action.text(), action.metadata()))
.toList();
}
/**
 * Wraps a purchase as an embeddable "action": a natural-language sentence
 * describing its category assignment, plus lookup metadata.
 */
static class ItemAction {
// Metadata field names/values shared with the retrieval side.
private static final String ENTITY_PURCHASE = "purchase";
private static final String F_ID = "entityId";
private static final String F_ENTITY = "entity";
private static final String F_REFERENCE_ID = "reference_id";
private static final String F_LAST_MODIFIED = "last_modified";
private final Purchase purchase;
private final String text;
public ItemAction(Purchase purchase) {
this.purchase = purchase;
this.text = "Purchase '%s' has category '%s' with id '%s'."
.formatted(purchase.name(), purchase.category().name(), purchase.category().categoryId().value());
}
public Purchase purchase() {
return purchase;
}
public String text() {
return text;
}
/** Metadata: purchase id, entity kind, last-modified epoch millis, and category id. */
public Metadata metadata() {
return Metadata.from(F_ID, purchase().purchaseId().value())
.add(F_ENTITY, ENTITY_PURCHASE)
.add(F_LAST_MODIFIED, String.valueOf(purchase().updatedOn().toEpochMilli()))
.add(F_REFERENCE_ID, purchase().category().categoryId().value());
}
}
}
| [
"dev.langchain4j.data.document.Metadata.from",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1157, 1387), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1157, 1362), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1157, 1314), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1157, 1266), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1456, 1604), 'com.google.common.collect.Lists.partition'), ((1456, 1561), 'com.google.common.collect.Lists.partition'), ((1456, 1523), 'com.google.common.collect.Lists.partition'), ((2768, 3054), 'dev.langchain4j.data.document.Metadata.from'), ((2768, 2969), 'dev.langchain4j.data.document.Metadata.from'), ((2768, 2872), 'dev.langchain4j.data.document.Metadata.from')] |
package org.jugph;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.UserMessage;
import java.time.LocalDate;
import static java.time.Duration.ofSeconds;
/**
 * Demo: uses a langchain4j AiService to extract structured member data
 * (name, email, gender, registration date) from free-form text.
 */
public class JUGPHMemberExtractorAIServiceExample {
// Immutable member record populated by the LLM extractor.
public record JUGPHMember(String firstName, String lastName, String email, JUGPHMember.Gender gender, LocalDate registrationDate) {
enum Gender {
MALE, FEMALE, NON_BINARY, NOT_SAID
}
// Custom rendering; gender is lower-cased for readability.
@Override
public String toString() {
return "JUGPHMember {" +
" firstName = \"" + firstName + "\"" +
", lastName = \"" + lastName + "\"" +
", email = \"" + email + "\"" +
", gender = \"" + gender.name().toLowerCase() + "\"" +
", registrationDate = " + registrationDate +
" }";
}
}
// AiServices generates the implementation; {{it}} is replaced by the argument.
interface MemberExtractor {
@UserMessage("Extract member information from the following text: {{it}}. Infer the gender if not explicitly said.")
JUGPHMember extractMemberFrom(String text);
}
public static void main(String[] args) {
var model = OpenAiChatModel.builder()
.apiKey(System.getenv("OPENAI_API_KEY"))
.timeout(ofSeconds(120))
// .logRequests(true)
// .logResponses(true)
.build();
MemberExtractor extractor = AiServices.create(MemberExtractor.class, model);
var text = "New member alert: Maria Clara, a passionate Java developer, has just joined the JUGPH community. " +
"Her email, maria.clara@jugph.org, was sent out on the 17th of November, 2023.";
JUGPHMember member = extractor.extractMemberFrom(text);
System.out.println(member);
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1227, 1450), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1227, 1350), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1227, 1309), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.docuverse.backend.services;
import com.docuverse.backend.dtos.ChatRequestDTO;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.github.cdimascio.dotenv.Dotenv;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static org.mapdb.Serializer.STRING;
@Service
/**
 * Chat service implementing retrieval-augmented Q&A: embeds the user's
 * question, retrieves the most relevant stored segment, and asks the OpenAI
 * chat model to answer from that information only. Conversation memory is
 * persisted to a local MapDB file.
 */
public class ChatServiceImpl implements ChatService {
private final EmbeddingStore<TextSegment> embeddingStore;
private final EmbeddingModel embeddingModel;
public ChatServiceImpl(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
this.embeddingStore = embeddingStore;
this.embeddingModel = embeddingModel;
}
/**
 * Answers the request's question against the embedded documents.
 * Returns the model's answer, or a generic error string if anything fails.
 * NOTE(review): failures are swallowed and printed via printStackTrace;
 * consider proper logging and a typed error path.
 */
@Override
public String processChat(ChatRequestDTO request) {
Dotenv dotenv = Dotenv.load();
// Create a prompt template
PromptTemplate promptTemplate = PromptTemplate.from(
"Answer the question as truthfully as possible using the information below, and if the answer is not within the information, say 'I don't know.\n"
+ "\n"
+ "Question:\n"
+ "{{question}}\n"
+ "\n"
+ "Information:\n"
+ "{{information}}");
// Debug leftover — consider removing.
System.out.println("checkpoint 3");
try {
// Send the prompt to the OpenAI chat model
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.apiKey(dotenv.get("OPENAI_API_KEY"))
.modelName(GPT_3_5_TURBO)
.temperature(0.7)
.timeout(ofSeconds(15))
.maxRetries(3)
.logResponses(true)
.logRequests(true)
.build();
// Window of the last 10 messages, persisted via MapDB (store below).
ChatMemory chatMemory = MessageWindowChatMemory.builder()
.maxMessages(10)
.chatMemoryStore(new PersistentChatMemoryStore())
.build();
// Retrieve at most 1 segment with min similarity 0.9 as context.
ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
.chatLanguageModel(chatModel)
.retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, 1, 0.9))
.chatMemory(chatMemory) // you can override default chat memory
.promptTemplate(promptTemplate) // you can override default prompt template
.build();
String answer = chain.execute(request.question());
System.out.println(chatMemory.messages());
return answer;
} catch (Exception e) {
// Handle the exception here, you can log it or return an error message
e.printStackTrace(); // Print the exception stack trace for debugging
return "An error occurred while processing the request.";
}
}
// public String processChat(ChatRequest request) {
//
// Dotenv dotenv = Dotenv.load();
//
// // Create a prompt template
// PromptTemplate promptTemplate = PromptTemplate.from(
// "Answer the question as truthfully as possible using the information below, and if the answer is not within the information, say 'I don't know.\n"
// + "\n"
// + "Question:\n"
// + "{{question}}\n"
// + "\n"
// + "Information:\n"
// + "{{information}}");
//
// System.out.println("checkpoint 3");
//
//
// // Send the prompt to the OpenAI chat model
// ChatLanguageModel chatModel = OpenAiChatModel.builder()
// .apiKey(dotenv.get("OPENAI_API_KEY"))
// .modelName(GPT_3_5_TURBO)
// .temperature(0.7)
// .timeout(ofSeconds(15))
// .maxRetries(3)
// .logResponses(true)
// .logRequests(true)
// .build();
//
// ChatMemory chatMemory = MessageWindowChatMemory.builder()
// .maxMessages(10)
// .chatMemoryStore(new PersistentChatMemoryStore())
// .build();
//
// ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
// .chatLanguageModel(chatModel)
// .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, 1, 0.9))
// .chatMemory(chatMemory) // you can override default chat memory
// .promptTemplate(promptTemplate) // you can override default prompt template
// .build();
//
// String answer = chain.execute(request.getQuestion());
// System.out.println(chatMemory.messages());
// return answer;
// }
/**
 * Chat memory persisted in a local MapDB file ("chat-memory.db").
 * Messages are stored as JSON, one entry per memory id.
 */
static class PersistentChatMemoryStore implements ChatMemoryStore {
private final DB db = DBMaker.fileDB("chat-memory.db").transactionEnable().make();
private final Map<String, String> map = db.hashMap("messages", STRING, STRING).createOrOpen();
@Override
public List<ChatMessage> getMessages(Object memoryId) {
String json = map.get((String) memoryId);
return messagesFromJson(json);
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
String json = messagesToJson(messages);
map.put((String) memoryId, json);
db.commit();
}
@Override
public void deleteMessages(Object memoryId) {
map.remove((String) memoryId);
db.commit();
}
}
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2386, 2740), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2711), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2672), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2632), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2597), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2553), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2515), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2386, 2469), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2779, 2948), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2779, 2919), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2779, 2849), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((3000, 3398), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3000, 3325), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3000, 3233), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3000, 3189), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3000, 3088), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((5833, 5892), 'org.mapdb.DBMaker.fileDB'), ((5833, 5885), 'org.mapdb.DBMaker.fileDB')] |
package org.example;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.Scanner;
/**
 * Console chat loop backed by a LangChain4j {@link ConversationalChain}:
 * reads one line at a time from stdin and prints the model's reply.
 */
public class _08_ConversationalChain {

    public static void main(String[] args) {
        OpenAiChatModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(model)
                .build();
        // try-with-resources closes the Scanner (and the stdin stream) on exit.
        try (Scanner scanner = new Scanner(System.in)) {
            // Fix: hasNextLine() matches the line-based nextLine() read below;
            // the original hasNext() waits for a whole token and disagrees with
            // nextLine() on blank input lines.
            while (scanner.hasNextLine()) {
                String in = scanner.nextLine();
                String answer = chain.execute(in);
                System.out.println(answer);
            }
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((357, 453), 'dev.langchain4j.chain.ConversationalChain.builder'), ((357, 428), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.openai;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiLanguageModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

    public static void main(String[] args) {
        MusicianService musicianService = new MusicianService();
        musicianService.useOpenAiLanguageModel();
        musicianService.useOpenAiLanguageModelPrompt();
        musicianService.useOpenAiLanguageModelBuilder();
        musicianService.useOpenAiChatModel();
        musicianService.useOpenAiChatModelBuilder();
        // NOTE(review): useOpenAiChatModelAiMessage() exists below but is never
        // invoked — confirm whether that is intentional.
    }

    // Key is read from the environment; never hard-code API keys.
    private static final String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");
    private static final String PROMPT = "When was the first Beatles album released?";

    // #############################
    // ### OPENAI LANGUAGE MODEL ###
    // #############################

    /** Plain-string completion using the default language-model settings. */
    public void useOpenAiLanguageModel() {
        System.out.println("### useOpenAiLanguageModel");
        OpenAiLanguageModel model = OpenAiLanguageModel.withApiKey(OPENAI_API_KEY);
        // Consistency fix: reuse the PROMPT constant instead of duplicating the literal.
        Response<String> completion = model.generate(PROMPT);
        String content = completion.content();
        FinishReason finishReason = completion.finishReason();
        TokenUsage tokenUsage = completion.tokenUsage();
        System.out.println(content);
        System.out.println(finishReason.name());
        System.out.println(tokenUsage.inputTokenCount());
        System.out.println(tokenUsage.outputTokenCount());
        System.out.println(tokenUsage.totalTokenCount());
    }

    /** Same completion, but the input is wrapped in a {@link Prompt} object. */
    public void useOpenAiLanguageModelPrompt() {
        System.out.println("### useOpenAiLanguageModelPrompt");
        OpenAiLanguageModel model = OpenAiLanguageModel.withApiKey(OPENAI_API_KEY);
        // Consistency fix: reuse the PROMPT constant instead of duplicating the literal.
        Prompt prompt = new Prompt(PROMPT);
        Response<String> completion = model.generate(prompt);
        String content = completion.content();
        FinishReason finishReason = completion.finishReason();
        TokenUsage tokenUsage = completion.tokenUsage();
        System.out.println(content);
        System.out.println(finishReason.name());
        System.out.println(tokenUsage.inputTokenCount());
        System.out.println(tokenUsage.outputTokenCount());
        System.out.println(tokenUsage.totalTokenCount());
    }

    /** Builder-style configuration: temperature plus request/response logging. */
    public void useOpenAiLanguageModelBuilder() {
        System.out.println("### useOpenAiLanguageModelBuilder");
        // tag::adocSnippet[]
        OpenAiLanguageModel model = OpenAiLanguageModel.builder()
                .apiKey(OPENAI_API_KEY)
                .temperature(0.3)
                .logRequests(true)
                .logResponses(true)
                .build();
        // end::adocSnippet[]
        Response<String> completion = model.generate(PROMPT);
        System.out.println(completion.content());
        System.out.println(completion.finishReason());
        System.out.println(completion.tokenUsage());
    }

    // #########################
    // ### OPENAI CHAT MODEL ###
    // #########################

    /** Simplest chat-model usage: a single string in, a single string out. */
    public void useOpenAiChatModel() {
        System.out.println("### useOpenAiChatModel");
        // tag::adocSimple[]
        OpenAiChatModel model = OpenAiChatModel.withApiKey(OPENAI_API_KEY);
        String completion = model.generate("When was the first Rolling Stones album released?");
        System.out.println(completion);
        // end::adocSimple[]
    }

    /** Chat model configured via the builder (explicit model name and temperature). */
    public void useOpenAiChatModelBuilder() {
        System.out.println("### useOpenAiChatModelBuilder");
        OpenAiChatModel model = OpenAiChatModel.builder()
                .apiKey(OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.9)
                .logRequests(true)
                .logResponses(true)
                .build();
        String completion = model.generate("When was the first Rolling Stones album released?");
        System.out.println(completion);
    }

    /** Chat with an explicit system message plus a user message. */
    public void useOpenAiChatModelAiMessage() {
        System.out.println("### useOpenAiChatModelAiMessage");
        OpenAiChatModel model = OpenAiChatModel.withApiKey(OPENAI_API_KEY);
        SystemMessage sysMsg = new SystemMessage("You are a music expert.");
        UserMessage userMsg = new UserMessage("When was the first Rolling Stones album released?");
        Response<AiMessage> completion = model.generate(sysMsg, userMsg);
        System.out.println(completion);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiLanguageModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2924, 3073), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((2924, 3058), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((2924, 3032), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((2924, 3007), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((2924, 2983), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((3881, 4058), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3881, 4043), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3881, 4017), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3881, 3992), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3881, 3968), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3881, 3936), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore;
import java.util.List;
/**
 * Demo: store two embedded text segments in a Weaviate cluster, then search
 * for the single stored segment most similar to a query embedding.
 */
public class WeaviateEmbeddingStoreExample {

    public static void main(String[] args) {

        EmbeddingStore<TextSegment> embeddingStore = WeaviateEmbeddingStore.builder()
                // Find it under "Show API keys" of your Weaviate cluster.
                .apiKey(System.getenv("WEAVIATE_API_KEY"))
                // The scheme, e.g. "https" of cluster URL. Find in under Details of your Weaviate cluster.
                .scheme("https")
                // The host, e.g. "test-o1gvgnp4.weaviate.network" of cluster URL.
                // Find in under Details of your Weaviate cluster.
                .host("test3-bwsieg9y.weaviate.network")
                // "Default" class is used if not specified. Must start from an uppercase letter!
                .objectClass("Test")
                // If true (default), then WeaviateEmbeddingStore will generate a hashed ID based on provided
                // text segment, which avoids duplicated entries in DB. If false, then random ID will be generated.
                .avoidDups(true)
                // Consistency level: ONE, QUORUM (default) or ALL.
                .consistencyLevel("ALL")
                .build();

        // Embedding model constructed locally, without any API key.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        TextSegment segment1 = TextSegment.from("I like football.");
        Embedding embedding1 = embeddingModel.embed(segment1).content();
        embeddingStore.add(embedding1, segment1);

        TextSegment segment2 = TextSegment.from("The weather is good today.");
        Embedding embedding2 = embeddingModel.embed(segment2).content();
        embeddingStore.add(embedding2, segment2);

        // The query must be embedded with the SAME model used for the stored segments.
        Embedding queryEmbedding = embeddingModel.embed("What is your favourite sport?").content();

        // Retrieve at most one match for the query embedding.
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1);
        EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0);

        System.out.println(embeddingMatch.score()); // 0.8144288063049316
        System.out.println(embeddingMatch.embedded().text()); // I like football.
    }
}
| [
"dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder"
] | [((573, 1615), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 1590), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 1481), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 1222), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 1087), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 880), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((573, 739), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder')] |
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static java.util.stream.Collectors.joining;
/**
 * Manual ("naive") RAG pipeline with Mistral AI: load a document, split it
 * into chunks, embed the chunks, retrieve the most relevant ones for a user
 * question, and hand them to the chat model as context inside the prompt.
 */
public class MistralAiBasicRagEmbedExamples {

    static class Chat_Story_From_My_Document {

        public static void main(String[] args) {

            // In this very simple example, we are getting data that we want to use for RAG.
            // We will use a history about origin of the Llama by National Geographic https://www.nationalgeographic.es/animales/llama.
            Document document = loadDocument(toPath("example-files/story-about-origin-of-the-llama.txt"), new TextDocumentParser());

            // In a RAG system, it is crucial to split the document into smaller chunks so that it's more effective
            // to identify and retrieve the most relevant information in the retrieval process later
            DocumentSplitter splitter = DocumentSplitters.recursive(200, 0);
            List<TextSegment> segments = splitter.split(document);

            // Now, for each text segment, we need to create text embeddings, which are numeric representations of the text in the vector space.
            // Of course, we will use Mistral AI for this purpose.
            EmbeddingModel embeddingModel = MistralAiEmbeddingModel.withApiKey(System.getenv("MISTRAL_AI_API_KEY"));
            List<Embedding> embeddings = embeddingModel.embedAll(segments).content();

            // Once we get the text embeddings, we will store them in a vector database for efficient processing and retrieval.
            // For simplicity, this example uses an in-memory store, but you can choose any external compatible store for production environments.
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            embeddingStore.addAll(embeddings, segments);

            // Whenever users ask a question, we also need to create embeddings for this question using the same embedding models as before.
            String question = "Who create the llamas?";
            Embedding questionEmbedding = embeddingModel.embed(question).content();

            // We can perform a search on the vector database and retrieve the most relevant text chunks based on the user question.
            int maxResults = 3;
            double minScore = 0.7;
            List<EmbeddingMatch<TextSegment>> relevantEmbeddings
                    = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);

            // Now we can offer the relevant information as the context information within the prompt.
            // Here is a prompt template where we can include both the retrieved text and user question in the prompt.
            PromptTemplate promptTemplate = PromptTemplate.from(
                    "Context information is below.:\n"
                            + "------------------\n"
                            + "{{information}}\n"
                            + "------------------\n"
                            + "Given the context information and not prior knowledge, answer the query.\n"
                            + "Query: {{question}}\n"
                            + "Answer:");

            String information = relevantEmbeddings.stream()
                    .map(match -> match.embedded().text())
                    .collect(joining("\n\n"));

            Map<String, Object> promptInputs = new HashMap<>();
            promptInputs.put("question", question);
            promptInputs.put("information", information);

            Prompt prompt = promptTemplate.apply(promptInputs);

            // Now we can use the Mistral AI chat model to generate the answer to the user question based on the context information.
            ChatLanguageModel chatModel = MistralAiChatModel.builder()
                    .apiKey(System.getenv("MISTRAL_AI_API_KEY"))
                    .modelName("mistral-medium")
                    .temperature(0.2) // expect a more focused and deterministic answer
                    .logRequests(true)
                    .logResponses(true)
                    .build();

            AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content();
            String answer = aiMessage.text();
            System.out.println(answer); // According to Inca legend, the llamas were created by the mythical founders of the Inca Empire....
        }
    }

    /**
     * Resolves a classpath resource to a filesystem {@link Path}.
     *
     * @param fileName resource name relative to this class
     * @throws IllegalArgumentException if the resource does not exist on the
     *         classpath (fix: getResource() returns null in that case, which
     *         previously surfaced as an uninformative NullPointerException)
     * @throws RuntimeException if the resource URL is not a valid URI
     */
    static Path toPath(String fileName) {
        URL fileUrl = MistralAiBasicRagEmbedExamples.class.getResource(fileName);
        if (fileUrl == null) {
            throw new IllegalArgumentException("Resource not found on classpath: " + fileName);
        }
        try {
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder"
] | [((4843, 5181), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((4843, 5152), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((4843, 5112), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((4843, 5023), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((4843, 4985), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((4843, 4936), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
package de.htwg.rag.retriever;
import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.injector.ContentInjector;
import dev.langchain4j.rag.content.injector.DefaultContentInjector;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.transformer.CompressingQueryTransformer;
import dev.langchain4j.rag.query.transformer.QueryTransformer;
import io.quarkiverse.langchain4j.pgvector.PgVectorEmbeddingStore;
import jakarta.inject.Singleton;
import java.util.function.Supplier;
import static java.util.Arrays.asList;
@Singleton
/**
 * CDI-provided {@link RetrievalAugmentor} that compresses the query with a
 * chat model, retrieves from a pgvector store, and injects results (with
 * their "link" metadata) into the prompt.
 */
public class AdvancedRetrievalAugmentor implements Supplier<RetrievalAugmentor> {

    private final RetrievalAugmentor augmentor;

    // uses the PgVectorEmbeddingStore and the AllMiniLmL6V2QuantizedEmbeddingModel.
    // The Store is a extension of the normal PostgresDB and the model is running locally.
    public AdvancedRetrievalAugmentor(PgVectorEmbeddingStore store, AllMiniLmL6V2QuantizedEmbeddingModel model) {
        // chatmodel just for the query transformer, can be any model,
        // all it does is compress the input query's to one so that the retrieval is more accurate
        // and logic from the chat-history gets taken into account
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_APIKEY"))
                .modelName("gpt-3.5-turbo")
                .logRequests(true)
                .logResponses(true)
                .build();

        // German prompt instructing the model to rewrite the user query (plus
        // chat history) into a single self-contained retrieval query.
        QueryTransformer queryTransformer = CompressingQueryTransformer.builder()
                .chatLanguageModel(chatModel)
                .promptTemplate(PromptTemplate.from("Lese und verstehe das Gespräch zwischen dem Benutzer und dem KI. Analysiere dann die neue Anfrage des Benutzers. Identifiziere alle relevanten Details, Begriffe und den Kontext sowohl aus dem Gespräch als auch aus der neuen Anfrage. Formuliere diese Anfrage in ein klares, prägnantes und in sich geschlossenes Format um, das für die Informationssuche geeignet ist.\n" +
                        "\n" +
                        "Gespräch:\n" +
                        "{{chatMemory}}\n" +
                        "\n" +
                        "Benutzeranfrage: {{query}}\n" +
                        "\n" +
                        "Es ist sehr wichtig, dass du nur die umformulierte Anfrage und nichts anderes bereitstellst! Füge einer Anfrage nichts voran!"))
                .build();

        // ContentInjector to give metadata with the retrieved documents
        ContentInjector contentInjector = DefaultContentInjector.builder()
                .metadataKeysToInclude(asList("link"))
                .build();

        // The normal Retriever to get the Documents from the store.
        // Top 3 matches, filtered to a minimum similarity score of 0.7.
        EmbeddingStoreContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingModel(model)
                .embeddingStore(store)
                .maxResults(3)
                .minScore(0.7)
                .build();

        augmentor = DefaultRetrievalAugmentor
                .builder()
                .contentRetriever(contentRetriever)
                .queryTransformer(queryTransformer)
                .contentInjector(contentInjector)
                .build();
    }

    /** Returns the augmentor assembled once in the constructor. */
    @Override
    public RetrievalAugmentor get() {
        return augmentor;
    }
}
| [
"dev.langchain4j.rag.query.transformer.CompressingQueryTransformer.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.rag.content.injector.DefaultContentInjector.builder"
] | [((1597, 1818), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1597, 1793), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1597, 1757), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1597, 1722), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1597, 1678), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1864, 2790), 'dev.langchain4j.rag.query.transformer.CompressingQueryTransformer.builder'), ((1864, 2765), 'dev.langchain4j.rag.query.transformer.CompressingQueryTransformer.builder'), ((1864, 1947), 'dev.langchain4j.rag.query.transformer.CompressingQueryTransformer.builder'), ((2908, 3020), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((2908, 2995), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((3150, 3355), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3150, 3330), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3150, 3299), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3150, 3268), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3150, 3229), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
package com.anthonyquere.companionapi.completion.langchain.models;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class LangchainMistralModel {

    /** Mistral AI API key, injected from application properties. */
    @Value("${mistral-ia.api-key}")
    private String mistralApiKey;

    /**
     * Exposes a Mistral "tiny" chat model as a Spring bean.
     * Configuration: 100-token cap, temperature 1.0, request/response
     * logging enabled, and a single retry attempt.
     */
    @Bean
    public ChatLanguageModel buildMistralModel() {
        return MistralAiChatModel.builder()
                .apiKey(mistralApiKey)
                .modelName("mistral-tiny")
                .temperature(1.0d)
                .maxTokens(100)
                .maxRetries(1)
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder"
] | [((553, 857), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 832), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 801), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 765), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 730), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 695), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 652), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((553, 620), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
package me.nzuguem.something.story.configurations.langchain;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.Content;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.Query;
import io.quarkiverse.langchain4j.chroma.ChromaEmbeddingStore;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.List;
@ApplicationScoped
public class StoryContentRetriever implements ContentRetriever {

    /** Delegate that embeds each query and searches the Chroma store. */
    private final EmbeddingStoreContentRetriever retriever;

    public StoryContentRetriever(ChromaEmbeddingStore chromaEmbeddingStore, EmbeddingModel embeddingModel) {
        // Up to 20 results per query; store and model are CDI-injected.
        this.retriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(chromaEmbeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(20)
                .build();
    }

    /** Pure delegation: retrieval behavior is fully defined by the delegate. */
    @Override
    public List<Content> retrieve(Query query) {
        return this.retriever.retrieve(query);
    }
}
| [
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((762, 961), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((762, 936), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((762, 904), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((762, 850), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
/** Minimal Mistral AI chat-model example: one prompt, print the reply. */
public class MistralAiChatModelExamples {

    static class Simple_Prompt {

        public static void main(String[] args) {
            ChatLanguageModel model = MistralAiChatModel.builder()
                    .apiKey(System.getenv("MISTRAL_AI_API_KEY")) // Please use your own Mistral AI API key
                    .modelName("mistral-small")
                    .logRequests(true)
                    .logResponses(true)
                    .build();
            // Generate and print the completion in one step.
            System.out.println(model.generate("Tell me a joke about Java"));
        }
    }
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder"
] | [((278, 569), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((278, 540), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((278, 500), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((278, 461), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((278, 371), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
import dev.langchain4j.code.Judge0JavaScriptExecutionTool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import static java.time.Duration.ofSeconds;
/**
 * AiServices example wiring a Judge0 JavaScript-execution tool into an
 * assistant, then asking questions the model can answer by running code.
 */
public class _11_ServiceWithDynamicToolsExample {

    // Minimal chat contract implemented dynamically by AiServices.
    interface Assistant {

        String chat(String message);
    }

    public static void main(String[] args) {

        // Remote code-execution tool; authenticated with a RapidAPI key.
        Judge0JavaScriptExecutionTool judge0Tool = new Judge0JavaScriptExecutionTool(ApiKeys.RAPID_API_KEY);

        ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .temperature(0.0)
                .timeout(ofSeconds(60))
                .build();

        // Assistant keeps a 20-message window and may invoke the Judge0 tool.
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(judge0Tool)
                .build();

        interact(assistant, "What is the square root of 49506838032859?");
        interact(assistant, "Capitalize every third letter: abcabc");
        interact(assistant, "What is the number of hours between 17:00 on 21 Feb 1988 and 04:00 on 12 Apr 2014?");
    }

    // Prints the user message, the assistant's answer, and two blank lines.
    private static void interact(Assistant assistant, String userMessage) {
        System.out.println("[User]: " + userMessage);
        String answer = assistant.chat(userMessage);
        System.out.println("[Assistant]: " + answer);
        System.out.println();
        System.out.println();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((638, 810), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((638, 785), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((638, 745), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((638, 711), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((843, 1065), 'dev.langchain4j.service.AiServices.builder'), ((843, 1040), 'dev.langchain4j.service.AiServices.builder'), ((843, 1005), 'dev.langchain4j.service.AiServices.builder'), ((843, 932), 'dev.langchain4j.service.AiServices.builder')] |
package dev.langchain4j.chain;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.rag.*;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.injector.DefaultContentInjector;
import dev.langchain4j.rag.query.Metadata;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.internal.ValidationUtils.ensureNotNull;
/**
* A chain for conversing with a specified {@link ChatLanguageModel}
* based on the information retrieved by a specified {@link ContentRetriever}.
* Includes a default {@link ChatMemory} (a message window with maximum 10 messages), which can be overridden.
* You can fully customize RAG behavior by providing an instance of a {@link RetrievalAugmentor},
* such as {@link DefaultRetrievalAugmentor}, or your own custom implementation.
* <br>
* It is recommended to use {@link AiServices} instead, as it is more powerful.
*/
public class ConversationalRetrievalChain implements Chain<String, String> {

    private final ChatLanguageModel chatLanguageModel;
    private final ChatMemory chatMemory;
    // Performs retrieval and injects retrieved content into the user message.
    private final RetrievalAugmentor retrievalAugmentor;

    public ConversationalRetrievalChain(ChatLanguageModel chatLanguageModel,
                                        ChatMemory chatMemory,
                                        ContentRetriever contentRetriever) {
        this(
                chatLanguageModel,
                chatMemory,
                DefaultRetrievalAugmentor.builder()
                        .contentRetriever(contentRetriever)
                        .build()
        );
    }

    public ConversationalRetrievalChain(ChatLanguageModel chatLanguageModel,
                                        ChatMemory chatMemory,
                                        RetrievalAugmentor retrievalAugmentor) {
        this.chatLanguageModel = ensureNotNull(chatLanguageModel, "chatLanguageModel");
        // Default memory (window of 10) is created only when none is supplied.
        this.chatMemory = getOrDefault(chatMemory, () -> MessageWindowChatMemory.withMaxMessages(10));
        this.retrievalAugmentor = ensureNotNull(retrievalAugmentor, "retrievalAugmentor");
    }

    /**
     * Use another constructor with a new {@link ContentRetriever} instead.
     */
    @Deprecated
    public ConversationalRetrievalChain(ChatLanguageModel chatLanguageModel,
                                        ChatMemory chatMemory,
                                        PromptTemplate promptTemplate,
                                        Retriever<TextSegment> retriever) {
        this(
                chatLanguageModel,
                chatMemory,
                DefaultRetrievalAugmentor.builder()
                        .contentRetriever(retriever.toContentRetriever())
                        .contentInjector(DefaultContentInjector.builder()
                                .promptTemplate(toPromptTemplateWithNewVariableNames(promptTemplate))
                                .build())
                        .build()
        );
    }

    /**
     * Runs one turn: augments the query with retrieved content, adds the
     * augmented user message and the model's reply to memory, and returns
     * the reply text.
     */
    @Override
    public String execute(String query) {
        UserMessage userMessage = UserMessage.from(query);
        Metadata metadata = Metadata.from(userMessage, chatMemory.id(), chatMemory.messages());
        // The augmented (not the raw) user message is what gets stored in memory.
        userMessage = retrievalAugmentor.augment(userMessage, metadata);
        chatMemory.add(userMessage);
        AiMessage aiMessage = chatLanguageModel.generate(chatMemory.messages()).content();
        chatMemory.add(aiMessage);
        return aiMessage.text();
    }

    public static Builder builder() {
        return new Builder();
    }

    public static class Builder {

        private ChatLanguageModel chatLanguageModel;
        private ChatMemory chatMemory;
        private RetrievalAugmentor retrievalAugmentor;
        @Deprecated
        private dev.langchain4j.retriever.Retriever<TextSegment> retriever;
        @Deprecated
        private PromptTemplate promptTemplate;

        public Builder chatLanguageModel(ChatLanguageModel chatLanguageModel) {
            this.chatLanguageModel = chatLanguageModel;
            return this;
        }

        public Builder chatMemory(ChatMemory chatMemory) {
            this.chatMemory = chatMemory;
            return this;
        }

        public Builder contentRetriever(ContentRetriever contentRetriever) {
            // A null argument is ignored: no augmentor is configured in that case.
            if (contentRetriever != null) {
                this.retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                        .contentRetriever(contentRetriever)
                        .build();
            }
            return this;
        }

        public Builder retrievalAugmentor(RetrievalAugmentor retrievalAugmentor) {
            this.retrievalAugmentor = retrievalAugmentor;
            return this;
        }

        /**
         * Deprecated. Use {@link Builder#contentRetriever(ContentRetriever)} instead.
         */
        @Deprecated
        public Builder retriever(dev.langchain4j.retriever.Retriever<TextSegment> retriever) {
            this.retriever = retriever;
            return this;
        }

        /**
         * Deprecated, Use this instead:<pre>
         * .retrievalAugmentor(DefaultRetrievalAugmentor.builder()
         *      .contentInjector(DefaultContentInjector.builder()
         *              .promptTemplate(promptTemplate)
         *              .build())
         *      .build());
         * </pre>
         */
        @Deprecated
        public Builder promptTemplate(PromptTemplate promptTemplate) {
            this.promptTemplate = promptTemplate;
            return this;
        }

        public ConversationalRetrievalChain build() {
            // Legacy path: a deprecated Retriever, when set, overwrites any
            // retrievalAugmentor configured via the other builder methods.
            if (retriever != null) {
                retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                        .contentRetriever(retriever.toContentRetriever())
                        .contentInjector(DefaultContentInjector.builder()
                                .promptTemplate(toPromptTemplateWithNewVariableNames(promptTemplate))
                                .build())
                        .build();
            }
            return new ConversationalRetrievalChain(chatLanguageModel, chatMemory, retrievalAugmentor);
        }
    }

    // Migrates legacy template variables ({{question}}/{{information}}) to the
    // new names ({{userMessage}}/{{contents}}); supplies a default template
    // when none was given.
    private static PromptTemplate toPromptTemplateWithNewVariableNames(PromptTemplate oldPromptTemplate) {
        if (oldPromptTemplate != null) {
            return PromptTemplate.from(oldPromptTemplate.template()
                    .replaceAll("\\{\\{question}}", "{{userMessage}}")
                    .replaceAll("\\{\\{information}}", "{{contents}}")
            );
        }
        return PromptTemplate.from(
                "Answer the following question to the best of your ability: {{userMessage}}\n" +
                        "\n" +
                        "Base your answer on the following information:\n" +
                        "{{contents}}"
        );
    }
}
| [
"dev.langchain4j.rag.content.injector.DefaultContentInjector.builder"
] | [((3177, 3352), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((3177, 3311), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((6230, 6405), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((6230, 6364), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder')] |
package dev.langchain4j.model.vertexai;
import com.google.cloud.vertexai.api.*;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolExecutionRequestUtil;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
class FunctionCallHelper {
// Maps a JSON-schema-style type name (presumably from tool parameter
// definitions — confirm against callers) to the Vertex AI protobuf Type enum.
// Unknown names fall back to TYPE_UNSPECIFIED; a null input throws NPE.
static Type fromType(String type) {
    //TODO: is it covering all the types correctly?
    switch (type) {
        case "string":
            return Type.STRING;
        case "integer":
            return Type.INTEGER;
        case "boolean":
            return Type.BOOLEAN;
        case "number":
            return Type.NUMBER;
        case "array":
            return Type.ARRAY;
        case "object":
            return Type.OBJECT;
        default:
            return Type.TYPE_UNSPECIFIED;
    }
}
static FunctionCall fromToolExecutionRequest(ToolExecutionRequest toolExecutionRequest) {
FunctionCall.Builder fnCallBuilder = FunctionCall.newBuilder()
.setName(toolExecutionRequest.name());
Struct.Builder structBuilder = Struct.newBuilder();
try {
JsonFormat.parser().merge(toolExecutionRequest.arguments(), structBuilder);
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
Struct argsStruct = structBuilder.build();
fnCallBuilder.setArgs(argsStruct);
return fnCallBuilder.build();
}
static List<ToolExecutionRequest> fromFunctionCalls(List<FunctionCall> functionCalls) {
List<ToolExecutionRequest> toolExecutionRequests = new ArrayList<>();
for (FunctionCall functionCall : functionCalls) {
ToolExecutionRequest.Builder builder = ToolExecutionRequest.builder()
.name(functionCall.getName());
Map<String, Object> callArgsMap = new HashMap<>();
Struct callArgs = functionCall.getArgs();
Map<String, Value> callArgsFieldsMap = callArgs.getFieldsMap();
callArgsFieldsMap.forEach((key, value) -> callArgsMap.put(key, unwrapProtoValue(value)));
String serializedArgsMap = ToolExecutionRequestUtil.GSON.toJson(callArgsMap);
builder.arguments(serializedArgsMap);
toolExecutionRequests.add(builder.build());
}
return toolExecutionRequests;
}
static Object unwrapProtoValue(Value value) {
Object unwrappedValue;
switch (value.getKindCase()) {
case NUMBER_VALUE:
unwrappedValue = value.getNumberValue();
break;
case STRING_VALUE:
unwrappedValue = value.getStringValue();
break;
case BOOL_VALUE:
unwrappedValue = value.getBoolValue();
break;
case STRUCT_VALUE:
HashMap<String, Object> mapForStruct = new HashMap<>();
value.getStructValue().getFieldsMap().forEach((key, val) -> mapForStruct.put(key, unwrapProtoValue(val)));
unwrappedValue = mapForStruct;
break;
case LIST_VALUE:
unwrappedValue = value.getListValue().getValuesList().stream().map(FunctionCallHelper::unwrapProtoValue).collect(Collectors.toList());
break;
default: // NULL_VALUE, KIND_NOT_SET, and default
unwrappedValue = null;
break;
}
return unwrappedValue;
}
static Tool convertToolSpecifications(List<ToolSpecification> toolSpecifications) {
Tool.Builder tool = Tool.newBuilder();
for (ToolSpecification toolSpecification : toolSpecifications) {
FunctionDeclaration.Builder fnBuilder = FunctionDeclaration.newBuilder()
.setName(toolSpecification.name())
.setDescription(toolSpecification.description());
Schema.Builder schema = Schema.newBuilder().setType(Type.OBJECT);
ToolParameters parameters = toolSpecification.parameters();
for (String paramName : parameters.required()) {
schema.addRequired(paramName);
}
parameters.properties().forEach((paramName, paramProps) -> {
//TODO: is it covering all types & cases of tool parameters? (array & object in particular)
Type type = fromType((String) paramProps.getOrDefault("type", Type.TYPE_UNSPECIFIED));
String description = (String) paramProps.getOrDefault("description", "");
schema.putProperties(paramName, Schema.newBuilder()
.setDescription(description)
.setType(type)
.build());
});
fnBuilder.setParameters(schema.build());
tool.addFunctionDeclarations(fnBuilder.build());
}
return tool.build();
}
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
"dev.langchain4j.agent.tool.ToolExecutionRequestUtil.GSON.toJson"
] | [((1523, 1597), 'com.google.protobuf.util.JsonFormat.parser'), ((2125, 2201), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2539, 2588), 'dev.langchain4j.agent.tool.ToolExecutionRequestUtil.GSON.toJson')] |
package org.acme;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.function.Supplier;
@Singleton
public class RetrievalAugmentorExample implements Supplier<RetrievalAugmentor> {

    @Inject
    EmbeddingModel embeddingModel;

    @Inject
    EmbeddingStore<TextSegment> embeddingStore;

    /**
     * Supplies the RAG retrieval augmentor: a default augmentor backed by an
     * embedding-store content retriever over the injected model and store.
     */
    @Override
    public RetrievalAugmentor get() {
        EmbeddingStoreContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        return DefaultRetrievalAugmentor.builder()
                .contentRetriever(contentRetriever)
                .build();
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((726, 1007), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((726, 982), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((796, 981), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((796, 948), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((796, 892), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.router.LanguageModelQueryRouter;
import dev.langchain4j.rag.query.router.QueryRouter;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
public class _03_Advanced_RAG_with_Query_Routing {

    /**
     * Advanced RAG example: "query routing".
     * <p>
     * Advanced RAG in LangChain4j is described here: https://github.com/langchain4j/langchain4j/pull/538
     * <p>
     * Private data is often spread across multiple sources and formats (wikis,
     * Git repositories, relational databases, search engines, ...), so a RAG
     * application may hold several {@link EmbeddingStore}s / {@link ContentRetriever}s.
     * Sending every user query to every {@link ContentRetriever} is usually
     * inefficient and counterproductive; "query routing" instead directs each
     * query to the most appropriate retriever(s).
     * <p>
     * Routing can be implemented with rules (privileges, location, ...), with
     * keywords, with semantic similarity (see EmbeddingModelTextClassifierExample
     * in this repository), or by asking an LLM to decide. The first three call
     * for a custom {@link QueryRouter}; this example demonstrates the fourth via
     * {@link LanguageModelQueryRouter}.
     * <p>
     * {@link AiServices} is used here, but the same principles apply to
     * {@link ConversationalRetrievalChain} or a custom RAG flow.
     */
    public static void main(String[] args) {
        Polymath polymath = createPolymath();

        // First, ask "What is the legacy of John Doe?"
        // Then, ask "Can I cancel my reservation?"
        // Now, see the logs to observe how the queries are routed to different retrievers.
        try (Scanner scanner = new Scanner(System.in)) {
            for (String userQuery = readQuery(scanner);
                 !"exit".equalsIgnoreCase(userQuery);
                 userQuery = readQuery(scanner)) {
                String polymathAnswer = polymath.answer(userQuery);
                printSeparator();
                System.out.println("Polymath: " + polymathAnswer);
            }
        }
    }

    // Prints the separator banner, prompts the user, reads one line, and echoes
    // another separator before returning the query.
    private static String readQuery(Scanner scanner) {
        printSeparator();
        System.out.print("User: ");
        String userQuery = scanner.nextLine();
        printSeparator();
        return userQuery;
    }

    private static void printSeparator() {
        System.out.println("==================================================");
    }

    /**
     * Wires up the Polymath assistant: two embedding stores (a biography and the
     * car-rental terms of use), one retriever per store, and an LLM-based router
     * that decides which retriever(s) each query goes to.
     */
    private static Polymath createPolymath() {
        // Check _01_Naive_RAG if you need more details on what is going on here
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey("demo")
                .build();

        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        // One dedicated store + retriever per knowledge source.
        ContentRetriever biographyContentRetriever =
                createRetriever(embed(toPath("biography-of-john-doe.txt"), embeddingModel), embeddingModel);
        ContentRetriever termsOfUseContentRetriever =
                createRetriever(embed(toPath("miles-of-smiles-terms-of-use.txt"), embeddingModel), embeddingModel);

        // The router picks retrievers based on these human-readable descriptions.
        Map<ContentRetriever, String> retrieverToDescription = new HashMap<>();
        retrieverToDescription.put(biographyContentRetriever, "biography of John Doe");
        retrieverToDescription.put(termsOfUseContentRetriever, "terms of use of car rental company");
        QueryRouter queryRouter = new LanguageModelQueryRouter(chatModel, retrieverToDescription);

        RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .queryRouter(queryRouter)
                .build();

        return AiServices.builder(Polymath.class)
                .chatLanguageModel(chatModel)
                .retrievalAugmentor(retrievalAugmentor)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();
    }

    // Both knowledge sources share the same retriever configuration.
    private static ContentRetriever createRetriever(EmbeddingStore<TextSegment> embeddingStore,
                                                    EmbeddingModel embeddingModel) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2)
                .minScore(0.6)
                .build();
    }

    // Splits the document into 300-character segments, embeds them, and loads
    // everything into an in-memory store.
    private static EmbeddingStore<TextSegment> embed(Path documentPath, EmbeddingModel embeddingModel) {
        DocumentParser documentParser = new TextDocumentParser();
        Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
        List<TextSegment> segments = DocumentSplitters.recursive(300, 0).split(document);
        List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);
        return embeddingStore;
    }

    interface Polymath {

        String answer(String query);
    }

    // Resolves a classpath resource next to this class to a filesystem path.
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = _03_Advanced_RAG_with_Query_Routing.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((4678, 4760), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4678, 4735), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5111, 5343), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5111, 5318), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5111, 5287), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5111, 5256), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5111, 5208), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5638, 5871), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5638, 5846), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5638, 5815), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5638, 5784), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5638, 5736), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((6332, 6434), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((6332, 6409), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((6452, 6686), 'dev.langchain4j.service.AiServices.builder'), ((6452, 6661), 'dev.langchain4j.service.AiServices.builder'), ((6452, 6588), 'dev.langchain4j.service.AiServices.builder'), ((6452, 6532), 'dev.langchain4j.service.AiServices.builder')] |
package org.example;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.huggingface.HuggingFaceChatModel;
import dev.langchain4j.model.huggingface.HuggingFaceModelName;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiChatModelName;
public class _02_Builder {

    public static void main(String[] args) {
        ChatLanguageModel openAI = buildOpenAiModel();
        ChatLanguageModel huggingFace = buildHuggingFaceModel();
    }

    // OpenAI GPT-4, fully configured through the fluent builder.
    private static ChatLanguageModel buildOpenAiModel() {
        return OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_DEMO)
                .modelName(OpenAiChatModelName.GPT_4)
                .temperature(0.3d)
                .maxTokens(50)
                .logRequests(true)
                .logResponses(true)
                .maxRetries(3)
                .build();
    }

    // Hugging Face Falcon-7B-Instruct.
    // NOTE(review): this reuses ApiKeys.OPENAI_DEMO as the Hugging Face access
    // token — presumably a demo placeholder; confirm a real HF token is intended.
    private static ChatLanguageModel buildHuggingFaceModel() {
        return HuggingFaceChatModel.builder()
                .accessToken(ApiKeys.OPENAI_DEMO)
                .modelId(HuggingFaceModelName.TII_UAE_FALCON_7B_INSTRUCT)
                .temperature(0.3d)
                .build();
    }
}
| [
"dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((421, 738), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 713), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 682), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 646), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 611), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 580), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 545), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((421, 491), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((781, 995), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((781, 970), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((781, 935), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((781, 861), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder')] |
package io.kadras.music.agent;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import io.kadras.music.tools.VirtualInstrumentTools;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration(proxyBeanMethods = false)
public class ComposerAgentConfig {

    /**
     * Assembles the {@link ComposerAgent}: RAG over the embedding store (top 5
     * matches, minimum score 0.5), a 10-message sliding-window chat memory, and
     * the virtual-instrument tools.
     */
    @Bean
    ComposerAgent documentAgent(ChatLanguageModel chatLanguageModel, EmbeddingModel embeddingModel,
            EmbeddingStore<TextSegment> embeddingStore, VirtualInstrumentTools virtualInstrumentTools) {
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(5)
                .minScore(0.5)
                .build();

        return AiServices.builder(ComposerAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .contentRetriever(contentRetriever)
                .chatMemory(chatMemory)
                .tools(virtualInstrumentTools)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((1053, 1276), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1053, 1251), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1053, 1220), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1053, 1189), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1053, 1141), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1372, 1629), 'dev.langchain4j.service.AiServices.builder'), ((1372, 1604), 'dev.langchain4j.service.AiServices.builder'), ((1372, 1557), 'dev.langchain4j.service.AiServices.builder'), ((1372, 1517), 'dev.langchain4j.service.AiServices.builder'), ((1372, 1465), 'dev.langchain4j.service.AiServices.builder')] |
package org.tutorial.yy.langchain.demo.aiservice.memory;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import org.tutorial.yy.langchain.demo.aiservice.LangChainBase;
import java.io.IOException;
import static dev.langchain4j.data.message.UserMessage.userMessage;
/**
* @author yyHuangfu
* @create 2024/2/3
* @description
*/
public class ChatMemoryExamples extends LangChainBase {

    public static void main(String[] args) throws IOException {
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(getModel())
                .chatMemory(getCustomMemory())
                .build();

        // "Can you introduce yourself?" — the answer reflects the seeded persona.
        String introduction = chain.execute("can u introduce yourself?");
        System.out.println(introduction);

        // "In Chinese, please." — follow-up relies on the conversation memory.
        String chineseAnswer = chain.execute("in chinese plz");
        System.out.println(chineseAnswer);
    }

    /**
     * Builds a sliding-window memory (max 10 messages) pre-seeded with a
     * persona ("yiyu") for the assistant.
     */
    public static ChatMemory getCustomMemory() {
        ChatMemory memory = MessageWindowChatMemory.builder()
                .maxMessages(10)
                .build();
        memory.add(userMessage("you are the human called Alex HF, u also called yiyu"));
        memory.add(userMessage("u like programming, reading and any sports"));
        memory.add(userMessage("u can speak Chinese and English"));
        return memory;
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((594, 742), 'dev.langchain4j.chain.ConversationalChain.builder'), ((594, 717), 'dev.langchain4j.chain.ConversationalChain.builder'), ((594, 670), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1132, 1223), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1132, 1198), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package org.tutorial.yy.langchain.service;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.qianfan.QianfanChatModel;
import dev.langchain4j.model.qianfan.QianfanStreamingChatModel;
import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
/**
* @author yyHuangfu
* @create 2024/1/29
* @description
*/
public class LangModel {

    // SECURITY NOTE(review): this Qianfan API key is hard-coded in source control
    // (previously duplicated in two methods). It should be loaded from
    // configuration/environment like the other keys (ApiKeys.*), and the exposed
    // key should be rotated. Hoisted to one constant; value unchanged.
    private static final String QIANFAN_API_KEY = "bUyk2k96KQ2gG1ZQ8TB8t3cd";

    /**
     * Returns the chat model for the given name.
     *
     * @param modelName either {@code "qian_fan"} or {@code "open_ai"}
     * @throws IllegalArgumentException if the name is not recognized
     */
    public static ChatLanguageModel getModel(String modelName) {
        switch (modelName) {
            case "qian_fan":
                return getQianFanModel();
            case "open_ai":
                return getOpenAIModel();
            default:
                // IllegalArgumentException is the idiomatic type for a bad argument;
                // it is a RuntimeException subtype, so existing catches still work.
                throw new IllegalArgumentException("unKnown model name, plz check your config");
        }
    }

    // OpenAI GPT-3.5-turbo with request/response logging enabled.
    private static ChatLanguageModel getOpenAIModel() {
        return OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.6)
                .timeout(ofSeconds(60))
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    // Streaming Qianfan model (Yi-34B-Chat).
    public static QianfanStreamingChatModel getStreamModel() {
        return QianfanStreamingChatModel.builder()
                .apiKey(QIANFAN_API_KEY)
                .secretKey(ApiKeys.QIAN_FAN_API_KEY)
                .modelName("Yi-34B-Chat")
                .build();
    }

    // Blocking Qianfan model (Yi-34B-Chat).
    private static ChatLanguageModel getQianFanModel() {
        return QianfanChatModel.builder()
                .apiKey(QIANFAN_API_KEY)
                .secretKey(ApiKeys.QIAN_FAN_API_KEY)
                .modelName("Yi-34B-Chat")
                .build();
    }
}
| [
"dev.langchain4j.model.qianfan.QianfanChatModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder"
] | [((979, 1264), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1239), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1203), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1168), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1128), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1094), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1052), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1351, 1558), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1351, 1533), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1351, 1491), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1351, 1438), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1638, 1836), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1638, 1811), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1638, 1769), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1638, 1716), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class ChatWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChatWithDocumentLiveTest.class);

    @Test
    public void givenDocument_whenPrompted_thenValidResponse() {
        // Load the story and split it into ~100-token segments.
        Document document = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        List<TextSegment> segments = splitter.split(document);

        // Embed every segment into an in-memory store.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddingModel.embedAll(segments).content(), segments);

        // Retrieve up to 3 segments with a similarity score of at least 0.7.
        String question = "Who is Simpson?";
        Embedding questionEmbedding = embeddingModel.embed(question).content();
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings =
                embeddingStore.findRelevant(questionEmbedding, 3, 0.7);

        // Build the prompt from the question plus the retrieved context.
        PromptTemplate promptTemplate = PromptTemplate.from("Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}");
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().text())
                .collect(joining("\n\n"));
        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);
        Prompt prompt = promptTemplate.apply(variables);

        // Ask the chat model and verify it produced an answer.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(Constants.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();
        AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content();

        logger.info(aiMessage.text());
        assertNotNull(aiMessage.text());
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((3188, 3316), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3295), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3259), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package dev.langchain4j.model.vertexai;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.model.output.Response;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Base64;
import java.util.List;
import java.util.Objects;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
// Live integration tests for VertexAiImageModel (Imagen on Vertex AI).
// Requires GCP credentials and the environment variables below; calls a real service.
public class VertexAiImageModelIT {

    // GCP connection settings come from the environment so no project
    // credentials live in source control.
    private static final String ENDPOINT = System.getenv("GCP_VERTEXAI_ENDPOINT");
    private static final String LOCATION = System.getenv("GCP_LOCATION");
    private static final String PROJECT = System.getenv("GCP_PROJECT_ID");
    private static final String PUBLISHER = "google";

    // Reads the file at `path` and wraps it as an Image carrying both the file
    // URI and the Base64-encoded bytes.
    private static Image fromPath(Path path) {
        try {
            byte[] allBytes = Files.readAllBytes(path);
            String base64 = Base64.getEncoder().encodeToString(allBytes);
            return Image.builder()
                    .url(path.toUri())
                    .base64Data(base64)
                    .build();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // withPersisting(): generated images are also written to disk, so the
    // returned Image carries a file URL in addition to the Base64 payload.
    @Test
    public void should_generate_one_image_with_persistence() {
        VertexAiImageModel imagenModel = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@005")
                .maxRetries(2)
                .withPersisting()
                .build();

        Response<Image> imageResponse = imagenModel.generate("watercolor of a colorful parrot drinking a cup of coffee");

        System.out.println(imageResponse.content().url());

        // has a URL because the generated image is persisted into a file
        assertThat(imageResponse.content().url()).isNotNull();
        assertThat(new File(imageResponse.content().url())).exists();

        // checks that there's Base64 data representing the image
        assertThat(imageResponse.content().base64Data()).isNotNull();
        // checks that the Base64 content is valid Base64 encoded
        assertDoesNotThrow(() -> Base64.getDecoder().decode(imageResponse.content().base64Data()));
    }

    // A single prompt can produce several samples; each one must be persisted.
    @Test
    public void should_generate_three_images_with_persistence() {
        VertexAiImageModel imagenModel = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@005")
                .withPersisting()
                .build();

        Response<List<Image>> imageListResponse = imagenModel.generate("photo of a sunset over Malibu beach", 3);

        assertThat(imageListResponse.content()).hasSize(3);
        imageListResponse.content().forEach(img -> {
            assertThat(img.url()).isNotNull();
            assertThat(img.base64Data()).isNotNull();
            System.out.println(img.url());
        });
    }

    // Generates a base image with a fixed seed and style, then edits it using a
    // mask image loaded from test resources.
    @Test
    public void should_use_image_style_seed_image_source_and_mask_for_editing() throws URISyntaxException {
        VertexAiImageModel model = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@002")
                .seed(19707L)
                .sampleImageStyle(VertexAiImageModel.ImageStyle.photograph)
                .guidanceScale(100)
                .maxRetries(4)
                .withPersisting()
                .build();

        Response<Image> forestResp = model.generate("lush forest");

        System.out.println(forestResp.content().url());

        assertThat(forestResp.content().base64Data()).isNotNull();

        URI maskFileUri = Objects.requireNonNull(getClass().getClassLoader().getResource("mask.png")).toURI();

        Response<Image> compositeResp = model.edit(
                forestResp.content(), fromPath(Paths.get(maskFileUri)), "red trees"
        );

        System.out.println(compositeResp.content().url());

        assertThat(compositeResp.content().base64Data()).isNotNull();
    }

    // Generates at 1024px into a chosen directory, then upscales the result to
    // 4096px via edit() with an empty prompt.
    @Test
    public void should_use_persistTo_and_image_upscaling() {
        Path defaultTempDirPath = Paths.get(System.getProperty("java.io.tmpdir"));

        VertexAiImageModel imagenModel = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@002")
                .sampleImageSize(1024)
                .withPersisting()
                .persistTo(defaultTempDirPath)
                .maxRetries(3)
                .build();

        Response<Image> imageResponse =
                imagenModel.generate("A black bird looking itself in an antique mirror");

        System.out.println(imageResponse.content().url());

        assertThat(imageResponse.content().url()).isNotNull();
        assertThat(new File(imageResponse.content().url())).exists();
        assertThat(imageResponse.content().base64Data()).isNotNull();

        VertexAiImageModel imagenModelForUpscaling = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@002")
                .sampleImageSize(4096)
                .withPersisting()
                .persistTo(defaultTempDirPath)
                .maxRetries(3)
                .build();

        Response<Image> upscaledImageResponse =
                imagenModelForUpscaling.edit(imageResponse.content(), "");

        System.out.println(upscaledImageResponse.content().url());

        assertThat(upscaledImageResponse.content().url()).isNotNull();
        assertThat(new File(upscaledImageResponse.content().url())).exists();
        assertThat(upscaledImageResponse.content().base64Data()).isNotNull();
    }

    // Prompts in Japanese and excludes unwanted toppings via negativePrompt.
    @Test
    public void should_use_negative_prompt_and_different_prompt_language() {
        VertexAiImageModel imagenModel = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@005")
                .language("ja")
                .negativePrompt("pepperoni, pineapple")
                .maxRetries(2)
                .withPersisting()
                .build();

        Response<Image> imageResponse = imagenModel.generate("ピザ"); // pizza

        System.out.println(imageResponse.content().url());

        assertThat(imageResponse.content().url()).isNotNull();
        assertThat(imageResponse.content().base64Data()).isNotNull();
    }

    // The service is expected to reject prompts that violate its content policy.
    @Test
    public void should_raise_error_on_problematic_prompt_or_content_generation() {
        VertexAiImageModel imagenModel = VertexAiImageModel.builder()
                .endpoint(ENDPOINT)
                .location(LOCATION)
                .project(PROJECT)
                .publisher(PUBLISHER)
                .modelName("imagegeneration@005")
                .withPersisting()
                .build();

        assertThatExceptionOfType(Throwable.class).isThrownBy(() -> imagenModel.generate("a nude woman"));
    }
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((1109, 1153), 'java.util.Base64.getEncoder'), ((1174, 1297), 'dev.langchain4j.data.image.Image.builder'), ((1174, 1268), 'dev.langchain4j.data.image.Image.builder'), ((1174, 1228), 'dev.langchain4j.data.image.Image.builder'), ((2446, 2510), 'java.util.Base64.getDecoder'), ((4141, 4224), 'java.util.Objects.requireNonNull')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
/**
 * Two "chat with your documents" (RAG) examples over the same sample file:
 * a high-level one-liner pipeline and a manual step-by-step pipeline showing
 * what the high-level chain does under the hood.
 */
public class ChatWithDocumentsExamples {
    // Please also check ServiceWithRetrieverExample
    static class IfYouNeedSimplicity {
        public static void main(String[] args) throws Exception {
            // Local (in-process) embedding model + in-memory vector store.
            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            // Ingestor: splits into <=500-token segments (no overlap), embeds, stores.
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(500, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            Document document = loadDocument(toPath("example-files/story-about-happy-carrot.txt"));
            ingestor.ingest(document);
            // Chain wires retrieval + prompt construction + the chat model together.
            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    // .chatMemory() // you can override default chat memory
                    // .promptTemplate() // you can override default prompt template
                    .build();
            String answer = chain.execute("Who is Charlie?");
            System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille...
        }
    }
    static class If_You_Need_More_Control {
        public static void main(String[] args) {
            // Load the document that includes the information you'd like to "chat" about with the model.
            Document document = loadDocument(toPath("example-files/story-about-happy-carrot.txt"));
            // Split document into segments 100 tokens each
            DocumentSplitter splitter = DocumentSplitters.recursive(
                    100,
                    0,
                    new OpenAiTokenizer(GPT_3_5_TURBO)
            );
            List<TextSegment> segments = splitter.split(document);
            // Embed segments (convert them into vectors that represent the meaning) using embedding model
            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
            // Store embeddings into embedding store for further search / retrieval
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            embeddingStore.addAll(embeddings, segments);
            // Specify the question you want to ask the model
            String question = "Who is Charlie?";
            // Embed the question
            Embedding questionEmbedding = embeddingModel.embed(question).content();
            // Find relevant embeddings in embedding store by semantic similarity
            // You can play with parameters below to find a sweet spot for your specific use case
            int maxResults = 3;
            double minScore = 0.7;
            List<EmbeddingMatch<TextSegment>> relevantEmbeddings
                    = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);
            // Create a prompt for the model that includes question and relevant embeddings
            PromptTemplate promptTemplate = PromptTemplate.from(
                    "Answer the following question to the best of your ability:\n"
                            + "\n"
                            + "Question:\n"
                            + "{{question}}\n"
                            + "\n"
                            + "Base your answer on the following information:\n"
                            + "{{information}}");
            String information = relevantEmbeddings.stream()
                    .map(match -> match.embedded().text())
                    .collect(joining("\n\n"));
            Map<String, Object> variables = new HashMap<>();
            variables.put("question", question);
            variables.put("information", information);
            Prompt prompt = promptTemplate.apply(variables);
            // Send the prompt to the OpenAI chat model
            ChatLanguageModel chatModel = OpenAiChatModel.builder()
                    .apiKey(ApiKeys.OPENAI_API_KEY)
                    .timeout(Duration.ofSeconds(60))
                    .build();
            AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content();
            // See an answer from the model
            String answer = aiMessage.text();
            System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille...
        }
    }
    /**
     * Resolves a classpath resource name to a filesystem {@link Path}.
     * Wraps the checked {@link URISyntaxException} as unchecked since a bad
     * resource URI is a programming error, not a recoverable condition.
     */
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = ChatWithDocumentsExamples.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1935, 2179), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1935, 2149), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1935, 2096), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1935, 2043), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2376, 2794), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2376, 2600), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2376, 2506), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((5821, 5983), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5821, 5953), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5821, 5899), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
/**
 * Live (network-hitting) test: ingests a sample document into an in-memory
 * vector store and verifies a ConversationalRetrievalChain answers a question
 * grounded in that document via the OpenAI chat model.
 */
public class ChainWithDocumentLiveTest {
    private static final Logger logger = LoggerFactory.getLogger(ChainWithDocumentLiveTest.class);
    @Test
    public void givenChainWithDocument_whenPrompted_thenValidResponse() {
        // Local embedding model feeding an in-memory store.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        // Split into <=500-token segments with no overlap, embed, and store.
        EmbeddingStoreIngestor documentIngestor = EmbeddingStoreIngestor.builder()
            .documentSplitter(DocumentSplitters.recursive(500, 0))
            .embeddingModel(embeddingModel)
            .embeddingStore(embeddingStore)
            .build();
        Document sampleDocument = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        documentIngestor.ingest(sampleDocument);
        // OpenAI-backed chat model with a generous timeout for the live call.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
            .apiKey(Constants.OPENAI_API_KEY)
            .timeout(ofSeconds(60))
            .build();
        // Custom prompt template injecting the retrieved segments as context.
        String templateText = "Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}";
        ConversationalRetrievalChain retrievalChain = ConversationalRetrievalChain.builder()
            .chatLanguageModel(chatModel)
            .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .promptTemplate(PromptTemplate.from(templateText))
            .build();
        String answer = retrievalChain.execute("Who is Simpson?");
        logger.info(answer);
        // Live model output is nondeterministic; only assert an answer came back.
        assertNotNull(answer);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1556, 1764), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1743), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1699), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1655), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1955, 2083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2062), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2026), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2131, 2577), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2556), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2365), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2296), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2211), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package org.acme.examples.aiservices;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.data.message.ChatMessageType.AI;
import static dev.langchain4j.data.message.ChatMessageType.USER;
import static io.quarkiverse.langchain4j.openai.test.WiremockUtils.DEFAULT_TOKEN;
import static org.acme.examples.aiservices.MessageAssertUtils.assertMultipleRequestMessage;
import static org.acme.examples.aiservices.MessageAssertUtils.assertSingleRequestMessage;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.tuple;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import jakarta.enterprise.context.control.ActivateRequestContext;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.stubbing.Scenario;
import com.github.tomakehurst.wiremock.stubbing.ServeEvent;
import com.github.tomakehurst.wiremock.verification.LoggedRequest;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkiverse.langchain4j.openai.test.WiremockUtils;
import io.quarkus.arc.Arc;
import io.quarkus.test.QuarkusUnitTest;
public class DeclarativeAiServicesTest {
private static final int WIREMOCK_PORT = 8089;
@RegisterExtension
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClasses(WiremockUtils.class, MessageAssertUtils.class))
.overrideRuntimeConfigKey("quarkus.langchain4j.openai.api-key", "whatever")
.overrideRuntimeConfigKey("quarkus.langchain4j.openai.base-url", "http://localhost:" + WIREMOCK_PORT + "/v1");
private static final TypeReference<Map<String, Object>> MAP_TYPE_REF = new TypeReference<>() {
};
static WireMockServer wireMockServer;
static ObjectMapper mapper;
    @BeforeAll
    static void beforeAll() {
        // One WireMock server and one shared ObjectMapper for the whole class.
        wireMockServer = new WireMockServer(options().port(WIREMOCK_PORT));
        wireMockServer.start();
        mapper = new ObjectMapper();
    }
    @AfterAll
    static void afterAll() {
        // Release the port held by the mock OpenAI server.
        wireMockServer.stop();
    }
    @BeforeEach
    void setup() {
        // Clear recorded requests/stubs so each test sees only its own traffic,
        // then install the default chat-completions stub.
        wireMockServer.resetAll();
        wireMockServer.stubFor(WiremockUtils.defaultChatCompletionsStub());
    }
interface AssistantBase {
String chat(String message);
}
    // AI service registered on the sub-interface; chat() comes from the parent.
    @RegisterAiService
    interface Assistant extends AssistantBase {
        String chat2(String message);
    }
@Inject
Assistant assistant;
    @Test
    @ActivateRequestContext
    public void test_simple_instruction_with_single_argument_and_no_annotations_from_super() throws IOException {
        // Method inherited from AssistantBase: the raw argument becomes the user message.
        String result = assistant.chat("Tell me a joke about developers");
        assertThat(result).isNotBlank();
        assertSingleRequestMessage(getRequestAsMap(), "Tell me a joke about developers");
    }
    @Test
    @ActivateRequestContext
    public void test_simple_instruction_with_single_argument_and_no_annotations_from_iface() throws IOException {
        // Same behavior for a method declared directly on the registered interface.
        String result = assistant.chat2("Tell me a joke about developers");
        assertThat(result).isNotBlank();
        assertSingleRequestMessage(getRequestAsMap(), "Tell me a joke about developers");
    }
    // Stub retriever that always returns a single "dummy" segment, so tests can
    // assert exactly how retrieved content is appended to the prompt.
    @Singleton
    public static class DummyRetriever implements Retriever<TextSegment> {
        @Override
        public List<TextSegment> findRelevant(String text) {
            return List.of(TextSegment.from("dummy"));
        }
    }
    // AI service wired to the stub retriever above.
    @RegisterAiService(retriever = DummyRetriever.class)
    interface AssistantWithRetriever {
        String chat(String message);
    }
@Inject
AssistantWithRetriever assistantWithRetriever;
    @Test
    @ActivateRequestContext
    public void test_simple_instruction_with_retriever() throws IOException {
        String result = assistantWithRetriever.chat("Tell me a joke about developers");
        assertThat(result).isNotBlank();
        // Retrieved segment ("dummy") must be appended to the outgoing user message.
        assertSingleRequestMessage(getRequestAsMap(),
                "Tell me a joke about developers\n\nAnswer using the following information:\ndummy");
    }
enum Sentiment {
POSITIVE,
NEUTRAL,
NEGATIVE
}
    // AI service whose return type (enum) makes the framework append
    // a "answer strictly in the following format" instruction to the prompt.
    @RegisterAiService
    interface SentimentAnalyzer {
        @UserMessage("Analyze sentiment of {it}")
        Sentiment analyzeSentimentOf(String text);
    }
@Inject
SentimentAnalyzer sentimentAnalyzer;
    @Test
    @ActivateRequestContext
    void test_extract_enum() throws IOException {
        // Stub the model to reply with a bare enum constant name.
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(), "POSITIVE"));
        Sentiment sentiment = sentimentAnalyzer
                .analyzeSentimentOf("This LaptopPro X15 is wicked fast and that 4K screen is a dream.");
        assertThat(sentiment).isEqualTo(Sentiment.POSITIVE);
        // The request must carry both the template text and the enum-format instruction.
        assertSingleRequestMessage(getRequestAsMap(),
                "Analyze sentiment of This LaptopPro X15 is wicked fast and that 4K screen is a dream.\nYou must answer strictly in the following format: one of [POSITIVE, NEUTRAL, NEGATIVE]");
    }
    // Tool bean exposed to the AI service. The injected Runnable fires AFTER the
    // tool result is computed — it advances the WireMock scenario so the next
    // stubbed model response (the final answer) is served.
    @Singleton
    static class Calculator {
        private final Runnable after;
        Calculator(CalculatorAfter after) {
            this.after = after;
        }
        @Tool("calculates the square root of the provided number")
        double squareRoot(double number) {
            var result = Math.sqrt(number);
            after.run();
            return result;
        }
    }
private static final String scenario = "tools";
private static final String secondState = "second";
    // Flips the WireMock scenario to its second state once the tool has run,
    // switching the stubbed completion from the tool-call to the final answer.
    @Singleton
    public static class CalculatorAfter implements Runnable {
        @Override
        public void run() {
            wireMockServer.setScenarioState(scenario, secondState);
        }
    }
    // CDI producer: one 10-message window memory per memory id, persisted
    // through the injected ChatMemoryStore (CustomChatMemoryStore below).
    public static class ChatMemoryProviderProducer {
        @Singleton
        ChatMemoryProvider chatMemory(ChatMemoryStore store) {
            return memoryId -> MessageWindowChatMemory.builder()
                    .id(memoryId)
                    .maxMessages(10)
                    .chatMemoryStore(store)
                    .build();
        }
    }
@Singleton
public static class CustomChatMemoryStore implements ChatMemoryStore {
// emulating persistent storage
private final Map</* memoryId */ Object, String> persistentStorage = new HashMap<>();
@Override
public List<ChatMessage> getMessages(Object memoryId) {
return messagesFromJson(persistentStorage.get(memoryId));
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
persistentStorage.put(memoryId, messagesToJson(messages));
}
@Override
public void deleteMessages(Object memoryId) {
persistentStorage.remove(memoryId);
}
}
    // AI service with the Calculator registered as a callable tool.
    @RegisterAiService(tools = Calculator.class)
    interface AssistantWithCalculator extends AssistantBase {
    }
@Inject
AssistantWithCalculator assistantWithCalculator;
    /**
     * Verifies the full tool-calling round trip: the first stubbed completion
     * asks for the squareRoot tool; executing the tool advances the WireMock
     * scenario; the second stubbed completion returns the final answer, which
     * must be built from a conversation containing the tool result.
     */
    @Test
    @ActivateRequestContext
    void should_execute_tool_then_answer() throws IOException {
        var firstResponse = """
                {
                  "id": "chatcmpl-8D88Dag1gAKnOPP9Ed4bos7vSpaNz",
                  "object": "chat.completion",
                  "created": 1698140213,
                  "model": "gpt-3.5-turbo-0613",
                  "choices": [
                    {
                      "index": 0,
                      "message": {
                        "role": "assistant",
                        "content": null,
                        "function_call": {
                          "name": "squareRoot",
                          "arguments": "{\\n \\"number\\": 485906798473894056\\n}"
                        }
                      },
                      "finish_reason": "function_call"
                    }
                  ],
                  "usage": {
                    "prompt_tokens": 65,
                    "completion_tokens": 20,
                    "total_tokens": 85
                  }
                }
                """;
        var secondResponse = """
                {
                  "id": "chatcmpl-8D88FIAUWSpwLaShFr0w8G1SWuVdl",
                  "object": "chat.completion",
                  "created": 1698140215,
                  "model": "gpt-3.5-turbo-0613",
                  "choices": [
                    {
                      "index": 0,
                      "message": {
                        "role": "assistant",
                        "content": "The square root of 485,906,798,473,894,056 in scientific notation is approximately 6.97070153193991E8."
                      },
                      "finish_reason": "stop"
                    }
                  ],
                  "usage": {
                    "prompt_tokens": 102,
                    "completion_tokens": 33,
                    "total_tokens": 135
                  }
                }
                """;
        // Scenario state STARTED -> tool-call response; secondState -> final answer.
        wireMockServer.stubFor(
                WiremockUtils.chatCompletionMapping(DEFAULT_TOKEN)
                        .inScenario(scenario)
                        .whenScenarioStateIs(Scenario.STARTED)
                        .willReturn(WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody(firstResponse)));
        wireMockServer.stubFor(
                WiremockUtils.chatCompletionMapping(DEFAULT_TOKEN)
                        .inScenario(scenario)
                        .whenScenarioStateIs(secondState)
                        .willReturn(WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody(secondResponse)));
        wireMockServer.setScenarioState(scenario, Scenario.STARTED);
        String userMessage = "What is the square root of 485906798473894056 in scientific notation?";
        String answer = assistantWithCalculator.chat(userMessage);
        assertThat(answer).isEqualTo(
                "The square root of 485,906,798,473,894,056 in scientific notation is approximately 6.97070153193991E8.");
        // Exactly two model calls: the tool request and the follow-up with the tool result.
        assertThat(wireMockServer.getAllServeEvents()).hasSize(2);
        // NOTE: serve events are returned newest-first, hence index 1 is the first request.
        assertSingleRequestMessage(getRequestAsMap(getRequestBody(wireMockServer.getAllServeEvents().get(1))),
                "What is the square root of 485906798473894056 in scientific notation?");
        assertMultipleRequestMessage(getRequestAsMap(getRequestBody(wireMockServer.getAllServeEvents().get(0))),
                List.of(
                        new MessageAssertUtils.MessageContent("user",
                                "What is the square root of 485906798473894056 in scientific notation?"),
                        new MessageAssertUtils.MessageContent("assistant", null),
                        new MessageAssertUtils.MessageContent("function", "6.97070153193991E8")));
    }
    // AI service keyed by @MemoryId: each id gets its own chat memory.
    @RegisterAiService
    interface ChatWithSeparateMemoryForEachUser {
        String chat(@MemoryId int memoryId, @UserMessage String userMessage);
    }
@Inject
ChatWithSeparateMemoryForEachUser chatWithSeparateMemoryForEachUser;
    /**
     * Interleaves two conversations (memory ids 1 and 2) and checks, after each
     * exchange, that the outgoing request, the response, and the persisted
     * memory in the ChatMemoryStore stay fully isolated per memory id.
     */
    @Test
    @ActivateRequestContext
    void should_keep_separate_chat_memory_for_each_user_in_store() throws IOException {
        ChatMemoryStore store = Arc.container().instance(ChatMemoryStore.class).get();
        int firstMemoryId = 1;
        int secondMemoryId = 2;
        /* **** First request for user 1 **** */
        String firstMessageFromFirstUser = "Hello, my name is Klaus";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Nice to meet you Klaus"));
        String firstAiResponseToFirstUser = chatWithSeparateMemoryForEachUser.chat(firstMemoryId, firstMessageFromFirstUser);
        // assert response
        assertThat(firstAiResponseToFirstUser).isEqualTo("Nice to meet you Klaus");
        // assert request
        assertSingleRequestMessage(getRequestAsMap(), firstMessageFromFirstUser);
        // assert chat memory
        assertThat(store.getMessages(firstMemoryId)).hasSize(2)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromFirstUser), tuple(AI, firstAiResponseToFirstUser));
        /* **** First request for user 2 **** */
        wireMockServer.resetRequests();
        String firstMessageFromSecondUser = "Hello, my name is Francine";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Nice to meet you Francine"));
        String firstAiResponseToSecondUser = chatWithSeparateMemoryForEachUser.chat(secondMemoryId, firstMessageFromSecondUser);
        // assert response
        assertThat(firstAiResponseToSecondUser).isEqualTo("Nice to meet you Francine");
        // assert request
        assertSingleRequestMessage(getRequestAsMap(), firstMessageFromSecondUser);
        // assert chat memory
        assertThat(store.getMessages(secondMemoryId)).hasSize(2)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromSecondUser), tuple(AI, firstAiResponseToSecondUser));
        /* **** Second request for user 1 **** */
        wireMockServer.resetRequests();
        String secondsMessageFromFirstUser = "What is my name?";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Your name is Klaus"));
        String secondAiMessageToFirstUser = chatWithSeparateMemoryForEachUser.chat(firstMemoryId, secondsMessageFromFirstUser);
        // assert response
        assertThat(secondAiMessageToFirstUser).contains("Klaus");
        // assert request: the full history of user 1 (and only user 1) is replayed
        assertMultipleRequestMessage(getRequestAsMap(),
                List.of(
                        new MessageAssertUtils.MessageContent("user", firstMessageFromFirstUser),
                        new MessageAssertUtils.MessageContent("assistant", firstAiResponseToFirstUser),
                        new MessageAssertUtils.MessageContent("user", secondsMessageFromFirstUser)));
        // assert chat memory
        assertThat(store.getMessages(firstMemoryId)).hasSize(4)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromFirstUser), tuple(AI, firstAiResponseToFirstUser),
                        tuple(USER, secondsMessageFromFirstUser), tuple(AI, secondAiMessageToFirstUser));
        /* **** Second request for user 2 **** */
        wireMockServer.resetRequests();
        String secondsMessageFromSecondUser = "What is my name?";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Your name is Francine"));
        String secondAiMessageToSecondUser = chatWithSeparateMemoryForEachUser.chat(secondMemoryId,
                secondsMessageFromSecondUser);
        // assert response
        assertThat(secondAiMessageToSecondUser).contains("Francine");
        // assert request
        assertMultipleRequestMessage(getRequestAsMap(),
                List.of(
                        new MessageAssertUtils.MessageContent("user", firstMessageFromSecondUser),
                        new MessageAssertUtils.MessageContent("assistant", firstAiResponseToSecondUser),
                        new MessageAssertUtils.MessageContent("user", secondsMessageFromSecondUser)));
        // assert chat memory
        assertThat(store.getMessages(secondMemoryId)).hasSize(4)
                .extracting(ChatMessage::type, ChatMessage::text)
                .containsExactly(tuple(USER, firstMessageFromSecondUser), tuple(AI, firstAiResponseToSecondUser),
                        tuple(USER, secondsMessageFromSecondUser), tuple(AI, secondAiMessageToSecondUser));
    }
    // AI service explicitly opting out of chat memory.
    @RegisterAiService(chatMemoryProviderSupplier = RegisterAiService.NoChatMemoryProviderSupplier.class)
    interface NoMemoryService {
        String chat(@UserMessage String userMessage);
    }
@Inject
NoMemoryService noMemoryService;
    /**
     * With NoChatMemoryProviderSupplier, the second request must NOT replay the
     * first exchange — each call goes out with only its own user message.
     */
    @Test
    @ActivateRequestContext
    void no_memory_should_be_used() throws IOException {
        String firstUserMessage = "Hello, my name is Klaus";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "Nice to meet you Klaus"));
        String firstAiResponse = noMemoryService.chat(firstUserMessage);
        // assert response
        assertThat(firstAiResponse).isEqualTo("Nice to meet you Klaus");
        // assert request
        assertSingleRequestMessage(getRequestAsMap(), firstUserMessage);
        wireMockServer.resetRequests();
        String secondUserMessage = "What is my name";
        wireMockServer.stubFor(WiremockUtils.chatCompletionsMessageContent(Optional.empty(),
                "I don't know"));
        String secondAiResponse = noMemoryService.chat(secondUserMessage);
        // assert response
        assertThat(secondAiResponse).isEqualTo("I don't know");
        // assert request only contains the second request, so no memory is used
        assertSingleRequestMessage(getRequestAsMap(), secondUserMessage);
    }
    // Parses the single recorded request body into a generic JSON map.
    private Map<String, Object> getRequestAsMap() throws IOException {
        return getRequestAsMap(getRequestBody());
    }
    // Deserializes an arbitrary request body into a generic JSON map.
    private Map<String, Object> getRequestAsMap(byte[] body) throws IOException {
        return mapper.readValue(body, MAP_TYPE_REF);
    }
    // Returns the body of the only recorded request, asserting there is exactly one.
    private byte[] getRequestBody() {
        assertThat(wireMockServer.getAllServeEvents()).hasSize(1);
        ServeEvent serveEvent = wireMockServer.getAllServeEvents().get(0); // this works because we reset requests for Wiremock before each test
        return getRequestBody(serveEvent);
    }
    // Extracts the (non-empty) request body from a specific WireMock serve event.
    private byte[] getRequestBody(ServeEvent serveEvent) {
        LoggedRequest request = serveEvent.getRequest();
        assertThat(request.getBody()).isNotEmpty();
        return request.getBody();
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((2567, 2661), 'org.jboss.shrinkwrap.api.ShrinkWrap.create'), ((7155, 7332), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7155, 7303), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7155, 7259), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7155, 7222), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((10452, 10713), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10452, 10611), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10452, 10548), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10648, 10712), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody'), ((10764, 11021), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10764, 10918), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10764, 10860), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.chatCompletionMapping'), ((10955, 11020), 'io.quarkiverse.langchain4j.openai.test.WiremockUtils.CHAT_RESPONSE_WITHOUT_BODY.withBody'), ((12603, 12656), 'io.quarkus.arc.Arc.container'), ((12603, 12650), 'io.quarkus.arc.Arc.container')] |
package org.acme.example;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.inject.Produces;
import jakarta.inject.Inject;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import io.quarkiverse.langchain4j.RegisterAiService;
/**
 * AI service that declares no augmentor itself; the CDI-produced
 * RetrievalAugmentor below is discovered and applied automatically.
 */
@RegisterAiService
public interface AiServiceWithAutoDiscoveredRetrievalAugmentor {
    String chat(String message);
    @ApplicationScoped
    class AugmentorProducer {
        @Inject
        InMemoryEmbeddingStore<TextSegment> store;
        @Inject
        EmbeddingModel embeddingModel;
        // Produces the bean picked up by auto-discovery: retrieves at most one
        // segment from the in-memory store per query.
        @Produces
        public RetrievalAugmentor get() {
            ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                    .embeddingModel(embeddingModel)
                    .embeddingStore(store)
                    .maxResults(1)
                    .build();
            return DefaultRetrievalAugmentor.builder()
                    .contentRetriever(contentRetriever)
                    .build();
        }
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((1030, 1229), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1030, 1200), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1030, 1165), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1030, 1122), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((1250, 1370), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((1250, 1341), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
package org.jxch.capital.config;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.embedding.OnnxEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.List;
@Slf4j
@SpringBootTest
class MilvusConfigTest {
    // @Autowired
    // NOTE(review): unused field kept with its disabled injection — presumably a
    // pending Milvus wiring experiment; confirm before removing.
    private MilvusEmbeddingStore embeddingStore2;
    /**
     * Smoke test: embeds a Chinese text segment with a local ONNX text2vec
     * model, stores it in Milvus, and runs a similarity search with a related
     * query, printing the top matches.
     */
    @Test
    void milvusEmbeddingStore() {
        // EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        // Local ONNX model (text2vec-base-chinese, 768-dim output).
        EmbeddingModel embeddingModel = new OnnxEmbeddingModel("D:\\huggingface\\shibing624\\text2vec-base-chinese\\onnx\\model.onnx");
        // Dimension must match the embedding model's output size (768).
        EmbeddingStore<TextSegment> embeddingStore = MilvusEmbeddingStore.builder()
                .host("localhost")
                .port(19530)
                .dimension(768)
                .build();
        TextSegment segment1 = TextSegment.from("突破K线通常具有高成交量,有时候其成交量将是普通K线的10至20倍。成交量越高,以及急速拉升的K线越多,出现重大后续行情的几率就越高。在突破之前,多头和空头均在分批建仓,争夺市场的控制权,双方均试图在各自方向成功突破。一旦出现明确的突破,输的一方会很快斩仓止损,而赢的一方甚至会更为激进地加仓。结果是一根或多根趋势K线,通常伴有高成交量。成交量并非总是特别高,但是当其为近期K线平均水平的10倍或更高时,成功突破的概率就更高。成功突破指的是拥有多根后续K线。此外,在几根K线之内失败的突破也可能伴有非比寻常的高成交量,但是这种情况较不常见。成交量的可靠性不足以指导决策,而构成急速拉升的大型趋势K线已经告诉你突破是否很可能会成功。试图将成交量纳人考虑,更多时候会让你分心,妨碍你发挥最佳水平。");
        Embedding embedding1 = embeddingModel.embed(segment1).content();
        embeddingStore.add(embedding1, segment1);
        Embedding queryEmbedding = embeddingModel.embed("如K线重卺,影线越来越长,反方向的趋势K线和回调K线。尽管趋势可能持续很长时间,但是这一段趋势通常会被回撤,成为交易区间的一部分。举例而言,在个急速与通道的上涨趋势形态中,急速拉升是突破,通道通常成为交易区间的第一腿,因此经常被回撤。").content();
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 8);
        // NOTE(review): embeddingMatch is unused; consider asserting on it or removing.
        EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0);
        for (EmbeddingMatch<TextSegment> match : relevant) {
            System.out.println(match.score()); // similarity score of this match
            System.out.println(match.embedded().text()); // matched segment text
        }
    }
}
"dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder"
] | [((1007, 1158), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((1007, 1133), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((1007, 1101), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((1007, 1072), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder')] |
package dev.langchain4j.chain;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.rag.content.injector.DefaultContentInjector;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.content.Content;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.retriever.Retriever;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;

import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link ConversationalRetrievalChain}.
 * <p>
 * Each test verifies two things: (1) the exact prompt text that the chain sends to the
 * (mocked) {@link ChatLanguageModel} after injecting retrieved content, and (2) that the
 * user message and the model's answer are both recorded in the chat memory.
 * Covers the current {@link ContentRetriever}-based API as well as the deprecated
 * {@link Retriever}-based API (the "backward_compatibility" tests).
 */
@ExtendWith(MockitoExtension.class)
class ConversationalRetrievalChainTest {

    private static final String QUERY = "query";
    private static final String ANSWER = "answer";

    @Mock
    ChatLanguageModel chatLanguageModel;

    @Mock
    ContentRetriever contentRetriever;

    // Deprecated retrieval abstraction, kept to verify backward compatibility.
    @Mock
    Retriever<TextSegment> retriever;

    // Real memory instance (spied) so the tests can assert on the stored messages.
    @Spy
    ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

    // Captures the exact message list forwarded to the chat model.
    @Captor
    ArgumentCaptor<List<ChatMessage>> messagesCaptor;

    @BeforeEach
    void beforeEach() {
        // The model always answers with ANSWER; the tests only care about the prompt it receives.
        when(chatLanguageModel.generate(anyList())).thenReturn(Response.from(aiMessage(ANSWER)));
    }

    // Default prompt template: retrieved segments are appended after the question.
    @Test
    void should_inject_retrieved_segments() {
        // given
        when(contentRetriever.retrieve(any())).thenReturn(asList(
                Content.from("Segment 1"),
                Content.from("Segment 2")
        ));

        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .contentRetriever(contentRetriever)
                .build();

        // when
        String answer = chain.execute(QUERY);

        // then
        assertThat(answer).isEqualTo(ANSWER);

        verify(chatLanguageModel).generate(messagesCaptor.capture());
        UserMessage expectedUserMessage = UserMessage.from(
                "query\n" +
                        "\n" +
                        "Answer using the following information:\n" +
                        "Segment 1\n" +
                        "\n" +
                        "Segment 2");
        assertThat(messagesCaptor.getValue()).containsExactly(expectedUserMessage);

        // Both the augmented user message and the AI answer must be stored in memory.
        assertThat(chatMemory.messages()).containsExactly(
                expectedUserMessage,
                AiMessage.from(ANSWER)
        );
    }

    // Custom prompt template supplied via DefaultRetrievalAugmentor/DefaultContentInjector.
    @Test
    void should_inject_retrieved_segments_using_custom_prompt_template() {
        // given
        when(contentRetriever.retrieve(any())).thenReturn(asList(
                Content.from("Segment 1"),
                Content.from("Segment 2")
        ));

        PromptTemplate promptTemplate = PromptTemplate.from(
                "Answer '{{userMessage}}' using '{{contents}}'");

        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .retrievalAugmentor(DefaultRetrievalAugmentor.builder()
                        .contentRetriever(contentRetriever)
                        .contentInjector(DefaultContentInjector.builder()
                                .promptTemplate(promptTemplate)
                                .build())
                        .build())
                .build();

        // when
        String answer = chain.execute(QUERY);

        // then
        assertThat(answer).isEqualTo(ANSWER);

        verify(chatLanguageModel).generate(messagesCaptor.capture());
        UserMessage expectedUserMessage = UserMessage.from(
                "Answer 'query' using 'Segment 1\n\nSegment 2'");
        assertThat(messagesCaptor.getValue()).containsExactly(expectedUserMessage);

        assertThat(chatMemory.messages()).containsExactly(
                expectedUserMessage,
                AiMessage.from(ANSWER)
        );
    }

    // Deprecated Retriever API, default template: note the different prompt wording
    // ("Answer the following question..." / "Base your answer on...").
    @Test
    void test_backward_compatibility_should_inject_retrieved_segments() {
        // given
        when(retriever.findRelevant(QUERY)).thenReturn(asList(
                TextSegment.from("Segment 1"),
                TextSegment.from("Segment 2")
        ));
        // The adapter from Retriever to ContentRetriever is a default method; use the real one.
        when(retriever.toContentRetriever()).thenCallRealMethod();

        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .retriever(retriever)
                .build();

        // when
        String answer = chain.execute(QUERY);

        // then
        assertThat(answer).isEqualTo(ANSWER);

        verify(chatLanguageModel).generate(messagesCaptor.capture());
        UserMessage expectedUserMessage = UserMessage.from(
                "Answer the following question to the best of your ability: query\n" +
                        "\n" +
                        "Base your answer on the following information:\n" +
                        "Segment 1\n" +
                        "\n" +
                        "Segment 2");
        assertThat(messagesCaptor.getValue()).containsExactly(expectedUserMessage);

        assertThat(chatMemory.messages()).containsExactly(
                expectedUserMessage,
                AiMessage.from(ANSWER)
        );
    }

    // Deprecated Retriever API with a custom template using the legacy
    // {{question}}/{{information}} variable names.
    @Test
    void test_backward_compatibility_should_inject_retrieved_segments_using_custom_prompt_template() {
        // given
        when(retriever.findRelevant(QUERY)).thenReturn(asList(
                TextSegment.from("Segment 1"),
                TextSegment.from("Segment 2")
        ));
        when(retriever.toContentRetriever()).thenCallRealMethod();

        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .promptTemplate(PromptTemplate.from("Answer '{{question}}' using '{{information}}'"))
                .retriever(retriever)
                .build();

        // when
        String answer = chain.execute(QUERY);

        // then
        assertThat(answer).isEqualTo(ANSWER);

        verify(chatLanguageModel).generate(messagesCaptor.capture());
        UserMessage expectedUserMessage = UserMessage.from(
                "Answer 'query' using 'Segment 1\n\nSegment 2'");
        assertThat(messagesCaptor.getValue()).containsExactly(expectedUserMessage);

        assertThat(chatMemory.messages()).containsExactly(
                expectedUserMessage,
                AiMessage.from(ANSWER)
        );
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.injector.DefaultContentInjector.builder"
] | [((3725, 4033), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((3725, 4000), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((3725, 3820), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((3862, 3999), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((3862, 3958), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder')] |
package dev.langchain4j.rag.content.injector;

import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.rag.content.Content;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@link DefaultContentInjector}.
 * <p>
 * Verifies the exact text produced when retrieved {@link Content} is injected into a
 * {@link UserMessage}: no-op on empty content, the default prompt wording, optional
 * inclusion of selected metadata entries, and custom {@link PromptTemplate}s.
 * Parameterized tests exercise both the constructor-based and builder-based ways of
 * configuring the injector, expecting identical output from each.
 */
class DefaultContentInjectorTest {

    // With no retrieved content, the original message must be returned unchanged.
    @Test
    void should_not_inject_when_no_content() {
        // given
        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        List<Content> contents = emptyList();

        ContentInjector injector = new DefaultContentInjector();

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected).isEqualTo(userMessage);
    }

    @Test
    void should_inject_single_content() {
        // given
        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        List<Content> contents = singletonList(Content.from("Bananas are awesome!"));

        ContentInjector injector = new DefaultContentInjector();

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected.text()).isEqualTo(
                "Tell me about bananas.\n" +
                        "\n" +
                        "Answer using the following information:\n" +
                        "Bananas are awesome!"
        );
    }

    // When metadata keys are requested, each segment is rendered as
    // "content: ..." followed by "key: value" lines for the included keys.
    @Test
    void should_inject_single_content_with_metadata() {
        // given
        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        TextSegment segment = TextSegment.from(
                "Bananas are awesome!",
                Metadata.from("source", "trust me bro")
        );
        List<Content> contents = singletonList(Content.from(segment));

        List<String> metadataKeysToInclude = singletonList("source");

        ContentInjector injector = new DefaultContentInjector(metadataKeysToInclude);

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected.text()).isEqualTo(
                "Tell me about bananas.\n" +
                        "\n" +
                        "Answer using the following information:\n" +
                        "content: Bananas are awesome!\n" +
                        "source: trust me bro"
        );
    }

    // Multiple contents are separated by a blank line.
    @Test
    void should_inject_multiple_contents() {
        // given
        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        List<Content> contents = asList(
                Content.from("Bananas are awesome!"),
                Content.from("Bananas are healthy!")
        );

        ContentInjector injector = new DefaultContentInjector();

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected.text()).isEqualTo(
                "Tell me about bananas.\n" +
                        "\n" +
                        "Answer using the following information:\n" +
                        "Bananas are awesome!\n" +
                        "\n" +
                        "Bananas are healthy!"
        );
    }

    // Metadata entries appear in the order of metadataKeysToInclude, and keys absent
    // from a segment's metadata are simply skipped (e.g. "reliability" for segment1).
    @ParameterizedTest
    @MethodSource
    void should_inject_multiple_contents_with_multiple_metadata_entries(
            Function<List<String>, ContentInjector> contentInjectorProvider
    ) {
        // given
        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        TextSegment segment1 = TextSegment.from(
                "Bananas are awesome!",
                Metadata.from("source", "trust me bro")
                        .add("date", "today")
        );
        TextSegment segment2 = TextSegment.from(
                "Bananas are healthy!",
                Metadata.from("source", "my doctor")
                        .add("reliability", "100%")
        );
        List<Content> contents = asList(Content.from(segment1), Content.from(segment2));

        List<String> metadataKeysToInclude = asList("source", "reliability", "date");

        ContentInjector injector = contentInjectorProvider.apply(metadataKeysToInclude);

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected.text()).isEqualTo(
                "Tell me about bananas.\n" +
                        "\n" +
                        "Answer using the following information:\n" +
                        "content: Bananas are awesome!\n" +
                        "source: trust me bro\n" +
                        "date: today\n" +
                        "\n" +
                        "content: Bananas are healthy!\n" +
                        "source: my doctor\n" +
                        "reliability: 100%"
        );
    }

    // Supplies two equivalent configurations: constructor reference and builder.
    static Stream<Arguments> should_inject_multiple_contents_with_multiple_metadata_entries() {
        return Stream.<Arguments>builder()
                .add(Arguments.of(
                        (Function<List<String>, ContentInjector>) DefaultContentInjector::new
                ))
                .add(Arguments.of(
                        (Function<List<String>, ContentInjector>)
                                (metadataKeysToInclude) -> DefaultContentInjector.builder()
                                        .metadataKeysToInclude(metadataKeysToInclude)
                                        .build()
                ))
                .build();
    }

    @ParameterizedTest
    @MethodSource
    void should_inject_multiple_contents_with_custom_prompt_template(
            Function<PromptTemplate, ContentInjector> contentInjectorProvider) {
        // given
        PromptTemplate promptTemplate = PromptTemplate.from("{{userMessage}}\n{{contents}}");

        UserMessage userMessage = UserMessage.from("Tell me about bananas.");
        List<Content> contents = asList(
                Content.from("Bananas are awesome!"),
                Content.from("Bananas are healthy!")
        );

        ContentInjector injector = contentInjectorProvider.apply(promptTemplate);

        // when
        UserMessage injected = injector.inject(contents, userMessage);

        // then
        assertThat(injected.text()).isEqualTo(
                "Tell me about bananas.\n" +
                        "Bananas are awesome!\n" +
                        "\n" +
                        "Bananas are healthy!"
        );
    }

    // Supplies two equivalent configurations: constructor reference and builder.
    static Stream<Arguments> should_inject_multiple_contents_with_custom_prompt_template() {
        return Stream.<Arguments>builder()
                .add(Arguments.of(
                        (Function<PromptTemplate, ContentInjector>) DefaultContentInjector::new
                ))
                .add(Arguments.of(
                        (Function<PromptTemplate, ContentInjector>)
                                (promptTemplate) -> DefaultContentInjector.builder()
                                        .promptTemplate(promptTemplate)
                                        .build()
                ))
                .build();
    }
}
"dev.langchain4j.data.document.Metadata.from"
] | [((4092, 4177), 'dev.langchain4j.data.document.Metadata.from'), ((4294, 4382), 'dev.langchain4j.data.document.Metadata.from'), ((5423, 5970), 'java.util.stream.Stream.<Arguments>builder'), ((5423, 5945), 'java.util.stream.Stream.<Arguments>builder'), ((5423, 5598), 'java.util.stream.Stream.<Arguments>builder'), ((7055, 7585), 'java.util.stream.Stream.<Arguments>builder'), ((7055, 7560), 'java.util.stream.Stream.<Arguments>builder'), ((7055, 7232), 'java.util.stream.Stream.<Arguments>builder')] |
package io.thomasvitale.langchain4j.spring.openai;

import java.util.List;

import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import org.json.JSONException;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.skyscreamer.jsonassert.JSONAssert;
import org.skyscreamer.jsonassert.JSONCompareMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.client.RestClient;

import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionFinishReason;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionRequest;
import io.thomasvitale.langchain4j.spring.openai.client.OpenAiClient;
import io.thomasvitale.langchain4j.spring.openai.client.OpenAiClientConfig;

import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.output.FinishReason.STOP;
import static dev.langchain4j.model.output.FinishReason.TOOL_EXECUTION;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link OpenAiChatModel}.
 * <p>
 * Adapted from OpenAiChatModelIT in the LangChain4j project.
 * <p>
 * These tests call the real OpenAI API and therefore only run when the
 * {@code LANGCHAIN4J_OPENAI_CLIENT_API_KEY} environment variable is set.
 */
@EnabledIfEnvironmentVariable(named = "LANGCHAIN4J_OPENAI_CLIENT_API_KEY", matches = ".*")
class OpenAiChatModelIT {

    private static final Logger logger = LoggerFactory.getLogger(OpenAiChatModelIT.class);

    public static final String MODEL_NAME = "gpt-3.5-turbo";

    // Shared across tests; created once from the API key in beforeAll().
    private static OpenAiClient openAiClient;

    // Tool used by the function-calling tests: a two-integer sum.
    private ToolSpecification calculator = ToolSpecification.builder()
            .name("calculator")
            .description("returns a sum of two numbers")
            .addParameter("first", INTEGER)
            .addParameter("second", INTEGER)
            .build();

    @BeforeAll
    static void beforeAll() {
        var apiKey = System.getenv("LANGCHAIN4J_OPENAI_CLIENT_API_KEY");
        openAiClient = new OpenAiClient(OpenAiClientConfig.builder().apiKey(apiKey).build(), RestClient.builder());
    }

    // Plain text generation: checks answer content, token accounting, and finish reason.
    @Test
    void generateText() {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().build())
                .build();

        UserMessage userMessage = UserMessage.from("What is the capital of Italy?");

        Response<AiMessage> response = chatModel.generate(userMessage);
        logger.info("Response: \n" + response);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).contains("Rome");
        assertThat(aiMessage.toolExecutionRequests()).isNull();

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason().name()).isEqualToIgnoringCase(ChatCompletionFinishReason.STOP.name());
    }

    // With maxTokens(1) the model must stop early with finish reason LENGTH.
    @Test
    void generateTextTooLong() {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder()
                        .maxTokens(1).build())
                .build();

        UserMessage userMessage = UserMessage.from("What is the capital of Denmark?");

        Response<AiMessage> response = chatModel.generate(userMessage);
        logger.info("Response: \n" + response);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).isNotBlank();

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.outputTokenCount()).isEqualTo(1);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason().name()).isEqualToIgnoringCase(ChatCompletionFinishReason.LENGTH.name());
    }

    // Few-shot prompting: the ">>> N" examples should steer the output format.
    @Test
    void generateTextWithFewShots() {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().model(MODEL_NAME).build())
                .build();

        List<ChatMessage> messages = List.of(
                UserMessage.from("1 + 1 ="), AiMessage.from(">>> 2"),
                UserMessage.from("2 + 2 ="), AiMessage.from(">>> 4"),
                UserMessage.from("4 + 4 ="));

        Response<AiMessage> response = chatModel.generate(messages);
        logger.info("Response: \n" + response);

        assertThat(response.content().text()).startsWith(">>> 8");
    }

    @Test
    void generateTextWithSystemMessage() {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().model(MODEL_NAME).build())
                .build();

        SystemMessage systemMessage = SystemMessage.from("Start every answer with Ahoy");
        UserMessage userMessage = UserMessage.from("Hello, captain!");

        Response<AiMessage> response = chatModel.generate(systemMessage, userMessage);
        logger.info("Response: \n" + response);

        assertThat(response.content().text()).containsIgnoringCase("Ahoy");
    }

    // maxTokens caps the output so tightly the answer cannot contain the full word.
    @Test
    void generateTextWithNumPredict() {
        var maximumOutputTokens = 1;
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().model(MODEL_NAME).maxTokens(maximumOutputTokens).build())
                .build();

        UserMessage userMessage = UserMessage.from("What is the capital of Italy?");

        Response<AiMessage> response = chatModel.generate(userMessage);
        logger.info("Response: \n" + response);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).doesNotContain("Italy");
        assertThat(response.tokenUsage().outputTokenCount()).isEqualTo(maximumOutputTokens);
    }

    // JSON mode via response_format=json_object; output compared as parsed JSON.
    @Test
    void generateTextAsJson() throws JSONException {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().model(MODEL_NAME)
                        .responseFormat(new ChatCompletionRequest.ResponseFormat("json_object")).build())
                .build();

        String response = chatModel
                .generate("Return a JSON object with two fields: location is Jungle and name is Jumanji.");
        logger.info("Response: \n" + response);

        JSONAssert.assertEquals("""
                {
                  "name": "Jumanji",
                  "location": "Jungle"
                }
                """, response, JSONCompareMode.STRICT);
    }

    // Tool calling with an explicit tool list: first turn returns a tool request,
    // second turn (after supplying the tool result) returns the final answer.
    @Test
    void executeToolExplicitlyAndThenGenerateAnswer() throws JSONException {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().build())
                .build();

        // Execute tool
        UserMessage userMessage = userMessage("2+2=?");
        List<ToolSpecification> toolSpecifications = List.of(calculator);

        Response<AiMessage> response = chatModel.generate(List.of(userMessage), toolSpecifications);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest.id()).isNotBlank();
        assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
        JSONAssert.assertEquals("""
                {
                  "first": 2,
                  "second": 2
                }
                """, toolExecutionRequest.arguments(), JSONCompareMode.STRICT);

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(FinishReason.TOOL_EXECUTION);

        // Then answer
        ToolExecutionResultMessage toolExecutionResultMessage = ToolExecutionResultMessage.from(toolExecutionRequest, "4");
        List<ChatMessage> messages = List.of(userMessage, aiMessage, toolExecutionResultMessage);

        Response<AiMessage> secondResponse = chatModel.generate(messages);

        AiMessage secondAiMessage = secondResponse.content();
        assertThat(secondAiMessage.text()).contains("4");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(FinishReason.STOP);
    }

    // Same flow as above, but the single tool is passed via the convenience overload.
    @Test
    void executeToolImplicitlyAndThenGenerateAnswer() throws JSONException {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder().build())
                .build();

        // Execute tool
        UserMessage userMessage = userMessage("2+2=?");

        Response<AiMessage> response = chatModel.generate(List.of(userMessage), calculator);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest.id()).isNotBlank();
        assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
        JSONAssert.assertEquals("""
                {
                  "first": 2,
                  "second": 2
                }
                """, toolExecutionRequest.arguments(), JSONCompareMode.STRICT);

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP); // Not sure if a bug in OpenAI or stop is expected here

        // Then answer
        ToolExecutionResultMessage toolExecutionResultMessage = ToolExecutionResultMessage.from(toolExecutionRequest, "4");
        List<ChatMessage> messages = List.of(userMessage, aiMessage, toolExecutionResultMessage);

        Response<AiMessage> secondResponse = chatModel.generate(messages);

        AiMessage secondAiMessage = secondResponse.content();
        assertThat(secondAiMessage.text()).contains("4");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(STOP);
    }

    // Parallel tool calls (gpt-3.5-turbo-1106): one request yields two tool invocations,
    // both results are fed back, and the final answer must mention both sums.
    @Test
    void executeMultipleToolsInParallelThenAnswer() throws JSONException {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .client(openAiClient)
                .options(OpenAiChatOptions.builder()
                        .model("gpt-3.5-turbo-1106")
                        .temperature(0.0)
                        .build())
                .build();

        // Execute multiple tools
        UserMessage userMessage = userMessage("2+2=? 3+3=?");
        List<ToolSpecification> toolSpecifications = List.of(calculator);

        Response<AiMessage> response = chatModel.generate(List.of(userMessage), toolSpecifications);

        AiMessage aiMessage = response.content();
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(2);

        ToolExecutionRequest toolExecutionRequest1 = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest1.name()).isEqualTo("calculator");
        JSONAssert.assertEquals("""
                {
                  "first": 2,
                  "second": 2
                }
                """, toolExecutionRequest1.arguments(), JSONCompareMode.STRICT);

        ToolExecutionRequest toolExecutionRequest2 = aiMessage.toolExecutionRequests().get(1);
        assertThat(toolExecutionRequest2.name()).isEqualTo("calculator");
        JSONAssert.assertEquals("""
                {
                  "first": 3,
                  "second": 3
                }
                """, toolExecutionRequest2.arguments(), JSONCompareMode.STRICT);

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(TOOL_EXECUTION);

        // Then answer
        ToolExecutionResultMessage toolExecutionResultMessage1 = ToolExecutionResultMessage.from(toolExecutionRequest1, "4");
        ToolExecutionResultMessage toolExecutionResultMessage2 = ToolExecutionResultMessage.from(toolExecutionRequest2, "6");
        List<ChatMessage> messages = List.of(userMessage, aiMessage, toolExecutionResultMessage1, toolExecutionResultMessage2);

        Response<AiMessage> secondResponse = chatModel.generate(messages);

        AiMessage secondAiMessage = secondResponse.content();
        assertThat(secondAiMessage.text()).contains("4", "6");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(STOP);
    }
}
| [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((2103, 2329), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2103, 2308), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2103, 2263), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2103, 2219), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2103, 2162), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2490, 2541), 'io.thomasvitale.langchain4j.spring.openai.client.OpenAiClientConfig.builder'), ((2490, 2533), 'io.thomasvitale.langchain4j.spring.openai.client.OpenAiClientConfig.builder'), ((3562, 3600), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionFinishReason.STOP.name'), ((4590, 4630), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionFinishReason.LENGTH.name')] |
package org.feuyeux.ai.langchain.hellolangchain;

import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static org.feuyeux.ai.langchain.hellolangchain.OpenApi.getKey;

import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.nio.file.Paths;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

/**
 * End-to-end RAG smoke test: embeds a small story into an in-memory store,
 * wires it into a {@link ConversationalRetrievalChain} backed by OpenAI,
 * and checks that a question about the story yields some answer.
 */
@Slf4j
public class ChainsTest {

  public static final String SIMPSON_S_ADVENTURES_TXT =
      "src/test/resources/simpson's_adventures.txt";

  /** Pauses between tests to stay under the OpenAI rate limit. */
  @AfterEach
  public void tearDown() throws InterruptedException {
    TimeUnit.SECONDS.sleep(25);
  }

  @Test
  public void givenChainWithDocument_whenPrompted_thenValidResponse() {
    // Index the sample document: split into 500-char chunks, embed locally, store in memory.
    EmbeddingModel embedder = new AllMiniLmL6V2EmbeddingModel();
    EmbeddingStore<TextSegment> store = new InMemoryEmbeddingStore<>();
    Document simpsonsDocument =
        loadDocument(Paths.get(SIMPSON_S_ADVENTURES_TXT), new TextDocumentParser());
    EmbeddingStoreIngestor storeIngestor =
        EmbeddingStoreIngestor.builder()
            .embeddingStore(store)
            .embeddingModel(embedder)
            .documentSplitter(DocumentSplitters.recursive(500, 0))
            .build();
    storeIngestor.ingest(simpsonsDocument);

    // Chain: OpenAI chat model + store-backed retriever + 10-message windowed memory.
    ChatLanguageModel model =
        OpenAiChatModel.builder().apiKey(getKey()).timeout(ofSeconds(60)).build();
    ConversationalRetrievalChain retrievalChain =
        ConversationalRetrievalChain.builder()
            .chatLanguageModel(model)
            .retriever(EmbeddingStoreRetriever.from(store, embedder))
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .promptTemplate(
                PromptTemplate.from(
                    "Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
            .build();

    // Ask about the indexed story; any non-null answer passes this smoke test.
    String response = retrievalChain.execute("Who is Simpson?");
    log.info(response);
    Assertions.assertNotNull(response);
  }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1559, 1585), 'java.util.concurrent.TimeUnit.SECONDS.sleep'), ((1872, 2080), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1872, 2059), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1872, 2015), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1872, 1971), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2258, 2331), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2258, 2323), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2258, 2300), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2383, 2867), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2383, 2846), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2383, 2617), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2383, 2548), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2383, 2463), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package dev.langchain4j.model.vertexai;
import com.google.cloud.vertexai.VertexAI;
import com.google.cloud.vertexai.api.GenerationConfig;
import com.google.cloud.vertexai.generativeai.GenerativeModel;
import dev.langchain4j.agent.tool.JsonSchemaProperty;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.service.AiServices;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import static dev.langchain4j.internal.Utils.readBytes;
import static dev.langchain4j.model.output.FinishReason.LENGTH;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.*;
class VertexAiGeminiChatModelIT {

    // Publicly hosted test images: a cat photo and a transparent dice PNG.
    static final String CAT_IMAGE_URL = "https://upload.wikimedia.org/wikipedia/commons/e/e9/Felis_silvestris_silvestris_small_gradual_decrease_of_quality.png";
    static final String DICE_IMAGE_URL = "https://upload.wikimedia.org/wikipedia/commons/4/47/PNG_transparency_demonstration_1.png";

    // Text-only Gemini model; GCP project/location are read from the environment.
    ChatLanguageModel model = VertexAiGeminiChatModel.builder()
            .project(System.getenv("GCP_PROJECT_ID"))
            .location(System.getenv("GCP_LOCATION"))
            .modelName("gemini-pro")
            .build();

    // Multimodal (vision) Gemini model used by the image-based tests below.
    ChatLanguageModel visionModel = VertexAiGeminiChatModel.builder()
            .project(System.getenv("GCP_PROJECT_ID"))
            .location(System.getenv("GCP_LOCATION"))
            .modelName("gemini-pro-vision")
            .build();

    // Basic text generation: answer plus token accounting and finish reason.
    @Test
    void should_generate_response() {
        // given
        UserMessage userMessage = UserMessage.from("What is the capital of Germany?");
        // when
        Response<AiMessage> response = model.generate(userMessage);
        System.out.println(response);
        // then
        assertThat(response.content().text()).contains("Berlin");
        TokenUsage tokenUsage = response.tokenUsage();
        // NOTE(review): exact input token count (7) is model/tokenizer specific — may break on model updates
        assertThat(tokenUsage.inputTokenCount()).isEqualTo(7);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
        assertThat(response.finishReason()).isEqualTo(FinishReason.STOP);
    }

    // The integration rejects SystemMessage with an IllegalArgumentException.
    @Test
    void should_deny_system_message() {
        // given
        SystemMessage systemMessage = SystemMessage.from("Be polite");
        UserMessage userMessage = UserMessage.from("Tell me a joke");
        // when-then
        assertThatThrownBy(() -> model.generate(systemMessage, userMessage))
                .isExactlyInstanceOf(IllegalArgumentException.class)
                .hasMessage("SystemMessage is currently not supported by Gemini");
    }

    // maxOutputTokens(1) must cap the reply at one token and finish with LENGTH.
    @Test
    void should_respect_maxOutputTokens() {
        // given
        ChatLanguageModel model = VertexAiGeminiChatModel.builder()
                .project(System.getenv("GCP_PROJECT_ID"))
                .location(System.getenv("GCP_LOCATION"))
                .modelName("gemini-pro")
                .maxOutputTokens(1)
                .build();
        UserMessage userMessage = UserMessage.from("Tell me a joke");
        // when
        Response<AiMessage> response = model.generate(userMessage);
        System.out.println(response);
        // then
        assertThat(response.content().text()).isNotBlank();
        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isEqualTo(4);
        assertThat(tokenUsage.outputTokenCount()).isEqualTo(1);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
        assertThat(response.finishReason()).isEqualTo(LENGTH);
    }

    // The wrapper also accepts a pre-built SDK GenerativeModel + GenerationConfig.
    @Test
    void should_allow_custom_generativeModel_and_generationConfig() {
        // given
        VertexAI vertexAi = new VertexAI(System.getenv("GCP_PROJECT_ID"), System.getenv("GCP_LOCATION"));
        GenerativeModel generativeModel = new GenerativeModel("gemini-pro", vertexAi);
        GenerationConfig generationConfig = GenerationConfig.getDefaultInstance();
        ChatLanguageModel model = new VertexAiGeminiChatModel(generativeModel, generationConfig);
        UserMessage userMessage = UserMessage.from("What is the capital of Germany?");
        // when
        Response<AiMessage> response = model.generate(userMessage);
        System.out.println(response);
        // then
        assertThat(response.content().text()).contains("Berlin");
    }

    // Vision: image referenced by public HTTPS URL.
    @Test
    void should_accept_text_and_image_from_public_url() {
        // given
        UserMessage userMessage = UserMessage.from(
                ImageContent.from(CAT_IMAGE_URL),
                TextContent.from("What do you see? Reply in one word.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text()).containsIgnoringCase("cat");
    }

    // Vision: image referenced by a Google Cloud Storage (gs://) URI.
    @Test
    void should_accept_text_and_image_from_google_storage_url() {
        // given
        UserMessage userMessage = UserMessage.from(
                ImageContent.from("gs://langchain4j-test/cat.png"),
                TextContent.from("What do you see? Reply in one word.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text()).containsIgnoringCase("cat");
    }

    // Vision: image supplied inline as base64 data with an explicit MIME type.
    @Test
    void should_accept_text_and_base64_image() {
        // given
        String base64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL));
        UserMessage userMessage = UserMessage.from(
                ImageContent.from(base64Data, "image/png"),
                TextContent.from("What do you see? Reply in one word.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text()).containsIgnoringCase("cat");
    }

    // Vision: several public-URL images in one message.
    @Test
    void should_accept_text_and_multiple_images_from_public_urls() {
        // given
        UserMessage userMessage = UserMessage.from(
                ImageContent.from(CAT_IMAGE_URL),
                ImageContent.from(DICE_IMAGE_URL),
                TextContent.from("What do you see? Reply with one word per image.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text())
                .containsIgnoringCase("cat")
                .containsIgnoringCase("dice");
    }

    // Vision: several gs:// images in one message.
    @Test
    void should_accept_text_and_multiple_images_from_google_storage_urls() {
        // given
        UserMessage userMessage = UserMessage.from(
                ImageContent.from("gs://langchain4j-test/cat.png"),
                ImageContent.from("gs://langchain4j-test/dice.png"),
                TextContent.from("What do you see? Reply with one word per image.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text())
                .containsIgnoringCase("cat")
                .containsIgnoringCase("dice");
    }

    // Vision: several base64 images in one message.
    @Test
    void should_accept_text_and_multiple_base64_images() {
        // given
        String catBase64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL));
        String diceBase64Data = Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL));
        UserMessage userMessage = UserMessage.from(
                ImageContent.from(catBase64Data, "image/png"),
                ImageContent.from(diceBase64Data, "image/png"),
                TextContent.from("What do you see? Reply with one word per image.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text())
                .containsIgnoringCase("cat")
                .containsIgnoringCase("dice");
    }

    // Vision: mixed sources (public URL, gs:// URI, base64) in one message.
    @Test
    void should_accept_text_and_multiple_images_from_different_sources() {
        // given
        UserMessage userMessage = UserMessage.from(
                ImageContent.from(CAT_IMAGE_URL),
                ImageContent.from("gs://langchain4j-test/dog.jpg"),
                ImageContent.from(Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)), "image/png"),
                TextContent.from("What do you see? Reply with one word per image.")
        );
        // when
        Response<AiMessage> response = visionModel.generate(userMessage);
        // then
        assertThat(response.content().text())
                .containsIgnoringCase("cat")
                .containsIgnoringCase("dog")
                .containsIgnoringCase("dice");
    }

    // Full function-calling round trip: model requests a tool call, the test
    // feeds the tool's result back, and the model produces the final answer.
    @Test
    void should_accept_tools_for_function_calling() {
        // given
        ChatLanguageModel model = VertexAiGeminiChatModel.builder()
                .project(System.getenv("GCP_PROJECT_ID"))
                .location(System.getenv("GCP_LOCATION"))
                .modelName("gemini-pro")
                .build();
        ToolSpecification weatherToolSpec = ToolSpecification.builder()
                .name("getWeatherForecast")
                .description("Get the weather forecast for a location")
                .addParameter("location", JsonSchemaProperty.STRING,
                        JsonSchemaProperty.description("the location to get the weather forecast for"))
                .build();
        List<ChatMessage> allMessages = new ArrayList<>();
        UserMessage weatherQuestion = UserMessage.from("What is the weather in Paris?");
        System.out.println("Question: " + weatherQuestion.text());
        allMessages.add(weatherQuestion);
        // when
        Response<AiMessage> messageResponse = model.generate(allMessages, weatherToolSpec);
        // then
        assertThat(messageResponse.content().hasToolExecutionRequests()).isTrue();
        ToolExecutionRequest toolExecutionRequest = messageResponse.content().toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest.arguments()).contains("Paris");
        assertThat(toolExecutionRequest.name()).isEqualTo("getWeatherForecast");
        allMessages.add(messageResponse.content());
        // when (feeding the function return value back)
        ToolExecutionResultMessage toolExecResMsg = ToolExecutionResultMessage.from(toolExecutionRequest,
                "{\"location\":\"Paris\",\"forecast\":\"sunny\", \"temperature\": 20}");
        allMessages.add(toolExecResMsg);
        Response<AiMessage> weatherResponse = model.generate(allMessages);
        // then
        System.out.println("Answer: " + weatherResponse.content().text());
        assertThat(weatherResponse.content().text()).containsIgnoringCase("sunny");
    }

    // Tool bean exposed to AiServices; prints each invocation for visibility.
    static class Calculator {

        @Tool("Adds two given numbers")
        double add(double a, double b) {
            System.out.printf("Called add(%s, %s)%n", a, b);
            return a + b;
        }

        @Tool("Multiplies two given numbers")
        String multiply(double a, double b) {
            System.out.printf("Called multiply(%s, %s)%n", a, b);
            return String.valueOf(a * b);
        }
    }

    // Minimal AiServices contract used by the tool-calling tests.
    interface Assistant {
        String chat(String userMessage);
    }

    // AiServices should route the addition to Calculator.add exactly once.
    @Test
    void should_use_tools_with_AiService() {
        // given
        Calculator calculator = spy(new Calculator());
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(calculator)
                .build();
        // when
        String answer = assistant.chat("How much is 74589613588 + 4786521789?");
        // then
        // assertThat(answer).contains("79376135377"); TODO
        verify(calculator).add(74589613588.0, 4786521789.0);
        verifyNoMoreInteractions(calculator);
    }

    // AiServices should route the multiplication to Calculator.multiply exactly once.
    @Test
    void should_use_tools_with_AiService_2() {
        // given
        Calculator calculator = spy(new Calculator());
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(calculator)
                .build();
        // when
        String answer = assistant.chat("How much is 257 * 467?");
        // then
        // assertThat(answer).contains("120019"); TODO
        verify(calculator).multiply(257, 467);
        verifyNoMoreInteractions(calculator);
    }
}
"dev.langchain4j.agent.tool.ToolSpecification.builder",
"dev.langchain4j.service.AiServices.builder"
] | [((6027, 6087), 'java.util.Base64.getEncoder'), ((7832, 7892), 'java.util.Base64.getEncoder'), ((7926, 7987), 'java.util.Base64.getEncoder'), ((8823, 8884), 'java.util.Base64.getEncoder'), ((9670, 10011), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((9670, 9986), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((9670, 9813), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((9670, 9741), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((12012, 12222), 'dev.langchain4j.service.AiServices.builder'), ((12012, 12197), 'dev.langchain4j.service.AiServices.builder'), ((12012, 12162), 'dev.langchain4j.service.AiServices.builder'), ((12012, 12089), 'dev.langchain4j.service.AiServices.builder'), ((12675, 12885), 'dev.langchain4j.service.AiServices.builder'), ((12675, 12860), 'dev.langchain4j.service.AiServices.builder'), ((12675, 12825), 'dev.langchain4j.service.AiServices.builder'), ((12675, 12752), 'dev.langchain4j.service.AiServices.builder')] |
package io.thomasvitale.langchain4j.spring.core.tool.jackson;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import org.skyscreamer.jsonassert.JSONCompareMode;
import io.thomasvitale.langchain4j.spring.core.json.jackson.LangChain4jJacksonProvider;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link ToolExecutionRequestMixin}.
*/
class ToolExecutionRequestMixinTests {

    private final ObjectMapper objectMapper = LangChain4jJacksonProvider.getObjectMapper();

    /**
     * Round-trips a fully populated {@code ToolExecutionRequest} through Jackson:
     * the serialized JSON must match the expected shape exactly, and
     * deserializing it back must yield an equal object.
     */
    @Test
    void serializeAndDeserializeToolExecutionRequest() throws JsonProcessingException, JSONException {
        // given: a request with every field populated
        var request = ToolExecutionRequest.builder()
                .id("QUERY_DB")
                .name("queryDatabase")
                .arguments("{}")
                .build();

        // when: serialized to JSON
        var json = objectMapper.writeValueAsString(request);

        // then: strict comparison — no extra or missing fields allowed
        JSONAssert.assertEquals("""
                {
                    "id": "QUERY_DB",
                    "name": "queryDatabase",
                    "arguments": "{}"
                }
                """, json, JSONCompareMode.STRICT);

        // and: the round trip preserves equality
        var roundTripped = objectMapper.readValue(json, ToolExecutionRequest.class);
        assertThat(roundTripped).isEqualTo(request);
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((886, 1029), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((886, 1008), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((886, 979), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((886, 944), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class ChatWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChatWithDocumentLiveTest.class);

    /**
     * Manual RAG flow: split a document into segments, embed them into an
     * in-memory store, retrieve the segments most similar to the question,
     * stuff them into a prompt template, and ask an OpenAI chat model.
     * Live test — requires a valid OpenAI API key.
     */
    @Test
    public void givenDocument_whenPrompted_thenValidResponse() {
        // Split the source document into ~100-token chunks with no overlap.
        Document document = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        List<TextSegment> segments = splitter.split(document);

        // Embed all segments locally (MiniLM) and index them in memory.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        List<Embedding> embeddings = embeddingModel.embedAll(segments)
                .content();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);

        // Retrieve up to 3 segments with similarity >= 0.7 for the question.
        String question = "Who is Simpson?";
        Embedding questionEmbedding = embeddingModel.embed(question)
                .content();
        int maxResults = 3;
        double minScore = 0.7;
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);

        // Build the augmented prompt: question + concatenated context.
        PromptTemplate promptTemplate = PromptTemplate.from("Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}");
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded()
                        .text())
                .collect(joining("\n\n"));
        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);
        Prompt prompt = promptTemplate.apply(variables);

        // Ask the live OpenAI model with a generous timeout.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(Constants.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();
        AiMessage aiMessage = chatModel.generate(prompt.toUserMessage())
                .content();
        logger.info(aiMessage.text());
        assertNotNull(aiMessage.text());
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((3188, 3316), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3295), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3259), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class ChatWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChatWithDocumentLiveTest.class);

    /**
     * Manual RAG flow against a live OpenAI model: chunk a document, embed and
     * index the chunks in memory, retrieve the best matches for a question,
     * render them into a prompt, and assert the model produces an answer.
     */
    @Test
    public void givenDocument_whenPrompted_thenValidResponse() {
        // Chunk the source document (~100 tokens per chunk, no overlap).
        Document doc = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        List<TextSegment> chunks =
                DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO)).split(doc);

        // Index every chunk in a volatile in-memory vector store.
        EmbeddingModel embedder = new AllMiniLmL6V2EmbeddingModel();
        List<Embedding> vectors = embedder.embedAll(chunks).content();
        EmbeddingStore<TextSegment> store = new InMemoryEmbeddingStore<>();
        store.addAll(vectors, chunks);

        // Fetch at most 3 chunks scoring at least 0.7 against the question.
        String question = "Who is Simpson?";
        Embedding questionVector = embedder.embed(question).content();
        List<EmbeddingMatch<TextSegment>> matches = store.findRelevant(questionVector, 3, 0.7);

        // Assemble the prompt from the question and the retrieved context.
        String context = matches.stream()
                .map(m -> m.embedded().text())
                .collect(joining("\n\n"));
        Map<String, Object> placeholders = new HashMap<>();
        placeholders.put("question", question);
        placeholders.put("information", context);
        PromptTemplate template = PromptTemplate.from("Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}");
        Prompt prompt = template.apply(placeholders);

        // Query the live OpenAI chat model and verify a non-null answer.
        ChatLanguageModel llm = OpenAiChatModel.builder()
                .apiKey(Constants.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();
        AiMessage answer = llm.generate(prompt.toUserMessage()).content();
        logger.info(answer.text());
        assertNotNull(answer.text());
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((3188, 3316), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3295), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3188, 3259), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class ChainWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChainWithDocumentLiveTest.class);

    /**
     * RAG via the high-level ConversationalRetrievalChain: ingest a document
     * into an in-memory store, then let the chain retrieve context and answer.
     * Live test — requires a valid OpenAI API key.
     */
    @Test
    public void givenChainWithDocument_whenPrompted_thenValidResponse() {
        // Local MiniLM embeddings backed by a volatile in-memory store.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        // Ingestor splits into 500-char chunks (no overlap), embeds, and indexes.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        Document document = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        ingestor.ingest(document);

        // Chain wires the LLM, the retriever, a 10-message memory window,
        // and a custom prompt that injects the retrieved {{information}}.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(Constants.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatModel)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate.from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();
        String answer = chain.execute("Who is Simpson?");
        logger.info(answer);
        assertNotNull(answer);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1556, 1764), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1743), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1699), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1655), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1955, 2083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2062), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2026), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2131, 2577), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2556), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2365), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2296), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2211), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package com.baeldung.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static org.junit.Assert.assertNotNull;
import java.nio.file.Paths;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class ChainWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChainWithDocumentLiveTest.class);

    /**
     * End-to-end RAG through ConversationalRetrievalChain: ingest a document
     * into an in-memory vector store, then ask a question and expect an answer.
     * Live test — needs a valid OpenAI API key.
     */
    @Test
    public void givenChainWithDocument_whenPrompted_thenValidResponse() {
        // Embedding pipeline: local MiniLM model feeding a volatile store.
        EmbeddingModel embedder = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> vectorStore = new InMemoryEmbeddingStore<>();
        EmbeddingStoreIngestor storeIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embedder)
                .embeddingStore(vectorStore)
                .build();

        // Ingest the sample document (split -> embed -> index).
        Document doc = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        storeIngestor.ingest(doc);

        // Wire the chain: OpenAI model, store-backed retriever, 10-message
        // memory window, and a prompt that injects the retrieved context.
        ChatLanguageModel llm = OpenAiChatModel.builder()
                .apiKey(Constants.OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();
        ConversationalRetrievalChain ragChain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(llm)
                .retriever(EmbeddingStoreRetriever.from(vectorStore, embedder))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate.from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();

        String response = ragChain.execute("Who is Simpson?");
        logger.info(response);
        assertNotNull(response);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1556, 1764), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1743), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1699), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1556, 1655), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1955, 2083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2062), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1955, 2026), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2131, 2577), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2556), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2365), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2296), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2131, 2211), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package io.quarkiverse.langchain4j.sample;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import jakarta.annotation.PreDestroy;
import jakarta.enterprise.context.RequestScoped;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
@RequestScoped
public class ChatMemoryBean implements ChatMemoryProvider {

    // One chat memory per conversation id; concurrent map so lookups from
    // parallel requests are safe.
    private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();

    @Override
    public ChatMemory get(Object memoryId) {
        // Lazily create the memory the first time an id is seen.
        return memories.computeIfAbsent(memoryId, this::newMemory);
    }

    // Sliding-window memory keeping only the 20 most recent messages.
    private ChatMemory newMemory(Object id) {
        return MessageWindowChatMemory.builder()
                .id(id)
                .maxMessages(20)
                .build();
    }

    @PreDestroy
    public void close() {
        memories.clear();
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((632, 753), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((632, 728), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((632, 698), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.service.AiServices;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class AnswerService {
private static final Logger LOGGER = LogManager.getLogger(AnswerService.class);
private Assistant assistant;
public void init(SearchAction action) {
action.appendAnswer("Initiating...");
initChat(action);
}
private void initChat(SearchAction action) {
StreamingChatLanguageModel model = OpenAiStreamingChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);
assistant = AiServices.builder(Assistant.class)
.streamingChatLanguageModel(model)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
action.appendAnswer("Done");
action.setFinished();
}
void ask(SearchAction action) {
LOGGER.info("Asking question '" + action.getQuestion() + "'");
var responseHandler = new CustomStreamingResponseHandler(action);
assistant.chat(action.getQuestion())
.onNext(responseHandler::onNext)
.onComplete(responseHandler::onComplete)
.onError(responseHandler::onError)
.start();
}
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((758, 942), 'dev.langchain4j.service.AiServices.builder'), ((758, 917), 'dev.langchain4j.service.AiServices.builder'), ((758, 844), 'dev.langchain4j.service.AiServices.builder')] |
package com.example.demo;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import java.util.Arrays;
import java.util.List;
import static java.util.Arrays.asList;
@Controller
public class DemoController {

    // Models and store are injected by Spring; all demos share them.
    private final ImageModel imageModel;
    private final ChatLanguageModel chatLanguageModel;
    private final EmbeddingModel embeddingModel;
    private final EmbeddingStore<TextSegment> embeddingStore;

    public DemoController(ImageModel imageModel, ChatLanguageModel chatLanguageModel, EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) {
        this.imageModel = imageModel;
        this.chatLanguageModel = chatLanguageModel;
        this.embeddingModel = embeddingModel;
        this.embeddingStore = embeddingStore;
    }

    // Landing page with no question/answer populated.
    @GetMapping("/")
    public String demo() {
        return "demo";
    }

    // Demo 1: generate an image and show its URL.
    @GetMapping("/1")
    String createImage(Model model) {
        String question = "A coffee mug in Paris, France";
        String answer = imageModel.generate(question).content().url().toString();
        model.addAttribute("demo", "Demo 1: image generation");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 2: single-turn question with no system prompt or memory.
    @GetMapping("/2")
    String getAnswer(Model model) {
        String question = "Who painted the Mona Lisa?";
        String answer = chatLanguageModel.generate(UserMessage.from(question)).content().text();
        model.addAttribute("demo", "Demo 2: simple question");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 3: same question, but steered by a SystemMessage persona.
    @GetMapping("/3")
    String getAnswerWithSystemMessage(Model model) {
        SystemMessage systemMessage = SystemMessage.from("I am the king of France. " +
                "Talk to me with extreme deference.");
        String question = "Who painted the Mona Lisa?";
        String answer = chatLanguageModel.generate(systemMessage, UserMessage.from(question)).content().text();
        model.addAttribute("demo", "Demo 3: advanced question");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 4: follow-up question WITHOUT memory — "this painting" has no
    // referent, illustrating why conversational memory is needed.
    @GetMapping("/4")
    String getAnswerWithLocation(Model model) {
        String question = "Where can you see this painting?";
        String answer = chatLanguageModel.generate(UserMessage.from(question)).content().text();
        model.addAttribute("demo", "Demo 4: A question without memory");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 5: same follow-up, but a ConversationalChain with a 20-message
    // window remembers the earlier context question.
    @GetMapping("/5")
    String getAnswerUsingConversationChain(Model model) {
        String context = "Who painted the Mona Lisa?";
        String question = "Where can you see this painting?";
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(20);
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .build();
        // First exchange seeds the memory; its answer is discarded.
        chain.execute(context);
        String answer = chain.execute(question);
        model.addAttribute("demo", "Demo 5: A question with memory");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 6: embed a handful of words and index them in the vector store.
    @GetMapping("/6")
    String loadVectorDatabase(Model model) {
        String content1 = "banana";
        String content2 = "computer";
        String content3 = "apple";
        String content4 = "pizza";
        String content5 = "strawberry";
        String content6 = "chess";
        List<String> contents = asList(content1, content2, content3, content4, content5, content6);
        for (String content : contents) {
            TextSegment textSegment = TextSegment.from(content);
            Embedding embedding = embeddingModel.embed(content).content();
            embeddingStore.add(embedding, textSegment);
        }
        model.addAttribute("demo", "Demo 6: Data ingestion");
        model.addAttribute("question", "Ingesting data into the vector database");
        model.addAttribute("answer", "OK");
        return "demo";
    }

    // Demo 7: similarity search — the 3 entries closest to "fruit".
    // NOTE(review): assumes Demo 6 was invoked first to populate the store.
    @GetMapping("/7")
    String queryVectorDatabase(Model model) {
        String question = "fruit";
        Embedding relevantEmbedding = embeddingModel.embed(question).content();
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(relevantEmbedding, 3);
        String answer = relevant.get(0).embedded().text() + "\n";
        answer += relevant.get(1).embedded().text() + "\n";
        answer += relevant.get(2).embedded().text() + "\n";
        model.addAttribute("demo", "Demo 7: Querying the vector database");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }

    // Demo 8: same search, also dumping each match's raw embedding vector.
    // NOTE(review): assumes Demo 6 was invoked first to populate the store.
    @GetMapping("/8")
    String queryVectorDatabaseWithDetails(Model model) {
        String question = "fruit";
        Embedding relevantEmbedding = embeddingModel.embed(question).content();
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(relevantEmbedding, 3);
        String answer = relevant.get(0).embedded().text() + " | " + Arrays.toString(relevant.get(0).embedding().vector()) + "\n";
        answer += relevant.get(1).embedded().text() + " | " + Arrays.toString(relevant.get(1).embedding().vector()) + "\n";
        answer += relevant.get(2).embedded().text() + " | " + Arrays.toString(relevant.get(2).embedding().vector()) + "\n";
        model.addAttribute("demo", "Demo 8: Getting the vectors from the vector database");
        model.addAttribute("question", question);
        model.addAttribute("answer", answer);
        return "demo";
    }
}
| [
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((3673, 3821), 'dev.langchain4j.chain.ConversationalChain.builder'), ((3673, 3796), 'dev.langchain4j.chain.ConversationalChain.builder'), ((3673, 3756), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package co.elastic.examples;
import dev.langchain4j.model.ollama.OllamaEmbeddingModel;
import dev.langchain4j.model.ollama.OllamaStreamingLanguageModel;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import org.jline.reader.LineReader;
import org.jline.reader.LineReaderBuilder;
import org.jline.terminal.Terminal;
import org.jline.terminal.TerminalBuilder;
import java.io.FileInputStream;
import java.io.IOException;
import java.time.Duration;
import java.util.Map;
import java.util.Properties;
/**
 * Factory methods for the example's building blocks: a JLine console reader,
 * Ollama models and an Elasticsearch embedding store. All settings come from
 * {@code config.properties} in the working directory.
 */
public class Components {

    // Loaded once at class-initialization time; previously the same
    // Properties-loading code was duplicated in a static initializer,
    // and createStore() re-read the file on every call.
    private static final Map<String, String> config = readConfig();

    /** Builds a dumb-terminal line reader for the interactive examples. */
    public static LineReader createLineReader() throws IOException {
        Terminal terminal = TerminalBuilder.builder().dumb(true).build();
        return LineReaderBuilder.builder().terminal(terminal).build();
    }

    /** Ollama embedding model (30 s timeout, 1 retry) from config settings. */
    public static OllamaEmbeddingModel createEmbeddingModel() {
        return new OllamaEmbeddingModel(
            config.get("ollama-url"),
            Duration.ofSeconds(30),
            config.get("ollama-embed-model"),
            1
        );
    }

    /** Streaming Ollama language model (30 s timeout) from config settings. */
    public static OllamaStreamingLanguageModel createLanguageModel() {
        return new OllamaStreamingLanguageModel(
            config.get("ollama-url"),
            Duration.ofSeconds(30),
            config.get("ollama-language-model"),
            null
        );
    }

    /**
     * Elasticsearch-backed embedding store for the given index, dimensioned to
     * match the configured embedding model. Reuses the statically loaded
     * configuration instead of re-reading the properties file.
     */
    public static ElasticsearchEmbeddingStore createStore(String index) {
        return ElasticsearchEmbeddingStore.builder()
            .serverUrl(config.get("es-url"))
            .userName(config.get("es-login"))
            .password(config.get("es-password"))
            .dimension(Integer.parseInt(config.get("ollama-embed-dimensions")))
            .indexName(index)
            .build();
    }

    /**
     * Reads {@code config.properties} into a String map.
     *
     * @throws RuntimeException wrapping any I/O or parse failure
     */
    public static Map<String, String> readConfig() {
        try (var input = new FileInputStream("config.properties")) {
            var props = new Properties();
            props.load(input);
            // Properties is a Map<Object,Object> that in practice holds only
            // strings; the double cast narrows it without copying.
            @SuppressWarnings("unchecked")
            Map<String, String> map = (Map<String, String>) (Map<?, ?>) props;
            return map;
        } catch (Exception e) {
            throw new RuntimeException("Failed to load config.properties", e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder"
] | [((1106, 1150), 'org.jline.terminal.TerminalBuilder.builder'), ((1106, 1142), 'org.jline.terminal.TerminalBuilder.builder'), ((1167, 1221), 'org.jline.reader.LineReaderBuilder.builder'), ((1167, 1213), 'org.jline.reader.LineReaderBuilder.builder'), ((1889, 2197), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1889, 2176), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1889, 2146), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1889, 2066), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1889, 2017), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1889, 1971), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder')] |
package com.sivalabs.tcguidesbot.config;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.UrlDocumentLoader;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import java.util.List;
@Configuration
@Configuration
public class AppConfig {

    private static final Logger log = LoggerFactory.getLogger(AppConfig.class);

    // Testcontainers guides that get embedded into the in-memory store.
    private static final List<String> guideUrls = List.of(
            "https://testcontainers.com/guides/replace-h2-with-real-database-for-testing/",
            "https://testcontainers.com/guides/configuration-of-services-running-in-container/",
            "https://testcontainers.com/guides/testing-spring-boot-kafka-listener-using-testcontainers/",
            "https://testcontainers.com/guides/getting-started-with-testcontainers-for-java/",
            "https://testcontainers.com/guides/testing-rest-api-integrations-in-micronaut-apps-using-wiremock/",
            "https://testcontainers.com/guides/working-with-jooq-flyway-using-testcontainers/"
    );

    private final ApplicationProperties properties;

    public AppConfig(ApplicationProperties properties) {
        this.properties = properties;
    }

    /**
     * Builds the retrieval chain: ingests every guide into an in-memory
     * embedding store, then wires an OpenAI chat model with a retriever
     * over that store.
     */
    @Bean
    public ConversationalRetrievalChain chain() {
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        ingestGuides(embeddingModel, embeddingStore);

        return ConversationalRetrievalChain.builder()
                .chatLanguageModel(openAiModel())
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                // .chatMemory() // you can override default chat memory
                // .promptTemplate() // you can override default prompt template
                .build();
    }

    // Loads every guide URL, splits it into 500-token segments (no overlap),
    // embeds the segments and stores them.
    private void ingestGuides(EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) {
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        log.info("Ingesting documents...");
        List<Document> documents = guideUrls.stream().map(UrlDocumentLoader::load).toList();
        ingestor.ingest(documents);
        log.info("Ingested {} documents", documents.size());
    }

    // OpenAI chat model configured from application properties, 30 s timeout.
    private OpenAiChatModel openAiModel() {
        return OpenAiChatModel.builder()
                .apiKey(properties.apiKey())
                .timeout(Duration.ofSeconds(30))
                .build();
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2145, 2369), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2145, 2344), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2145, 2296), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2145, 2248), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2622, 3150), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2622, 2971), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2622, 2882), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2696, 2864), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2696, 2831), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2696, 2774), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.example.interlang;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
interface Assistant {
    // Implemented at runtime by langchain4j's AiServices proxy; the system
    // message primes the model before each user message is sent.
    @SystemMessage("You are a FHIR assistant AI. You can access various tools to access FHIR resources to chat about a patient.")
    String chat(String userMessage);
}
@Service
public class AssistantService implements Assistant {
private final Assistant assistant;
private final FhirTools fhirTools;
public AssistantService(@Value("${fhir.server.url}") String fhirServerUrl,
@Value("${fhir.api.key}") String fhirApiKey,
@Value("${openai.api.key}") String openAiApiKey) {
this.fhirTools = new FhirTools(fhirServerUrl, fhirApiKey);
OpenAiChatModel chatModel = OpenAiChatModel.builder()
.apiKey(openAiApiKey)
.modelName("gpt-4")
.build();
this.assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(chatModel)
.tools(this.fhirTools)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
}
@Override
public String chat(String userMessage) {
return this.assistant.chat(userMessage);
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((979, 1103), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1078), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((979, 1042), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1131, 1349), 'dev.langchain4j.service.AiServices.builder'), ((1131, 1324), 'dev.langchain4j.service.AiServices.builder'), ((1131, 1251), 'dev.langchain4j.service.AiServices.builder'), ((1131, 1212), 'dev.langchain4j.service.AiServices.builder')] |
package me.nzuguem.bot.configurations.clients;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.source.UrlSource;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
import java.util.ArrayList;
import java.util.List;
@Path("")
@RegisterRestClient(configKey = "quarkus-github-app-extension-doc-client")
@Produces(MediaType.TEXT_PLAIN)
public interface QuarkusGithubAppExtensionDocClient {

    // Documentation pages served by the Quarkus GitHub App extension site.
    List<String> FILE_NAMES = List.of(
            "commands.adoc", "create-github-app.adoc", "developer-reference.adoc",
            "index.adoc", "push-to-production.adoc", "register-github-app.adoc",
            "replay-ui.adoc", "testing.adoc", "includes/quarkus-github-app.adoc"
    );

    @GET
    @Path("{fileName}")
    String getContent(@PathParam("fileName") String fileName);

    /** Fetches one page and wraps it in a Document tagged with its source. */
    default Document getContentAsDocument(String fileName, String baseUri) {
        var body = this.getContent(fileName);
        var metadata = UrlSource.from("%s/%s".formatted(baseUri, fileName))
                .metadata()
                .add("source", "quarkus-github-app-extension");
        return new Document(body, metadata);
    }

    /** Fetches every known page as a Document, in FILE_NAMES order. */
    default List<Document> getContentAsDocument() {
        // Base URL comes from the rest-client configuration of this client.
        var baseUri = ConfigProvider.getConfig().getValue("quarkus.rest-client.quarkus-github-app-extension-doc-client.url", String.class);
        var documents = new ArrayList<Document>();
        for (var fileName : FILE_NAMES) {
            documents.add(getContentAsDocument(fileName, baseUri));
        }
        return documents;
    }
}
| [
"dev.langchain4j.data.document.source.UrlSource.from"
] | [((1196, 1339), 'dev.langchain4j.data.document.source.UrlSource.from'), ((1196, 1276), 'dev.langchain4j.data.document.source.UrlSource.from'), ((1525, 1641), 'org.eclipse.microprofile.config.ConfigProvider.getConfig')] |
package org.agoncal.fascicle.langchain4j.accessing.qianfan;
import dev.langchain4j.model.qianfan.QianfanChatModel;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  // Demo entry point: runs the minimal Qianfan chat-model example.
  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useQianfanChatModel();
  }

  // NOTE(review): a Qianfan client is configured from AZURE_OPENAI_* environment
  // variables — this looks like a copy-paste from the Azure OpenAI example;
  // confirm the intended variable names.
  private static final String QIANFAN_KEY = System.getenv("AZURE_OPENAI_KEY");
  private static final String QIANFAN_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
  private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");

  // NOTE(review): PROMPT is never used — both methods below hard-code a
  // different question ("Rolling Stones" vs "Beatles"); use it or remove it.
  private static final String PROMPT = "When was the first Beatles album released?";

  // ###############################
  // ###   QIANFAN CHAT MODEL   ###
  // ###############################

  // Minimal configuration: API key, endpoint and temperature only.
  // The tag:: comments are asciidoc include anchors — do not remove.
  public void useQianfanChatModel() {
    // NOTE(review): banner text mentions Azure OpenAI — likely a copy-paste;
    // left unchanged because it is runtime output.
    System.out.println("### useAzureOpenAiuseQianfanChatModelChatModel");

    // tag::adocSnippet[]
    QianfanChatModel model = QianfanChatModel.builder()
      .apiKey(QIANFAN_KEY)
      .endpoint(QIANFAN_ENDPOINT)
      .temperature(0.3)
      .build();
    // end::adocSnippet[]

    String completion = model.generate("When was the first Rolling Stones album released?");

    System.out.println(completion);
  }

  // Fuller configuration showing most builder knobs (model name, base URL,
  // penalty score, secret key, retries, topP, temperature, logging).
  public void useQianfanChatModelRequest() {
    System.out.println("### useQianfanChatModelRequest");

    // tag::adocRequest[]
    QianfanChatModel model = QianfanChatModel.builder()
      .apiKey(QIANFAN_KEY)
      .endpoint(QIANFAN_ENDPOINT)
      .modelName(AZURE_OPENAI_DEPLOYMENT_NAME)
      .baseUrl("https://api.qianfanapi.com")
      .penaltyScore(0.0d)
      .secretKey("secretKey")
      .maxRetries(3)
      .topP(1.0d)
      .temperature(0.9)
      .logRequests(true)
      .logResponses(true)
      .build();
    // end::adocRequest[]

    String completion = model.generate("When was the first Rolling Stones album released?");

    System.out.println(completion);
  }
}
| [
"dev.langchain4j.model.qianfan.QianfanChatModel.builder"
] | [((1056, 1182), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1056, 1167), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1056, 1143), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1056, 1109), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1869), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1854), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1828), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1803), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1779), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1761), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1740), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1710), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1684), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1639), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1592), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1505, 1558), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder')] |
package org.jugph;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import java.util.Arrays;
import java.util.List;
import static java.time.Duration.ofSeconds;
public class ResourceFinderAIServiceExample {

    /** Structured answer the model must produce; @Description guides each field. */
    public record JavaBook(
            @Description("the title of the learning resource, concise") String title,
            @Description("a brief summary of the book content, up to two sentences") String summary,
            @Description("difficulty level, categorized as Beginner, Intermediate, or Advanced") String difficultyLevel,
            @Description("a list of key topics covered in the book") List<String> topicsCovered
    ) {}

    /** Prompt template; {{placeholders}} are filled from the record components. */
    @StructuredPrompt("Find a Java book for a {{skillLevel}} developer interested in {{topics}}")
    public record JavaBookPrompt(String skillLevel, List<String> topics) {}

    interface BookFinder {
        JavaBook findBook(JavaBookPrompt prompt);
    }

    public static void main(String[] args) {
        var chatModel = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .timeout(ofSeconds(120))
                .build();

        // AiServices turns the BookFinder interface into a model-backed client.
        var bookFinder = AiServices.create(BookFinder.class, chatModel);

        var request = new JavaBookPrompt("Beginner",
                Arrays.asList("object-oriented programming", "basic Java syntax"));

        JavaBook book = bookFinder.findBook(request);
        System.out.println(book);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1148, 1296), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1148, 1271), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1148, 1230), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package org.jxch.capital.config;
import dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import okhttp3.HttpUrl;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import java.util.Optional;
@Data
@Slf4j
@Configuration
@ConfigurationProperties(prefix = "milvus")
public class MilvusConfig {
    // Bound from the "milvus.*" configuration properties.
    private String url;
    private Integer dimension;

    // @Bean
    public MilvusEmbeddingStore milvusEmbeddingStore() {
        // Reject URLs that okhttp cannot parse before building the store.
        HttpUrl parsed = HttpUrl.parse(url);
        if (parsed == null) {
            throw new IllegalArgumentException("无效的URL: " + url);
        }
        return MilvusEmbeddingStore.builder()
                .host(parsed.host())
                .port(parsed.port())
                .dimension(dimension)
                .build();
    }
}
| [
"dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder"
] | [((597, 708), 'java.util.Optional.ofNullable'), ((725, 894), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((725, 869), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((725, 831), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder'), ((725, 793), 'dev.langchain4j.store.embedding.milvus.MilvusEmbeddingStore.builder')] |
package org.goafabric.dbagent.ai;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
import org.goafabric.dbagent.ai.mock.MockChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import java.util.HashMap;
import java.util.function.Function;
@Configuration
@Profile("mock")
public class MockConfiguration {

    /** Mock chat model routing recognised keywords to DatabaseTool lookups. */
    @Bean
    ChatLanguageModel chatModelMock(DatabaseTool databaseTool) {
        var handlers = new HashMap<String, Function<String, Object>>();
        handlers.put("firstname", databaseTool::findByFirstName);
        handlers.put("lastname", databaseTool::findByLastName);
        handlers.put("city", databaseTool::findByCity);
        handlers.put("allergy", databaseTool::findByAllergy);
        return new MockChatModel(handlers);
    }

    /** Agent with a 20-message window that can invoke the DatabaseTool. */
    @Bean
    DatabaseAgent databaseAgent(ChatLanguageModel chatLanguageModel, DatabaseTool databaseTool) {
        return AiServices.builder(DatabaseAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(databaseTool)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1171, 1399), 'dev.langchain4j.service.AiServices.builder'), ((1171, 1374), 'dev.langchain4j.service.AiServices.builder'), ((1171, 1337), 'dev.langchain4j.service.AiServices.builder'), ((1171, 1264), 'dev.langchain4j.service.AiServices.builder')] |
package com.revolvingSolutions.aicvgeneratorbackend.conf;
import com.revolvingSolutions.aicvgeneratorbackend.agent.*;
import com.revolvingSolutions.aicvgeneratorbackend.model.aimodels.JobClassification;
import dev.langchain4j.classification.EmbeddingModelTextClassifier;
import dev.langchain4j.classification.TextClassifier;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModerationModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
@Configuration
@RequiredArgsConstructor
public class LangChainConf {
    // OpenAI chat-model settings, bound from application properties.
    @Value("${langchain4j.chat-model.openai.api-key}")
    private String apikey;
    @Value("${langchain4j.chat-model.openai.model-name}")
    private String modelName;
    @Value("${langchain4j.chat-model.openai.temperature}")
    private Double temperature;
    // NOTE(review): when true, the one-off ingestion in embeddingStore() is
    // skipped — the property name suggests the opposite; confirm intent.
    @Value("${app.api.embed.information}")
    private Boolean embed;
    // Pinecone connection settings.
    @Value("${app.api.embedding.key}")
    private String embedKey;
    @Value("${app.api.embedding.environment}")
    private String environment;
    @Value("${app.api.embedding.index}")
    private String index;
    @Value("${app.api.embedding.project.id}")
    private String projectId;
    // Default chat model: 1000-token responses, 2-minute timeout.
    @Bean
    public ChatLanguageModel chatLanguageModel() {
        return OpenAiChatModel.builder()
                .modelName(modelName)
                .apiKey(apikey)
                .temperature(temperature)
                .logRequests(true)
                .logResponses(true)
                .maxRetries(2)
                .maxTokens(1000)
                .topP(1.0)
                .timeout(Duration.ofMinutes(2))
                .frequencyPenalty(0.0)
                .presencePenalty(0.0)
                .build();
    }
    // Larger variant for extraction (3000 tokens, 3-minute timeout).
    // NOTE(review): not annotated @Bean, so extractionAgent() below is injected
    // with the default chatLanguageModel bean, not this model — confirm whether
    // that is intended (adding @Bean would create an ambiguous second bean).
    public ChatLanguageModel extractionChatLanguageModel() {
        return OpenAiChatModel.builder()
                .modelName(modelName)
                .apiKey(apikey)
                .temperature(temperature)
                .logRequests(true)
                .logResponses(true)
                .maxRetries(2)
                .maxTokens(3000)
                .topP(1.0)
                .timeout(Duration.ofMinutes(3))
                .frequencyPenalty(0.0)
                .presencePenalty(0.0)
                .build();
    }
    // Agent beans: AiServices proxies over the chat model, each with a small
    // sliding-window memory.
    @Bean
    public DescriptionAgent descriptionAgent(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(DescriptionAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(3))
                .build();
    }
    @Bean
    public EmploymentHistoryExpander employmentHistoryExpander(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(EmploymentHistoryExpander.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(3))
                .build();
    }
    @Bean
    public EducationDescriptionAgent educationDescriptionAgent(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(EducationDescriptionAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(3))
                .build();
    }
    @Bean
    public ExtractionAgent extractionAgent(ChatLanguageModel extractionChatLanguageModel) {
        return AiServices.builder(ExtractionAgent.class)
                .chatLanguageModel(extractionChatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(5))
                .build();
    }
    // OpenAI ada-002 embedding model used for both the store and classification.
    @Bean
    public EmbeddingModel embeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(apikey)
                .logRequests(false)
                .logResponses(false)
                .modelName(TEXT_EMBEDDING_ADA_002)
                .build();
    }
    // Retriever returning at most 1 segment with min similarity score 0.9.
    @Bean
    public Retriever<TextSegment> retriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        return EmbeddingStoreRetriever.from(embeddingStore,embeddingModel,1,0.9);
    }
    // Pinecone-backed store; optionally ingests classpath:data.txt once,
    // split into 100-token chunks, unless the "embed" flag short-circuits.
    @Bean
    public EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
        EmbeddingStore<TextSegment> embeddingStore = PineconeEmbeddingStore.builder()
                .apiKey(embedKey)
                .environment(environment)
                .index(index)
                .nameSpace("")
                .projectId(projectId)
                .build();
        if (embed) return embeddingStore;
        try {
            EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(100,new OpenAiTokenizer(GPT_3_5_TURBO)))
                    .embeddingStore(embeddingStore)
                    .embeddingModel(embeddingModel)
                    .build()
                    .ingest(loadDocument(resourceLoader.getResource("classpath:data.txt").getFile().toPath()));
        } catch (Exception e) {
            // Best-effort ingestion: a failure leaves the store usable but empty.
            System.out.println("Warning Something has BADLY gone Wrong!");
        }
        return embeddingStore;
    }
    @Bean
    public ModerationModel moderationModel() {
        return OpenAiModerationModel.withApiKey(apikey);
    }
    // Embedding-based classifier: each job category is described by example
    // phrases; input text is assigned the nearest category by embedding
    // similarity.
    @Bean
    public TextClassifier<JobClassification> jobMap(EmbeddingModel embeddingModel) {
        HashMap<JobClassification, List<String>> map = new HashMap<>();
        map.put(
                JobClassification.engineering,
                List.of(
                        "Analytical Thinker",
                        "Problem Solver",
                        "Innovative Designer",
                        "Detail-Oriented Professional",
                        "Technical Expert",
                        "Team Player",
                        "Creative Solution Provider",
                        "Continuous Learner",
                        "Critical Thinker",
                        "Precision Engineer"
                )
        );
        map.put(
                JobClassification.business,
                List.of(
                        "Strategic Thinker",
                        "Effective Communicator",
                        "Team Leader",
                        "Analytical Mindset",
                        "Financial Acumen",
                        "Negotiation Skills",
                        "Decision Maker",
                        "Adaptable to Change",
                        "Problem Solver",
                        "Customer-Centric"
                )
        );
        map.put(
                JobClassification.computer_science,
                List.of(
                        "Algorithm Expert",
                        "Coding Guru",
                        "Problem-Solving Pro",
                        "Data Science Enthusiast",
                        "Cybersecurity Whiz",
                        "AI and Machine Learning Aficionado",
                        "Software Development Maestro",
                        "Database Wizard",
                        "Web Development Prodigy",
                        "Networking Ninja"
                )
        );
        map.put(
                JobClassification.architecture,
                List.of(
                        "Creative Designer",
                        "Spatial Thinker",
                        "Detail-Oriented Planner",
                        "Innovative Problem Solver",
                        "Technically Proficient",
                        "Team Player",
                        "Sustainable Design Advocate",
                        "Continuous Learner",
                        "Critical Evaluator",
                        "Master of Form and Function"
                )
        );
        map.put(
                JobClassification.finance,
                List.of(
                        "Analytical Thinker",
                        "Risk Management Expert",
                        "Financial Strategist",
                        "Data-Driven Decision Maker",
                        "Detail-Oriented Analyst",
                        "Investment Savvy",
                        "Regulatory Compliance Specialist",
                        "Effective Communicator",
                        "Problem-Solving Guru",
                        "Economic Trend Interpreter"
                )
        );
        map.put(
                JobClassification.education,
                List.of(
                        "Passionate Educator",
                        "Innovative Curriculum Developer",
                        "Dedicated Mentor",
                        "Lifelong Learner",
                        "Student-Centered Advocate",
                        "Effective Classroom Manager",
                        "Tech-Savvy Instructor",
                        "Research-Driven Scholar",
                        "Collaborative Team Player",
                        "Compassionate Listener"
                )
        );
        map.put(
                JobClassification.law,
                List.of(
                        "Analytical Legal Mind",
                        "Expert Researcher",
                        "Effective Communicator",
                        "Detail-Oriented",
                        "Strong Advocate",
                        "Critical Thinker",
                        "Negotiation Skills",
                        "Legal Writing Proficiency",
                        "Ethical and Professional",
                        "Strategic Problem Solver"
                )
        );
        return new EmbeddingModelTextClassifier<JobClassification>(embeddingModel, map);
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2875, 3324), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3299), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3261), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3174), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3147), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3114), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3047), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 3012), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 2970), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2875, 2938), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3858), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3833), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3795), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3756), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3708), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3681), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3648), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3617), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3581), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3546), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3504), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3409, 3472), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3976, 4169), 'dev.langchain4j.service.AiServices.builder'), ((3976, 4144), 'dev.langchain4j.service.AiServices.builder'), ((3976, 4072), 'dev.langchain4j.service.AiServices.builder'), ((4305, 4507), 'dev.langchain4j.service.AiServices.builder'), ((4305, 4482), 
'dev.langchain4j.service.AiServices.builder'), ((4305, 4410), 'dev.langchain4j.service.AiServices.builder'), ((4643, 4845), 'dev.langchain4j.service.AiServices.builder'), ((4643, 4820), 'dev.langchain4j.service.AiServices.builder'), ((4643, 4748), 'dev.langchain4j.service.AiServices.builder'), ((4971, 5173), 'dev.langchain4j.service.AiServices.builder'), ((4971, 5148), 'dev.langchain4j.service.AiServices.builder'), ((4971, 5076), 'dev.langchain4j.service.AiServices.builder'), ((5252, 5463), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5252, 5438), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5252, 5387), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5252, 5350), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5252, 5314), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5891, 6123), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((5891, 6098), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((5891, 6060), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((5891, 6029), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((5891, 5999), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((5891, 5957), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((6193, 6576), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((6193, 6465), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((6193, 6436), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((6193, 6384), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((6193, 6332), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package example.aiservice;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import example.aiservice.customerservice.CustomerService;
import example.aiservice.customerservice.CustomerServiceAgent;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.lang.StringTemplate.STR;
@Configuration
public class Config {
@Value("${doc.names}")
private String docNames;
@Bean
StreamingChatLanguageModel streamingModel() {
return OpenAiStreamingChatModel.withApiKey(System.getenv("AI_OPENAI_API_KEY"));
}
@Bean
CustomerServiceAgent customerSupportAgent(StreamingChatLanguageModel streamingChatLanguageModel,
ChatLanguageModel chatLanguageModel,
ContentRetriever contentRetriever,
CustomerService customerService) {
return AiServices.builder(CustomerServiceAgent.class)
.streamingChatLanguageModel(streamingChatLanguageModel)
.chatLanguageModel(chatLanguageModel)
.chatMemory(MessageWindowChatMemory.withMaxMessages(20))
.tools(customerService)
.contentRetriever(contentRetriever)
.build();
}
@Bean
ContentRetriever contentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
// You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
// - The nature of your data
// - The embedding model you are using
int maxResults = 1;
double minScore = 0.6;
return EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(maxResults)
.minScore(minScore)
.build();
}
@Bean
EmbeddingModel embeddingModel() {
return new AllMiniLmL6V2EmbeddingModel();
}
@Bean
EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
// Normally, you would already have your embedding store filled with your data.
// However, for the purpose of this demonstration, we will:
// 1. Create an in-memory embedding store
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
// 2. Load an example document ("Miles of Smiles" terms of use)
var docList = List.of(docNames.split(","));
docList.forEach(doc -> {
Resource resource = resourceLoader.getResource(STR."classpath:\{doc}");
try {
loadEmbeddingForDocument(embeddingModel, resource, embeddingStore);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
return embeddingStore;
}
private DocumentParser getParserForDocument(Resource resource) throws IOException {
return resource.getFile().toPath().toString().endsWith(".pdf") ? new ApachePdfBoxDocumentParser() : new TextDocumentParser();
}
private void loadEmbeddingForDocument(EmbeddingModel embeddingModel, Resource resource, EmbeddingStore<TextSegment> embeddingStore) throws IOException {
Document document = loadDocument(resource.getFile().toPath(), getParserForDocument(resource));
// 3. Split the document into segments 100 tokens each
// 4. Convert segments into embeddings
// 5. Store embeddings into embedding store
// All this can be done manually, but we will use EmbeddingStoreIngestor to automate this:
DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(documentSplitter)
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
ingestor.ingest(document);
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((2535, 2897), 'dev.langchain4j.service.AiServices.builder'), ((2535, 2872), 'dev.langchain4j.service.AiServices.builder'), ((2535, 2820), 'dev.langchain4j.service.AiServices.builder'), ((2535, 2780), 'dev.langchain4j.service.AiServices.builder'), ((2535, 2707), 'dev.langchain4j.service.AiServices.builder'), ((2535, 2653), 'dev.langchain4j.service.AiServices.builder'), ((3311, 3548), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3311, 3523), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3311, 3487), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3311, 3447), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3311, 3399), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5497, 5702), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5497, 5677), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5497, 5629), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5497, 5581), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package my.samples;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
public class LoadTeslaManualtoES {

    /** Ingests the Tesla owner's manual PDF into a local Elasticsearch embedding index. */
    public static void main(String[] args) {
        // Target index: 384-dim MiniLM vectors, one per ~300-token segment.
        EmbeddingStore<TextSegment> store = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .indexName("car-warranty-guide-embeddings")
                .dimension(384)
                .build();

        EmbeddingModel embedder = new AllMiniLmL6V2EmbeddingModel();

        EmbeddingStoreIngestor pipeline = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(embedder)
                .embeddingStore(store)
                .build();

        Path manualPath = toPath("example-files/Tesla_Models_Owners_Manual.pdf");
        Document manual = FileSystemDocumentLoader.loadDocument(manualPath, new ApachePdfBoxDocumentParser());

        // Attach descriptive metadata so search hits can be traced back to this manual.
        manual.metadata().add("fileName", manualPath.getFileName().toString());
        manual.metadata().add("filePath", manualPath.toString());
        manual.metadata().add("company", "TESLA");
        manual.metadata().add("product", "MODEL S");
        manual.metadata().add("language", "ENG");
        manual.metadata().add("version", "V1");
        manual.metadata().add("year", "2025");
        manual.metadata().add("type", "Owner's Manual Guide");
        manual.metadata().add("country", "US");
        manual.metadata().add("category", "Automotive");

        pipeline.ingest(manual);
        System.out.println("Document ingested successfully");
    }

    /**
     * Resolves a classpath resource name to a filesystem {@link Path}.
     *
     * @throws RuntimeException if the resource is missing or its URI cannot be resolved
     */
    private static Path toPath(String fileName) {
        // Resource files live under src/main/resources/example-files.
        URL fileUrl = LoadTeslaManualtoES.class.getClassLoader().getResource(fileName);
        if (fileUrl == null) {
            throw new RuntimeException("Resource not found: " + fileName);
        }
        try {
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((911, 1117), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((911, 1092), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((911, 1060), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((911, 1000), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1237, 1461), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1237, 1436), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1237, 1388), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1237, 1340), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package io.quarkiverse.langchain4j.runtime;
import java.util.function.Function;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.quarkiverse.langchain4j.runtime.aiservice.ChatMemoryConfig;
import io.quarkus.arc.SyntheticCreationalContext;
import io.quarkus.runtime.annotations.Recorder;
@Recorder
public class ChatMemoryRecorder {

    // NOTE(review): the anonymous classes below (instead of lambdas) are likely
    // deliberate — Quarkus bytecode recording is known to have trouble with lambdas
    // in recorder return values; confirm before converting.

    /**
     * Produces a {@link ChatMemoryProvider} factory backed by a message-count window.
     * The max-messages limit is read once from {@code config}; the backing
     * {@link ChatMemoryStore} is resolved lazily from the synthetic bean context.
     */
    public Function<SyntheticCreationalContext<ChatMemoryProvider>, ChatMemoryProvider> messageWindow(ChatMemoryConfig config) {
        return new Function<>() {
            @Override
            public ChatMemoryProvider apply(SyntheticCreationalContext<ChatMemoryProvider> context) {
                ChatMemoryStore chatMemoryStore = context.getInjectedReference(ChatMemoryStore.class);
                int maxMessages = config.memoryWindow().maxMessages();
                return new ChatMemoryProvider() {
                    @Override
                    public ChatMemory get(Object memoryId) {
                        // One memory instance per memoryId, all sharing the same store.
                        return MessageWindowChatMemory.builder()
                                .maxMessages(maxMessages)
                                .id(memoryId)
                                .chatMemoryStore(chatMemoryStore)
                                .build();
                    }
                };
            }
        };
    }

    /**
     * Produces a {@link ChatMemoryProvider} factory backed by a token-count window.
     * Same shape as {@link #messageWindow}, but additionally resolves a
     * {@link Tokenizer} so the window can be measured in tokens.
     */
    public Function<SyntheticCreationalContext<ChatMemoryProvider>, ChatMemoryProvider> tokenWindow(ChatMemoryConfig config) {
        return new Function<>() {
            @Override
            public ChatMemoryProvider apply(SyntheticCreationalContext<ChatMemoryProvider> context) {
                ChatMemoryStore chatMemoryStore = context.getInjectedReference(ChatMemoryStore.class);
                Tokenizer tokenizer = context.getInjectedReference(Tokenizer.class);
                int maxTokens = config.tokenWindow().maxTokens();
                return new ChatMemoryProvider() {
                    @Override
                    public ChatMemory get(Object memoryId) {
                        return TokenWindowChatMemory.builder()
                                .maxTokens(maxTokens, tokenizer)
                                .id(memoryId)
                                .chatMemoryStore(chatMemoryStore)
                                .build();
                    }
                };
            }
        };
    }
}
| [
"dev.langchain4j.memory.chat.TokenWindowChatMemory.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((1242, 1486), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1445), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1379), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1333), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2272, 2521), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2480), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2414), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2368), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder')] |
package com.genai.tmgenai.service;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentLoader;
import dev.langchain4j.data.document.DocumentSegment;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.ParagraphSplitter;
import dev.langchain4j.data.document.splitter.SentenceSplitter;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.PineconeEmbeddingStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.document.DocumentType.PDF;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
@Service
public class FileEmbeddingService {

    // NOTE(review): property key "opnenapikey" looks like a typo for "openapikey",
    // but it must match the actual entry in the application properties — confirm
    // there before renaming.
    @Value("${key.opnenapikey}")
    private String OPENAI_API_KEY;

    /**
     * Persists the uploaded PDF locally, loads and embeds it, then runs a sample
     * retrieval + chat-completion round trip against the Pinecone index.
     *
     * @param multipartFile uploaded PDF content
     * @param fileId        identifier used to name the temporary file on disk
     * @throws IOException if the upload cannot be written to disk
     */
    public void embedFile(MultipartFile multipartFile, String fileId) throws IOException {
        // Bug fix: directory and file name were previously concatenated without a
        // separator, producing paths like "/Users/amankumar/Downloadsabc.pdf".
        File file = new File("/Users/amankumar/Downloads", fileId + ".pdf");
        multipartFile.transferTo(file);

        DocumentLoader documentLoader = DocumentLoader.from(Paths.get(file.getPath()), PDF);
        Document document = documentLoader.load();

        // Embedding model used for query embedding (and for the currently disabled
        // ingestion steps below).
        EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder()
                .apiKey(OPENAI_API_KEY) // https://platform.openai.com/account/api-keys
                .modelName(TEXT_EMBEDDING_ADA_002)
                .timeout(ofSeconds(15))
                .build();

        // SECURITY(review): hard-coded Pinecone API key committed to source — rotate
        // this key and load it from configuration/environment instead.
        PineconeEmbeddingStore pinecone = PineconeEmbeddingStore.builder()
                .apiKey("1d0899b3-7abf-40be-a267-ac208d572ed3")
                .environment("asia-southeast1-gcp-free")
                .projectName("bca6a53")
                .index("documents") // index dimensions must match the model (1536 for text-embedding-ada-002)
                .build();

        // Ingestion (split + embedAll + pinecone.addAll) is intentionally disabled;
        // this method currently only queries an already-populated index.

        String question = "what is the value for policy no?";
        Embedding questionEmbedding = embeddingModel.embed(question).get();

        // Retrieve the two most similar stored segments.
        List<EmbeddingMatch<DocumentSegment>> relevantEmbeddings = pinecone.findRelevant(questionEmbedding, 2);

        // Build a prompt that grounds the answer in the retrieved segments.
        PromptTemplate promptTemplate = PromptTemplate.from(
                "Answer the following question to the best of your ability :\n"
                        + "\n"
                        + "Question:\n"
                        + "{{question}}\n"
                        + "\n"
                        + "Base your answer on the below information from a policy document: \n"
                        + "{{information}}");

        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().get().text())
                .collect(joining("\n\n"));

        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);
        Prompt prompt = promptTemplate.apply(variables);

        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(OPENAI_API_KEY) // https://platform.openai.com/account/api-keys
                .modelName(GPT_3_5_TURBO)
                .temperature(1.0)
                .logResponses(true)
                .logRequests(true)
                .build();

        AiMessage aiMessage = chatModel.sendUserMessage(prompt).get();

        String answer = aiMessage.text();
        System.out.println(answer);
    }
}
| [
"dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder",
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2341, 2575), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2550), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2510), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2411), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2792, 3241), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3088), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3052), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3012), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 2888), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((4610, 4895), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4870), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4835), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4799), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4765), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4675), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package my.samples;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
public class ElasticSearchEmbeddingManualLoader {

    // ANSI escape codes used to colorize console output.
    public static final String ANSI_GREEN = "\u001B[32m";
    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_YELLOW = "\u001B[33m";

    /**
     * Ingests the Tesla owner's manual into a local Elasticsearch embedding index,
     * then runs an interactive loop: each console query is embedded, matched against
     * the index, and the matching segments are passed to {@code RestClient.getAnswer}.
     */
    public static void main(String[] args) {
        EmbeddingStore<TextSegment> embeddingStore = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .indexName("car-warranty-guide-embeddings")
                .dimension(384)
                .build();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();

        Path filePath = toPath("example-files/Tesla_Models_Owners_Manual.pdf");
        Document document = FileSystemDocumentLoader.loadDocument(filePath, new ApachePdfBoxDocumentParser());
        document.metadata().add("fileName", filePath.getFileName().toString());
        document.metadata().add("filePath", filePath.toString());
        document.metadata().add("company", "TESLA");
        document.metadata().add("product", "MODEL S");
        document.metadata().add("language", "ENG");
        document.metadata().add("version", "V1");
        document.metadata().add("year", "2025");
        document.metadata().add("type", "Owner's Manual Guide");
        document.metadata().add("country", "US");
        document.metadata().add("category", "Automotive");
        ingestor.ingest(document);

        // Fix: try-with-resources — the Scanner was previously closed manually and
        // leaked on any exception thrown inside the loop.
        try (Scanner scanner = new Scanner(System.in)) {
            while (true) {
                System.out.println("Enter your query (or type 'exit' to quit):");
                String query = scanner.nextLine();
                if ("exit".equalsIgnoreCase(query)) {
                    System.out.println("Exiting program.");
                    break;
                }
                // Sample queries:
                //   Who Pays For Warranty Repairs?
                //   What is the warranty period?
                //   What is the warranty period for the powertrain?
                Embedding queryEmbedding = embeddingModel.embed(query).content();
                List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 5);

                // Fix: these banners previously referenced an unrelated F-150 warranty
                // guide; the document ingested above is the Tesla owner's manual.
                System.out.println("Start --------- Matching Context from Document: Tesla_Models_Owners_Manual.pdf");
                List<String> answers = new ArrayList<>();
                for (EmbeddingMatch<TextSegment> match : relevant) {
                    System.out.println(match.score());
                    answers.add(match.embedded().text());
                    System.out.println(ANSI_GREEN + match.embedded().text() + ANSI_RESET);
                    System.out.println("");
                }
                System.out.println("End --------- Matching Context from Document: Tesla_Models_Owners_Manual.pdf");

                if (!answers.isEmpty()) {
                    try {
                        System.out.println(ANSI_YELLOW + RestClient.getAnswer(query, answers) + ANSI_RESET);
                    } catch (IOException e) {
                        // Best effort: a failed answer call should not kill the REPL.
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    /**
     * Resolves a classpath resource (under src/main/resources) to a filesystem {@link Path}.
     *
     * @throws RuntimeException if the resource is missing or its URI cannot be resolved
     */
    private static Path toPath(String fileName) {
        URL fileUrl = ElasticSearchEmbeddingManualLoader.class.getClassLoader().getResource(fileName);
        if (fileUrl == null) {
            throw new RuntimeException("Resource not found: " + fileName);
        }
        try {
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1308, 1514), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1489), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1457), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1397), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1634, 1858), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1833), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1785), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1737), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiImageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static dev.ai4j.openai4j.image.ImageModel.DALL_E_QUALITY_HD;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
public class OpenAiImageModelExamples {

    static class Simple_Prompt {

        public static void main(String[] args) {
            // Single prompt, no persistence: the generated image is hosted by OpenAI.
            ImageModel imageModel = OpenAiImageModel.withApiKey(System.getenv("OPENAI_API_KEY"));

            Response<Image> imageResponse = imageModel.generate("Donald Duck in New York, cartoon style");

            System.out.println(imageResponse.content().url()); // Donald Duck is here :)
        }
    }

    static class Draw_Story_From_My_Document {

        public static void main(String[] args) throws URISyntaxException {
            // HD-quality model configured to persist generated images locally.
            ImageModel imageModel = OpenAiImageModel.builder()
                    .apiKey(System.getenv("OPENAI_API_KEY"))
                    .quality(DALL_E_QUALITY_HD)
                    .logRequests(true)
                    .logResponses(true)
                    .withPersisting()
                    .build();

            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(1000, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();

            // Load the example story from the classpath and index it.
            Document story = loadDocument(
                    Paths.get(Objects.requireNonNull(
                            OpenAiImageModelExamples.class.getResource("example-files/story-about-happy-carrot.txt")).toURI()),
                    new TextDocumentParser());
            ingestor.ingest(story);

            // RAG chain: answers questions grounded in the indexed story.
            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.builder().apiKey(System.getenv("OPENAI_API_KEY")).build())
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    .build();

            PromptTemplate drawPromptTemplate = PromptTemplate.from(
                    "Draw {{object}}. Base the picture on following information:\n\n{{information}}");

            Map<String, Object> promptVariables = new HashMap<>();
            promptVariables.put("information", chain.execute("Who is Charlie?"));
            promptVariables.put("object", "Ultra realistic Charlie on the party, cinematic lighting");

            Response<Image> imageResponse = imageModel.generate(drawPromptTemplate.apply(promptVariables).text());

            System.out.println(imageResponse.content().url()); // Enjoy your locally stored picture of Charlie on the party :)
        }
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.model.openai.OpenAiImageModel.builder"
] | [((1819, 2100), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 2071), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 2033), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1993), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1954), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1906), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((3236, 3309), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3236, 3301), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.cohere.CohereScoringModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.scoring.ScoringModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.aggregator.ContentAggregator;
import dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Scanner;
public class _04_Advanced_RAG_with_ReRanking {
/**
* Please refer to previous examples for basic context.
* <p>
* Advanced RAG in LangChain4j is described here: https://github.com/langchain4j/langchain4j/pull/538
* <p>
* This example illustrates the implementation of a more advanced RAG application
* using a technique known as "re-ranking".
* <p>
* Frequently, not all results retrieved by {@link ContentRetriever} are truly relevant to the user query.
* This is because, during the initial retrieval stage, it is often preferable to use faster
* and more cost-effective models, particularly when dealing with a large volume of data.
* The trade-off is that the retrieval quality may be lower.
* Providing irrelevant information to the LLM can be costly and, in the worst case, lead to hallucinations.
* Therefore, in the second stage, we can perform re-ranking of the results obtained in the first stage
* and eliminate irrelevant results using a more advanced model (e.g., Cohere Rerank).
* <p>
* We will continue using {@link AiServices} for this example,
* but the same principles apply to {@link ConversationalRetrievalChain}, or you can develop your custom RAG flow.
*/
public static void main(String[] args) {
CustomerSupportAgent agent = createCustomerSupportAgent();
// First, say "Hi". Observe how all segments retrieved in the first stage were filtered out.
// Then, ask "Can I cancel my reservation?" and observe how all but one segment were filtered out.
try (Scanner scanner = new Scanner(System.in)) {
while (true) {
System.out.println("==================================================");
System.out.print("User: ");
String userQuery = scanner.nextLine();
System.out.println("==================================================");
if ("exit".equalsIgnoreCase(userQuery)) {
break;
}
String agentAnswer = agent.answer(userQuery);
System.out.println("==================================================");
System.out.println("Agent: " + agentAnswer);
}
}
}
private static CustomerSupportAgent createCustomerSupportAgent() {
// Check _01_Naive_RAG if you need more details on what is going on here
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.apiKey("demo")
.modelName("gpt-3.5-turbo")
.build();
EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
Path documentPath = toPath("miles-of-smiles-terms-of-use.txt");
EmbeddingStore<TextSegment> embeddingStore = embed(documentPath, embeddingModel);
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(5) // let's get more results
.build();
// To register and get a free API key for Cohere, please visit the following link:
// https://dashboard.cohere.com/welcome/register
ScoringModel scoringModel = CohereScoringModel.withApiKey(System.getenv("COHERE_API_KEY"));
ContentAggregator contentAggregator = ReRankingContentAggregator.builder()
.scoringModel(scoringModel)
.minScore(0.8) // we want to present the LLM with only the truly relevant segments for the user's query
.build();
RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
.contentRetriever(contentRetriever)
.contentAggregator(contentAggregator)
.build();
return AiServices.builder(CustomerSupportAgent.class)
.chatLanguageModel(chatModel)
.retrievalAugmentor(retrievalAugmentor)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
}
private static EmbeddingStore<TextSegment> embed(Path documentPath, EmbeddingModel embeddingModel) {
DocumentParser documentParser = new TextDocumentParser();
Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
DocumentSplitter splitter = DocumentSplitters.recursive(300, 0);
List<TextSegment> segments = splitter.split(document);
List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
embeddingStore.addAll(embeddings, segments);
return embeddingStore;
}
interface CustomerSupportAgent {
String answer(String query);
}
private static Path toPath(String fileName) {
try {
URL fileUrl = _04_Advanced_RAG_with_ReRanking.class.getResource(fileName);
return Paths.get(fileUrl.toURI());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((4121, 4247), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4121, 4222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4121, 4178), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4533, 4751), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4700), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4669), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4621), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5049, 5275), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5049, 5160), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5049, 5129), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5326, 5492), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5326, 5467), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5326, 5413), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5510, 5756), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5731), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5658), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5602), 'dev.langchain4j.service.AiServices.builder')] |
package me.egaetan.xpchat;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
import static org.mapdb.Serializer.INTEGER;
import static org.mapdb.Serializer.STRING;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.eclipse.jetty.server.session.SessionHandler;
import org.jetbrains.annotations.NotNull;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.OutputFrame.OutputType;
import org.testcontainers.containers.wait.strategy.DockerHealthcheckWaitStrategy;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.dockerjava.api.model.DeviceRequest;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentLoader;
import dev.langchain4j.data.document.DocumentSource;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.E5SmallV2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.TokenStream;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.javalin.Javalin;
import io.javalin.community.ssl.SslPlugin;
import io.javalin.http.Context;
import io.javalin.http.UploadedFile;
import io.javalin.http.staticfiles.Location;
import io.javalin.websocket.WsConnectContext;
public class MyCM {
private static final String WHISPER_MODEL = "whisper";
private static final String STABLEDIFFUSION_URL_TXT2IMG = "http://127.0.0.1:7860/sdapi/v1/txt2img";
private static final String PATH_TO_CERT = "C:\\Certbot\\live\\undefined.egaetan.me\\";
static String OLLAMA_MODEL_NAME = "gemma";
static String OLLAMA_DOCKER_IMAGE_NAME = "ollama/ollama";
static Integer OLLAMA_PORT = 11434;
static String DOCKER_LOCALAI_IMAGE_NAME = "localai/localai:v2.9.0-ffmpeg-core";
static Integer LOCALAI_PORT = 8080;
static GenericContainer<?> localai = new GenericContainer<>(DOCKER_LOCALAI_IMAGE_NAME)
.withFileSystemBind("whisperModels", "/build/models")
.withCommand("whisper-base")
.withExposedPorts(8080);
static GenericContainer<?> ollama = new GenericContainer<>(OLLAMA_DOCKER_IMAGE_NAME)
.withCreateContainerCmdModifier(cmd -> {
cmd
.getHostConfig()
.withDeviceRequests(
Collections.singletonList(
new DeviceRequest()
.withCapabilities(Collections.singletonList(Collections.singletonList("gpu")))
.withCount(-1)
)
);
})
.withFileSystemBind("ollama", "/root/.ollama")
.withExposedPorts(OLLAMA_PORT);
    /** JSON payload carrying a single chat message between browser and server. */
    public static class Message {
        // Public field + no-arg constructor are required by Jackson
        // (ctx.bodyAsClass / ObjectMapper.writeValueAsString).
        public String message;
        public Message() {
        }
        public Message(String message) {
            super();
            this.message = message;
        }
    }
    /** JSON payload pushed over the WebSocket when the token stream completes (full answer text). */
    public static class MessageService {
        // Public field + no-arg constructor are required by Jackson serialization.
        public String service;
        public MessageService() {
        }
        public MessageService(String message) {
            super();
            this.service = message;
        }
    }
    /** JSON payload pushed over the WebSocket to signal termination (sent with "ERROR" on stream failure). */
    public static class MessageStop {
        // Public field + no-arg constructor are required by Jackson serialization.
        public String stop;
        public MessageStop() {
        }
        public MessageStop(String message) {
            super();
            this.stop = message;
        }
    }
    /** Subset of LocalAI's /v1/audio/transcriptions response; only the transcribed text is read. */
    public static class Whisper {
        public String text;
    }
    /**
     * Streaming chat assistant. Replies arrive token-by-token via the returned
     * {@link TokenStream}; one conversation memory is kept per {@code id}
     * (the HTTP session id). The system prompt pins the assistant to French.
     */
    interface Assistant {
        @SystemMessage("You are a helpful french assistant. Répond uniquement en français, ne parle jamais en anglais. Sois précis et juste dans toutes tes réponses")
        TokenStream chat(@MemoryId String id, @UserMessage String userMessage);
    }
    /**
     * Boots the whole demo stack: starts the Ollama (chat LLM) and LocalAI
     * (Whisper speech-to-text) containers, wires a streaming assistant with
     * per-session memory and optional RAG over uploaded PDFs, then serves the
     * web UI plus REST/WebSocket API on port 7070 (HTTPS via Certbot certs).
     */
    public static void main(String[] args) throws UnsupportedOperationException, IOException, InterruptedException {
        // --- Chat LLM container (Ollama) ------------------------------------------------
        ollama.start();
        ollama.followOutput(x -> System.out.println("OLLAMA>>"+x.getUtf8StringWithoutLineEnding()), OutputType.STDOUT);
        ollama.followOutput(x -> System.err.println("OLLAMA>>"+x.getUtf8StringWithoutLineEnding()), OutputType.STDERR);
        ollama.waitingFor(new DockerHealthcheckWaitStrategy());
        // --- Speech-to-text container (LocalAI running the Whisper base model) ---------
        localai.setCommand("whisper-base");
        localai.start();
        localai.followOutput(x -> System.out.println("LOCALAI"+x.getUtf8StringWithoutLineEnding()), OutputType.STDOUT);
        localai.followOutput(x -> System.err.println("LOCALAI"+x.getUtf8StringWithoutLineEnding()), OutputType.STDERR);
        localai.waitingFor(new DockerHealthcheckWaitStrategy());
        // Pull/load the chat model inside the container before serving traffic.
        System.out.println("Run Ollama");
        ExecResult execInContainer = ollama.execInContainer("ollama", "run", "gemma:7b");
        System.err.println(execInContainer.getStderr());
        System.out.println(execInContainer.getStdout());
        System.out.println("Create LanguageModels");
        // Streaming model: tokens are forwarded to the browser as they arrive.
        StreamingChatLanguageModel modelStreaming = OllamaStreamingChatModel.builder()
                .baseUrl(String.format("http://%s:%d", ollama.getHost(), ollama.getMappedPort(OLLAMA_PORT)))
                .timeout(Duration.ofMinutes(2))
                .modelName("gemma:7b")
                .numPredict(8192)
                .temperature(0.0).build();
        // Conversation state: one 20-message window per HTTP session, backed by MapDB.
        PersistentChatMemoryStore store = new PersistentChatMemoryStore();
        DocumentSplitter splitter = DocumentSplitters.recursive(300, 50, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingModel embeddingModel = new E5SmallV2EmbeddingModel();
        // Per-session vector stores for uploaded PDFs, keyed by HTTP session id.
        Map<String, EmbeddingStore<TextSegment>> embeddingStore = new ConcurrentHashMap<>();
        ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder().id(memoryId)
                .maxMessages(20)
                .chatMemoryStore(store)
                .build();
        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(modelStreaming)
                .chatMemoryProvider(chatMemoryProvider)
                .build();
        // HTTPS via the Let's Encrypt/Certbot certificate; HTTP/2 disabled.
        SslPlugin plugin = new SslPlugin(conf -> {
            conf.pemFromPath(PATH_TO_CERT + "cert.pem",
                    PATH_TO_CERT + "privkey.pem");
            conf.http2 = false;
        });
        Javalin app = Javalin.create(config -> {
            config.staticFiles.add("src/main/resources/public", Location.EXTERNAL);
            config.jetty.modifyServletContextHandler(handler -> handler.setSessionHandler(new SessionHandler()));
            config.registerPlugin(plugin);
        })
        ;
        // Force a session on every request: the session id keys chat memory,
        // per-session embeddings, and the WebSocket back-channel.
        app.before(ctx -> {
            ctx.req().getSession(true);
        });
        // One open WebSocket per session, used to push tokens back to the browser.
        Map<String, WsConnectContext> rsp = new ConcurrentHashMap<>();
        // NOTE(review): never shut down — acceptable for a demo, leaks threads otherwise.
        ExecutorService executor = Executors.newFixedThreadPool(2);
        // Text prompt -> Stable Diffusion image (see draw()).
        app.post("/api/chat2Img", ctx -> {
            Message msg = ctx.bodyAsClass(Message.class);
            String sessionId = ctx.req().getSession().getId();
            draw(ctx, msg, sessionId);
        });
        // Audio upload -> Whisper transcription -> chat with the transcribed text.
        app.post("/api/speech", ctx -> {
            UploadedFile uploadedFile = ctx.uploadedFile("file");
            MultiPartBodyPublisher publisher = new MultiPartBodyPublisher()
                    .addPart("model", WHISPER_MODEL)
                    .addPart("file", () -> uploadedFile.content(), "speech", "application/octet-stream");
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:"+localai.getMappedPort(LOCALAI_PORT)+"/v1/audio/transcriptions"))
                    .header("Content-Type", "multipart/form-data; boundary=" + publisher.getBoundary())
                    .timeout(Duration.ofMinutes(1))
                    .POST(publisher.build())
                    .build();
            HttpResponse<String> response = client.send(request, BodyHandlers.ofString());
            System.out.println(response.statusCode());
            System.out.println(response.body());
            // Tolerate extra fields in the LocalAI response; we only need "text".
            ObjectMapper mapperWhisper = new ObjectMapper();
            mapperWhisper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
            Whisper value = mapperWhisper.readValue(response.body(), Whisper.class);
            Message msg = new Message(value.text);
            String sessionId = ctx.req().getSession().getId();
            System.out.println("SessionId : " + sessionId);
            generateChat(embeddingModel, embeddingStore, assistant, rsp, executor, msg, sessionId);
            // Echo the transcription back so the UI can display what was understood.
            ctx.json(msg);
        });
        // Plain text chat; the answer is streamed over the WebSocket, not this response.
        app.post("/api/chat", ctx -> {
            Message msg = ctx.bodyAsClass(Message.class);
            String sessionId = ctx.req().getSession().getId();
            System.out.println("SessionId : " + sessionId);
            generateChat(embeddingModel, embeddingStore, assistant, rsp, executor, msg, sessionId);
            System.out.println(msg.message);
        });
        // PDF upload: split, embed, and index the document for this session (enables RAG).
        app.post("/api/upload", ctx -> {
            String sessionId = ctx.req().getSession().getId();
            System.out.println("Upload");
            UploadedFile uploadedFile = ctx.uploadedFile("document");
            InputStream content = uploadedFile.content();
            Document document = DocumentLoader.load(new DocumentSource() {
                @Override
                public Metadata metadata() {
                    return new Metadata();
                }
                @Override
                public InputStream inputStream() throws IOException {
                    return content;
                }
            }, new ApachePdfBoxDocumentParser());
            List<TextSegment> segments = splitter.split(document);
            List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
            embeddingStore.computeIfAbsent(sessionId, __ -> new InMemoryEmbeddingStore<>()).addAll(embeddings, segments);
            System.out.println("OK -pdf");
        });
        // WebSocket lifecycle: register the socket per session; on close, drop all
        // per-session state (chat memory, embeddings, socket).
        app.ws("/api/canal", ctx -> {
            ctx.onConnect(r -> {
                String sessionId = r.getUpgradeCtx$javalin().req().getSession().getId();
                System.out.println("Session " + sessionId);
                rsp.put(sessionId, r);
                r.sendPing();
            });
            ctx.onClose(r -> {
                String sessionId = r.getUpgradeCtx$javalin().req().getSession().getId();
                System.out.println("Delete Session " + sessionId);
                store.deleteMessages(sessionId);
                embeddingStore.remove(sessionId);
                rsp.remove(sessionId);
            });
        });
        // Keep-alive: ping every open socket once a second so proxies don't drop them.
        Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> {
            for (var x : rsp.values()) {
                x.sendPing();
            }
        }, 1, 1, TimeUnit.SECONDS);
        app.start(7070);
    }
    /**
     * Sends the user's prompt to a local Stable Diffusion (AUTOMATIC1111) instance
     * via its txt2img REST API and returns the raw JSON response (base64 images)
     * straight to the client.
     *
     * NOTE(review): msg.message is spliced into the JSON template without escaping —
     * a prompt containing a double quote or backslash will produce invalid JSON.
     */
    private static void draw(@NotNull Context ctx, Message msg, String sessionId)
            throws IOException, InterruptedException {
        System.out.println("Chat2img : " + msg.message);
        System.out.println("SessionId : " + sessionId);
        HttpClient client = HttpClient.newHttpClient();
        // txt2img request template; $$$PROMPT$$$ is substituted with the user's prompt below.
        String body = """
                {
                    "prompt": "$$$PROMPT$$$",
                    "negative_prompt" : "ugly, bad quality",
                    "steps": 20,
                    "cfg_scale": 5,
                    "sampler_name": "DPM++ 3M SDE Karras",
                    "width": 512,
                    "height": 512,
                    "override_settings": {
                        "sd_model_checkpoint": "icbinpICantBelieveIts_newYear",
                        "CLIP_stop_at_last_layers": 2
                    },
                    "extra_generation_params": {"ADetailer model": "face_yolov8n.pt", "ADetailer confidence": 0.3, "ADetailer dilate erode": 4, "ADetailer mask blur": 4, "ADetailer denoising strength": 0.4, "ADetailer inpaint only masked": true, "ADetailer inpaint padding": 32, "ADetailer version": "24.1.2", "Denoising strength": 0.4, "Mask blur": 4, "Inpaint area": "Only masked", "Masked area padding": 32}
                }
                """;
        HttpRequest req = HttpRequest.newBuilder()
                .uri(URI.create(STABLEDIFFUSION_URL_TXT2IMG))
                .POST(BodyPublishers.ofString(body.replace("$$$PROMPT$$$", msg.message)))
                .build();
        HttpResponse<String> reponse = client.send(req, BodyHandlers.ofString());
        System.out.println("Done");
        // Pass the Stable Diffusion JSON response through unchanged.
        ctx.result(reponse.body());
    }
    /**
     * Answers a chat message, with RAG when the session has uploaded a document:
     * if a per-session embedding store exists, the 10 most relevant segments
     * (similarity score >= 0.7) are wrapped around the question with a French
     * prompt template; otherwise the raw message goes to the assistant as-is.
     * The reply is streamed over the session's WebSocket by {@link #speak}.
     */
    private static void generateChat(EmbeddingModel embeddingModel,
            Map<String, EmbeddingStore<TextSegment>> embeddingStore, Assistant assistant,
            Map<String, WsConnectContext> rsp, ExecutorService executor, Message msg, String sessionId) {
        System.out.println(">>>" + msg.message);
        EmbeddingStore<TextSegment> embeddings = embeddingStore.get(sessionId);
        if (embeddings == null) {
            // No document uploaded for this session: plain chat.
            executor.execute(() -> speak(assistant, rsp, msg, sessionId));
        }
        else {
            // Embed the question and retrieve the most similar document segments.
            Embedding questionEmbedding = embeddingModel.embed(msg.message).content();
            int maxResults = 10;
            double minScore = 0.7;
            List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddings.findRelevant(questionEmbedding,
                    maxResults, minScore);
            // French template: "answer the question using the following information".
            PromptTemplate promptTemplate = PromptTemplate
                    .from("Répond à la question suivante avec la plus grande précisions:\n" + "\n" + "Question:\n"
                            + "{{question}}\n" + "\n" + "En te basant sur les informations suivantes:\n"
                            + "{{information}}");
            String information = relevantEmbeddings.stream().map(match -> match.embedded().text())
                    .collect(joining("\n\n"));
            System.out.println("Embeddings:" + information.length() +"\n------------------\n");
            Map<String, Object> variables = new HashMap<>();
            variables.put("question", msg.message);
            variables.put("information", information);
            Prompt prompt = promptTemplate.apply(variables);
            executor.execute(() -> speak(assistant, rsp, new Message(prompt.text()), sessionId));
        }
    }
    /**
     * Streams the assistant's reply to the session's WebSocket: each token is
     * sent as a {@code Message}, the final full answer as a {@code MessageService},
     * and any failure as a {@code MessageStop("ERROR")}. Tokens for sessions whose
     * socket has gone away are dropped.
     */
    private static void speak(Assistant assistant, Map<String, WsConnectContext> rsp, Message msg, String sessionId) {
        TokenStream tokenStream = assistant.chat(sessionId, msg.message);
        // Used to log a single "response started" line on the first token.
        AtomicBoolean receive = new AtomicBoolean(false);
        tokenStream.onNext(t -> {
            WsConnectContext x = rsp.get(sessionId);
            if (x == null) {
                System.out.println("No session");
                // NOTE(review): re-registering a no-op handler looks intended to mute the
                // rest of the stream — confirm TokenStream supports swapping the consumer
                // after start().
                tokenStream.onNext(__ -> {});
                return;
            }
            try {
                x.send(new ObjectMapper().writeValueAsString(new Message(t)));
            } catch (JsonProcessingException e) {
                e.printStackTrace();
            }
            if (!receive.getAndSet(true)) {
                System.out.println("Début de la réponse");
            }
        })
        .onComplete(t -> {
            WsConnectContext x = rsp.get(sessionId);
            if (x == null) {
                return;
            }
            try {
                // Push the complete answer so the UI can replace the token-by-token text.
                x.send(new ObjectMapper().writeValueAsString(new MessageService(t.content().text())));
            } catch (JsonProcessingException e) {
                e.printStackTrace();
            }
            System.out.println(t);
        })
        .onError(t-> {
            WsConnectContext x = rsp.get(sessionId);
            if (x == null) {
                return;
            }
            try {
                x.send(new ObjectMapper().writeValueAsString(new MessageStop("ERROR")));
            } catch (JsonProcessingException e) {
                e.printStackTrace();
            }
            System.err.println(t);
        })
        .start();
    }
static class PersistentChatMemoryStore implements ChatMemoryStore {
private final DB db = DBMaker.fileDB("multi-user-chat-memory.db").transactionEnable().make();
private final Map<Integer, String> map = db.hashMap("messages", INTEGER, STRING).createOrOpen();
public PersistentChatMemoryStore() {
map.clear();
}
@Override
public List<ChatMessage> getMessages(Object memoryId) {
String json = map.get((int) memoryId.hashCode());
return messagesFromJson(json);
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
String json = messagesToJson(messages);
map.put((int) memoryId.hashCode(), json);
db.commit();
}
@Override
public void deleteMessages(Object memoryId) {
map.remove((int) memoryId.hashCode());
db.commit();
}
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder"
] | [((6543, 6789), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6781), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6759), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6737), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6710), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6674), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((7173, 7281), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7268), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7240), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7219), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7308, 7448), 'dev.langchain4j.service.AiServices.builder'), ((7308, 7435), 'dev.langchain4j.service.AiServices.builder'), ((7308, 7391), 'dev.langchain4j.service.AiServices.builder'), ((8608, 8908), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8894), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8864), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8827), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8738), 'java.net.http.HttpRequest.newBuilder'), ((11199, 11355), 'java.util.concurrent.Executors.newSingleThreadScheduledExecutor'), ((12436, 12601), 'java.net.http.HttpRequest.newBuilder'), ((12436, 12588), 'java.net.http.HttpRequest.newBuilder'), ((12436, 12510), 'java.net.http.HttpRequest.newBuilder'), ((15598, 15668), 'org.mapdb.DBMaker.fileDB'), ((15598, 15661), 'org.mapdb.DBMaker.fileDB')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static java.time.Duration.ofSeconds;
public class _01_ModelParameters {

    public static void main(String[] args) {
        // Parameter reference: https://platform.openai.com/docs/api-reference/chat/create
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName("gpt-3.5-turbo")
                .temperature(0.3)       // low temperature -> fairly deterministic output
                .timeout(ofSeconds(60))
                .logRequests(true)
                .logResponses(true)
                .build();

        String question = "Explain in three lines how to make a beautiful painting";
        String answer = chatModel.generate(question);
        System.out.println(answer);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((377, 664), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 639), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 603), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 568), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 494), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 450), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package dev.example;
import static dev.langchain4j.data.document.UrlDocumentLoader.load;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import java.io.IOException;
import java.net.URL;
import jakarta.enterprise.context.ApplicationScoped;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
public class Beans {

    /**
     * Retriever used for RAG: returns at most one segment, and only when its
     * similarity score reaches 0.6. Tune both values for your data and the
     * embedding model in use.
     */
    @ApplicationScoped
    Retriever<TextSegment> retriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        int maxResultsRetrieved = 1;
        double minScore = 0.6;
        return EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, maxResultsRetrieved, minScore);
    }

    /**
     * Demo embedding store: loads the "Miles of Smiles" terms of use from the
     * classpath, splits it into 100-token segments, embeds them, and keeps the
     * result in memory. In a real application the store would already be filled.
     *
     * @throws IOException if the document cannot be read
     */
    @ApplicationScoped
    EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel) throws IOException {
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        String documentName = "miles-of-smiles-terms-of-use.txt";
        URL resource = Thread.currentThread().getContextClassLoader().getResource(documentName);
        if (resource == null) {
            throw new IllegalStateException("Unable to locate document: '" + documentName + "' on the classpath");
        }
        Document document = load(resource);

        // Split -> embed -> store, automated by EmbeddingStoreIngestor.
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);

        return embeddingStore;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((2707, 2912), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2887), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2839), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2791), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.kchandrakant;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
public class SimpleService {

    /** Minimal conversational interface implemented by AiServices at runtime. */
    interface Assistant {
        String chat(String message);
    }

    public static void main(String[] args) {
        // Keep the last 10 messages so the model remembers earlier turns.
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
        OpenAiChatModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(chatMemory)
                .build();

        String greetingReply = assistant.chat("Hello! My name is Klaus.");
        System.out.println(greetingReply); // Hello Klaus! How can I assist you today?

        String nameReply = assistant.chat("What is my name?");
        System.out.println(nameReply); // Your name is Klaus. — recalled from chat memory
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((481, 668), 'dev.langchain4j.service.AiServices.builder'), ((481, 643), 'dev.langchain4j.service.AiServices.builder'), ((481, 603), 'dev.langchain4j.service.AiServices.builder')] |
package com.redhat;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
@ApplicationScoped
public class IngestorExample {

    /** The embedding store (database); bean provided by the quarkus-langchain4j-redis extension. */
    @Inject
    RedisEmbeddingStore store;

    /** Embedding model (how a document's vector is computed); bean provided by the LLM extension. */
    @Inject
    EmbeddingModel embeddingModel;

    /** Loads the insurance documents at startup and indexes them into Redis. */
    public void ingest(@Observes StartupEvent event) {
        System.out.printf("Ingesting documents...%n");
        // Alternative demo datasets live under src/main/resources/{bank,museum,halffoods}.
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/insurance").toPath(),
                new TextDocumentParser());
        EmbeddingStoreIngestor storeIngestor = EmbeddingStoreIngestor.builder()
                .embeddingModel(embeddingModel)
                .embeddingStore(store)
                .documentSplitter(recursive(500, 0))
                .build();
        storeIngestor.ingest(documents);
        System.out.printf("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1785, 1982), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1957), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1904), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1856), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.*;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.List;
import static java.util.Arrays.asList;
public class OtherServiceExamples {
static ChatLanguageModel chatLanguageModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);
static class Sentiment_Extracting_AI_Service_Example {
enum Sentiment {
POSITIVE, NEUTRAL, NEGATIVE;
}
interface SentimentAnalyzer {
@UserMessage("Analyze sentiment of {{it}}")
Sentiment analyzeSentimentOf(String text);
@UserMessage("Does {{it}} have a positive sentiment?")
boolean isPositive(String text);
}
public static void main(String[] args) {
SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, chatLanguageModel);
Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is good!");
System.out.println(sentiment); // POSITIVE
boolean positive = sentimentAnalyzer.isPositive("It is bad!");
System.out.println(positive); // false
}
}
static class Number_Extracting_AI_Service_Example {
interface NumberExtractor {
@UserMessage("Extract number from {{it}}")
int extractInt(String text);
@UserMessage("Extract number from {{it}}")
long extractLong(String text);
@UserMessage("Extract number from {{it}}")
BigInteger extractBigInteger(String text);
@UserMessage("Extract number from {{it}}")
float extractFloat(String text);
@UserMessage("Extract number from {{it}}")
double extractDouble(String text);
@UserMessage("Extract number from {{it}}")
BigDecimal extractBigDecimal(String text);
}
public static void main(String[] args) {
NumberExtractor extractor = AiServices.create(NumberExtractor.class, chatLanguageModel);
String text = "After countless millennia of computation, the supercomputer Deep Thought finally announced " +
"that the answer to the ultimate question of life, the universe, and everything was forty two.";
int intNumber = extractor.extractInt(text);
System.out.println(intNumber); // 42
long longNumber = extractor.extractLong(text);
System.out.println(longNumber); // 42
BigInteger bigIntegerNumber = extractor.extractBigInteger(text);
System.out.println(bigIntegerNumber); // 42
float floatNumber = extractor.extractFloat(text);
System.out.println(floatNumber); // 42.0
double doubleNumber = extractor.extractDouble(text);
System.out.println(doubleNumber); // 42.0
BigDecimal bigDecimalNumber = extractor.extractBigDecimal(text);
System.out.println(bigDecimalNumber); // 42.0
}
}
static class Date_and_Time_Extracting_AI_Service_Example {
interface DateTimeExtractor {
@UserMessage("Extract date from {{it}}")
LocalDate extractDateFrom(String text);
@UserMessage("Extract time from {{it}}")
LocalTime extractTimeFrom(String text);
@UserMessage("Extract date and time from {{it}}")
LocalDateTime extractDateTimeFrom(String text);
}
public static void main(String[] args) {
DateTimeExtractor extractor = AiServices.create(DateTimeExtractor.class, chatLanguageModel);
String text = "The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight," +
" following the celebrations of Independence Day.";
LocalDate date = extractor.extractDateFrom(text);
System.out.println(date); // 1968-07-04
LocalTime time = extractor.extractTimeFrom(text);
System.out.println(time); // 23:45
LocalDateTime dateTime = extractor.extractDateTimeFrom(text);
System.out.println(dateTime); // 1968-07-04T23:45
}
}
static class POJO_Extracting_AI_Service_Example {
static class Person {
private String firstName;
private String lastName;
private LocalDate birthDate;
@Override
public String toString() {
return "Person {" +
" firstName = \"" + firstName + "\"" +
", lastName = \"" + lastName + "\"" +
", birthDate = " + birthDate +
" }";
}
}
interface PersonExtractor {
@UserMessage("Extract information about a person from {{it}}")
Person extractPersonFrom(String text);
}
public static void main(String[] args) {
ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
.apiKey(System.getenv("OPENAI_API_KEY"))
// When extracting POJOs with the LLM that supports the "json mode" feature
// (e.g., OpenAI, Azure OpenAI, Ollama, etc.), it is advisable to use it to get more reliable results.
// When using this feature, LLM will be forced to output a valid JSON.
// Please note that this feature is not (yet) supported when using "demo" key.
.responseFormat("json_object")
.build();
PersonExtractor extractor = AiServices.create(PersonExtractor.class, chatLanguageModel);
String text = "In 1968, amidst the fading echoes of Independence Day, "
+ "a child named John arrived under the calm evening sky. "
+ "This newborn, bearing the surname Doe, marked the start of a new journey.";
Person person = extractor.extractPersonFrom(text);
System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 }
}
}
static class POJO_With_Descriptions_Extracting_AI_Service_Example {
static class Recipe {
@Description("short title, 3 words maximum")
private String title;
@Description("short description, 2 sentences maximum")
private String description;
@Description("each step should be described in 4 words, steps should rhyme")
private List<String> steps;
private Integer preparationTimeMinutes;
@Override
public String toString() {
return "Recipe {" +
" title = \"" + title + "\"" +
", description = \"" + description + "\"" +
", steps = " + steps +
", preparationTimeMinutes = " + preparationTimeMinutes +
" }";
}
}
@StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
static class CreateRecipePrompt {
private String dish;
private List<String> ingredients;
}
interface Chef {
Recipe createRecipeFrom(String... ingredients);
Recipe createRecipe(CreateRecipePrompt prompt);
}
public static void main(String[] args) {
Chef chef = AiServices.create(Chef.class, chatLanguageModel);
Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives");
System.out.println(recipe);
// Recipe {
// title = "Greek Salad",
// description = "A refreshing mix of veggies and feta cheese in a zesty dressing.",
// steps = [
// "Chop cucumber and tomato",
// "Add onion and olives",
// "Crumble feta on top",
// "Drizzle with dressing and enjoy!"
// ],
// preparationTimeMinutes = 10
// }
CreateRecipePrompt prompt = new CreateRecipePrompt();
prompt.dish = "salad";
prompt.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives");
Recipe anotherRecipe = chef.createRecipe(prompt);
System.out.println(anotherRecipe);
// Recipe ...
}
}
static class AI_Service_with_System_Message_Example {
interface Chef {
@SystemMessage("You are a professional chef. You are friendly, polite and concise.")
String answer(String question);
}
public static void main(String[] args) {
Chef chef = AiServices.create(Chef.class, chatLanguageModel);
String answer = chef.answer("How long should I grill chicken?");
System.out.println(answer); // Grilling chicken usually takes around 10-15 minutes per side, depending on ...
}
}
static class AI_Service_with_System_and_User_Messages_Example {
interface TextUtils {
@SystemMessage("You are a professional translator into {{language}}")
@UserMessage("Translate the following text: {{text}}")
String translate(@V("text") String text, @V("language") String language);
@SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
List<String> summarize(@UserMessage String text, @V("n") int n);
}
public static void main(String[] args) {
TextUtils utils = AiServices.create(TextUtils.class, chatLanguageModel);
String translation = utils.translate("Hello, how are you?", "italian");
System.out.println(translation); // Ciao, come stai?
String text = "AI, or artificial intelligence, is a branch of computer science that aims to create " +
"machines that mimic human intelligence. This can range from simple tasks such as recognizing " +
"patterns or speech to more complex tasks like making decisions or predictions.";
List<String> bulletPoints = utils.summarize(text, 3);
System.out.println(bulletPoints);
// [
// "- AI is a branch of computer science",
// "- It aims to create machines that mimic human intelligence",
// "- It can perform simple or complex tasks"
// ]
}
}
static class AI_Service_with_UserName_Example {
interface Assistant {
String chat(@UserName String name, @UserMessage String message);
}
public static void main(String[] args) {
Assistant assistant = AiServices.create(Assistant.class, chatLanguageModel);
String answer = assistant.chat("Klaus", "Hi, tell me my name if you see it.");
System.out.println(answer); // Hello! Your name is Klaus. How can I assist you today?
}
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((5313, 5888), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5859), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5399), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ApplicationScoped
public class ChatMemoryBean implements ChatMemoryProvider {
    // One chat memory per conversation id; ConcurrentHashMap because this bean is
    // application-scoped and may be reached by several requests at once.
    private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();
    /**
     * Returns the chat memory for the given conversation, creating a bounded
     * window (20 most recent messages) on first access.
     *
     * @param memoryId identifier of the conversation
     * @return the memory associated with {@code memoryId}, never {@code null}
     */
    @Override
    public ChatMemory get(Object memoryId) {
        // Use the computeIfAbsent key parameter instead of re-capturing memoryId.
        return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder()
                .maxMessages(20)
                .id(id)
                .build());
    }
    /**
     * Forgets the memory of the given session, releasing its messages.
     *
     * @param session identifier of the conversation to drop
     */
    public void clear(Object session) {
        memories.remove(session);
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((608, 729), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 704), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 674), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static java.util.stream.Collectors.joining;
public class ChatWithDocumentsExamples {
    // See also: ServiceWithRetrieverExample
    static class IfYouNeedSimplicity {
        public static void main(String[] args) throws Exception {
            EmbeddingModel miniLmModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> inMemoryStore = new InMemoryEmbeddingStore<>();
            // Ingest: split the document, embed each segment, store the vectors.
            EmbeddingStoreIngestor storeIngestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(300, 0))
                    .embeddingModel(miniLmModel)
                    .embeddingStore(inMemoryStore)
                    .build();
            Document story = loadDocument(toPath("example-files/story-about-happy-carrot.txt"), new TextDocumentParser());
            storeIngestor.ingest(story);
            ConversationalRetrievalChain retrievalChain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
                    .retriever(EmbeddingStoreRetriever.from(inMemoryStore, miniLmModel))
                    // .chatMemory() // you can override default chat memory
                    // .promptTemplate() // you can override default prompt template
                    .build();
            String answer = retrievalChain.execute("Who is Charlie?");
            System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille...
        }
    }
    static class If_You_Need_More_Control {
        public static void main(String[] args) {
            // Load the document we want to "chat" about with the model.
            Document story = loadDocument(toPath("example-files/story-about-happy-carrot.txt"), new TextDocumentParser());
            // Split it into segments of at most 100 tokens each.
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(
                    100,
                    0,
                    new OpenAiTokenizer("gpt-3.5-turbo")
            );
            List<TextSegment> segments = documentSplitter.split(story);
            // Turn each segment into a vector that captures its meaning.
            EmbeddingModel miniLmModel = new AllMiniLmL6V2EmbeddingModel();
            List<Embedding> segmentEmbeddings = miniLmModel.embedAll(segments).content();
            // Keep the vectors in an in-memory store for later retrieval.
            EmbeddingStore<TextSegment> inMemoryStore = new InMemoryEmbeddingStore<>();
            inMemoryStore.addAll(segmentEmbeddings, segments);
            // Embed the question the same way as the document segments.
            String question = "Who is Charlie?";
            Embedding questionEmbedding = miniLmModel.embed(question).content();
            // Retrieve segments semantically close to the question.
            // Tune maxResults/minScore to find a sweet spot for your use case.
            int maxResults = 3;
            double minScore = 0.7;
            List<EmbeddingMatch<TextSegment>> matches
                    = inMemoryStore.findRelevant(questionEmbedding, maxResults, minScore);
            // Assemble a prompt containing the question plus the retrieved context.
            PromptTemplate promptTemplate = PromptTemplate.from(
                    "Answer the following question to the best of your ability:\n"
                            + "\n"
                            + "Question:\n"
                            + "{{question}}\n"
                            + "\n"
                            + "Base your answer on the following information:\n"
                            + "{{information}}");
            String information = matches.stream()
                    .map(match -> match.embedded().text())
                    .collect(joining("\n\n"));
            Prompt prompt = promptTemplate.apply(Map.of(
                    "question", question,
                    "information", information));
            // Ask the OpenAI chat model.
            ChatLanguageModel chatModel = OpenAiChatModel.builder()
                    .apiKey(ApiKeys.OPENAI_API_KEY)
                    .timeout(Duration.ofSeconds(60))
                    .build();
            AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content();
            // Print the model's answer.
            System.out.println(aiMessage.text()); // Charlie is a cheerful carrot living in VeggieVille...
        }
    }
    // Resolves a classpath resource name to a filesystem path.
    private static Path toPath(String fileName) {
        URL fileUrl = ChatWithDocumentsExamples.class.getResource(fileName);
        try {
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1887, 2127), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 2098), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 2046), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 1994), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2345, 2758), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2345, 2567), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2345, 2474), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((5749, 5908), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5749, 5879), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5749, 5826), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package ru.vzotov.ai.interfaces.facade.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.Content;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.aggregator.ContentAggregator;
import dev.langchain4j.rag.content.aggregator.DefaultContentAggregator;
import dev.langchain4j.rag.content.injector.ContentInjector;
import dev.langchain4j.rag.content.injector.DefaultContentInjector;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.Query;
import dev.langchain4j.rag.query.transformer.QueryTransformer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.access.annotation.Secured;
import org.springframework.transaction.annotation.Transactional;
import ru.vzotov.accounting.infrastructure.security.SecurityUtils;
import ru.vzotov.accounting.interfaces.purchases.PurchasesApi;
import ru.vzotov.accounting.interfaces.purchases.facade.impl.assembler.PurchaseAssembler;
import ru.vzotov.ai.interfaces.facade.AIFacade;
import ru.vzotov.cashreceipt.domain.model.PurchaseCategory;
import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryId;
import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryRepository;
import ru.vzotov.purchase.domain.model.Purchase;
import ru.vzotov.purchase.domain.model.PurchaseId;
import ru.vzotov.purchases.domain.model.PurchaseRepository;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
/**
 * AI-backed facade that assigns categories to purchases, either by
 * nearest-neighbour voting over an embedding store or by asking a chat model
 * augmented with retrieved context (RAG).
 */
public class AIFacadeImpl implements AIFacade {
    private static final Logger log = LoggerFactory.getLogger(AIFacadeImpl.class);
    private final PurchaseCategoryRepository purchaseCategoryRepository;
    private final PurchaseRepository purchaseRepository;
    private final EmbeddingStore<TextSegment> embeddingStore;
    private final EmbeddingModel embeddingModel;
    private final ChatLanguageModel chatLanguageModel;
    private final ObjectMapper objectMapper;
    @Builder
    public AIFacadeImpl(PurchaseCategoryRepository purchaseCategoryRepository,
                        PurchaseRepository purchaseRepository,
                        EmbeddingStore<TextSegment> embeddingStore,
                        EmbeddingModel embeddingModel,
                        ChatLanguageModel chatLanguageModel,
                        ObjectMapper objectMapper
    ) {
        this.purchaseCategoryRepository = purchaseCategoryRepository;
        this.purchaseRepository = purchaseRepository;
        this.embeddingStore = embeddingStore;
        this.embeddingModel = embeddingModel;
        this.chatLanguageModel = chatLanguageModel;
        this.objectMapper = objectMapper;
    }
    /**
     * Hybrid classification: for each purchase, look up its {@code samples} nearest
     * stored documents; if more than {@code threshold} of them agree on one category,
     * assign it directly. Purchases without a clear winner are delegated to the chat
     * model in batches.
     *
     * @param purchaseIdList ids of the purchases to classify
     * @return DTOs of the purchases with their (possibly newly assigned) categories
     */
    @Override
    @Transactional(value = "accounting-tx", readOnly = true)
    @Secured({"ROLE_USER"})
    public List<PurchasesApi.Purchase> classifyPurchasesBySimilarity(List<String> purchaseIdList) {
        final int samples = 5;
        final int threshold = samples - 1;
        log.debug("Start hybrid classification of purchases {}. Samples={}, threshold={}",
                purchaseIdList, samples, threshold);
        try {
            final List<PurchaseCategory> categories = purchaseCategoryRepository.findAll(SecurityUtils.getCurrentPerson());
            final Map<PurchaseCategoryId, PurchaseCategory> purchaseCategoryMap = categories.stream()
                    .collect(Collectors.toMap(PurchaseCategory::categoryId, it -> it));
            final List<Purchase> purchases = loadPurchases(purchaseIdList);
            // FIX: both lists are appended to from the parallel stream below; a plain
            // ArrayList is not thread-safe there and could lose or corrupt elements.
            final List<Purchase> classified = Collections.synchronizedList(new ArrayList<>());
            final List<String> classifyByChatModel = Collections.synchronizedList(new ArrayList<>());
            log.debug("Get embeddings");
            // One embedding per purchase name, in the same order as 'purchases'.
            final List<Embedding> embeddings = embeddingModel.embedAll(
                    purchases.stream().map(p -> TextSegment.from(p.name())).toList()).content();
            IntStream.range(0, purchases.size())
                    .boxed()
                    .parallel()
                    .forEach(i -> {
                        Purchase purchase = purchases.get(i);
                        Embedding embedding = embeddings.get(i);
                        log.debug("{}:: Find relevant documents for purchase {}, {}", i, purchase.purchaseId(), purchase.name());
                        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(embedding, samples, 0.8);
                        // Count how often each category appears among the neighbours.
                        Map<PurchaseCategoryId, Long> relevantCategories = relevant.stream()
                                .map(match -> match.embedded().text())
                                .peek(text -> log.debug("Sample: {}", text))
                                .map(AIFacadeImpl::extractCategoryId)
                                .filter(Objects::nonNull)
                                .map(PurchaseCategoryId::new)
                                .collect(Collectors.groupingBy(e -> e, Collectors.counting()));
                        // A category wins only with more than 'threshold' votes.
                        Optional<PurchaseCategory> optionalKey = relevantCategories.entrySet().stream()
                                .filter(entry -> entry.getValue() > threshold)
                                .map(Map.Entry::getKey)
                                .findFirst()
                                .map(purchaseCategoryMap::get);
                        optionalKey.ifPresentOrElse(category -> {
                                    log.debug("{}:: Relevant category: {}, {}", i, category.categoryId(), category.name());
                                    purchase.assignCategory(category);
                                    classified.add(purchase);
                                },
                                () -> classifyByChatModel.add(purchase.purchaseId().value()));
                    });
            final List<PurchasesApi.Purchase> fromChatModel = new ArrayList<>();
            if (!classifyByChatModel.isEmpty()) {
                log.debug("This list of purchases is ambiguous. We will use chat model for classification: {}", classifyByChatModel);
                // Classify ambiguous purchases in batches of 5 to keep prompts small.
                Lists.partition(classifyByChatModel, 5).stream()
                        .map(this::classifyPurchases)
                        .forEach(fromChatModel::addAll);
            }
            return Stream.concat(
                    new PurchaseAssembler().toDTOList(classified).stream(),
                    fromChatModel.stream()
            ).toList();
        } finally {
            log.debug("Done");
        }
    }
    // Pulls the category id out of an indexed document of the form "... with id '<id>' ...".
    private static String extractCategoryId(String document) {
        Pattern pattern = Pattern.compile("with id '(.+?)'");
        Matcher matcher = pattern.matcher(document);
        if (matcher.find()) {
            return matcher.group(1);
        }
        return null;
    }
    /**
     * Classifies purchases with the chat model, augmenting the prompt with the most
     * relevant stored documents and the list of allowed categories.
     *
     * @param purchaseIdList ids of the purchases to classify
     * @return DTOs of the purchases with their (possibly newly assigned) categories
     */
    @Override
    @Transactional(value = "accounting-tx", readOnly = true)
    @Secured({"ROLE_USER"})
    public List<PurchasesApi.Purchase> classifyPurchases(List<String> purchaseIdList) {
        final List<PurchaseCategory> categories = purchaseCategoryRepository.findAll(SecurityUtils.getCurrentPerson());
        final Map<PurchaseCategoryId, PurchaseCategory> purchaseCategoryMap = categories.stream()
                .collect(Collectors.toMap(PurchaseCategory::categoryId, it -> it));
        // todo: we can embed multiple queries in one request (use embedAll)
        // The content retriever is responsible for retrieving relevant content based on a text query.
        ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2) // on each interaction we will retrieve the 2 most relevant segments
                .minScore(0.5) // we want to retrieve segments at least somewhat similar to user query
                .build();
        // Aggregates all Contents retrieved from all ContentRetrievers using all queries.
        ContentAggregator contentAggregator = new DefaultContentAggregator();
        // todo: we can use special kind of query (list of simple queries)
        // Splits collection query to multiple queries: one query for each item
        QueryTransformer queryTransformer = query -> {
            UserMessage userMessage = query.metadata().userMessage();
            return jsonMessage(userMessage, objectMapper.constructType(AgentRequest.class),
                    (AgentRequest data) -> data.purchases().stream()
                            .map(s -> Query.from(s.purchaseName(), query.metadata()))
                            .toList());
        };
        ContentInjector defaultContentInjector = DefaultContentInjector.builder().build();
        // Rewrites the user message into an explicit classification instruction before
        // delegating content injection to the default injector.
        ContentInjector contentInjector = (contents, userMessage) -> defaultContentInjector.inject(contents,
                UserMessage.from(jsonMessage(userMessage, objectMapper.constructType(AgentRequest.class),
                        (AgentRequest data) -> {
                            try {
                                return """
                                        Please answer which categories the list of purchases belong to:
                                        ```json
                                        %s
                                        ```
                                        The purchase category must be one of this list of possible categories:
                                        ```json
                                        %s
                                        ```
                                        """.formatted(
                                        objectMapper.writeValueAsString(data.purchases()),
                                        objectMapper.writeValueAsString(categories.stream().map(c -> new CategoryData(c.name(), c.categoryId().value())).toList())
                                );
                            } catch (JsonProcessingException e) {
                                throw new RuntimeException(e);
                            }
                        })));
        RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .contentRetriever(contentRetriever)
                .queryTransformer(queryTransformer)
                .contentAggregator(contentAggregator)
                .contentInjector(contentInjector)
                .build();
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
        PurchaseClassifyingAgent agent = AiServices.builder(PurchaseClassifyingAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .retrievalAugmentor(retrievalAugmentor)
                .chatMemory(chatMemory)
                .build();
        final List<Purchase> purchases = loadPurchases(purchaseIdList);
        final Map<PurchaseId, Purchase> purchaseMap = purchases.stream()
                .collect(Collectors.toMap(Purchase::purchaseId, it -> it));
        try {
            AgentResponse response = agent.classify(
                    objectMapper.writeValueAsString(new AgentRequest(purchases.stream().map(p -> new IdNameOfPurchase(p.purchaseId().value(), p.name())).toList())));
            // Apply every classification returned by the agent; items referring to
            // unknown purchase or category ids are silently skipped.
            Optional.ofNullable(response)
                    .map(AgentResponse::classification)
                    .stream().flatMap(List::stream)
                    .forEach(item -> {
                        final Purchase p = Optional.ofNullable(item.getPurchaseId())
                                .map(PurchaseId::new)
                                .map(purchaseMap::get)
                                .orElse(null);
                        if (p == null) return;
                        final PurchaseCategory targetCategory = Optional.ofNullable(item.getCategoryId())
                                .map(PurchaseCategoryId::new)
                                .map(purchaseCategoryMap::get)
                                .orElse(null);
                        if (targetCategory == null) return;
                        p.assignCategory(targetCategory);
                    });
            return new PurchaseAssembler().toDTOList(purchases);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
    // Resolves purchase ids to domain objects, silently dropping unknown ids.
    @NotNull
    private List<Purchase> loadPurchases(List<String> purchaseIdList) {
        return purchaseIdList.stream()
                .filter(Objects::nonNull)
                .map(PurchaseId::new)
                .map(purchaseRepository::find)
                .filter(Objects::nonNull)
                .toList();
    }
    // Category representation exposed to the LLM in the prompt.
    record CategoryData(String name, String id) {
    }
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    @Builder
    static class PurchaseCategoryData {
        private String purchaseId;
        private String purchaseName;
        private String categoryId;
        private String categoryName;
    }
    record IdNameOfPurchase(String purchaseId, String purchaseName) {
    }
    interface PurchaseClassifyingAgent {
        AgentResponse classify(String agentQuery);
    }
    record AgentResponse(
            @Description("""
                    array of objects {"purchaseId": (type: string), "purchaseName": (type: string), "categoryId": (type: string), "categoryName": (type: string)}
                    """)
            List<PurchaseCategoryData> classification) {
    }
    record AgentRequest(List<IdNameOfPurchase> purchases) {
    }
    /**
     * Parses the single text content of {@code userMessage} as JSON of the given
     * {@code type} and applies {@code action} to the parsed value.
     *
     * @throws IllegalArgumentException if the message does not carry a single text content
     * @throws RuntimeException         if the text is not valid JSON for {@code type}
     */
    <T, R> R jsonMessage(UserMessage userMessage, JavaType type, Function<T, R> action) {
        if (!userMessage.hasSingleText())
            throw new IllegalArgumentException("We support only single-text messages");
        Content content = userMessage.contents().get(0);
        if (content instanceof TextContent text) {
            try {
                T data = objectMapper.readValue(text.text(), type);
                return action.apply(data);
            } catch (JsonProcessingException e) {
                throw new RuntimeException(e);
            }
        } else {
            throw new IllegalArgumentException("Unsupported content type");
        }
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.injector.DefaultContentInjector.builder"
] | [((5182, 7051), 'java.util.stream.IntStream.range'), ((5182, 5279), 'java.util.stream.IntStream.range'), ((5182, 5247), 'java.util.stream.IntStream.range'), ((7335, 7493), 'com.google.common.collect.Lists.partition'), ((7335, 7437), 'com.google.common.collect.Lists.partition'), ((7335, 7383), 'com.google.common.collect.Lists.partition'), ((7530, 7686), 'java.util.stream.Stream.concat'), ((8757, 9121), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 9024), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8924), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8893), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8845), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((9923, 9963), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((11454, 11722), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11697), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11647), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11593), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11541), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11844, 12069), 'dev.langchain4j.service.AiServices.builder'), ((11844, 12044), 'dev.langchain4j.service.AiServices.builder'), ((11844, 12004), 'dev.langchain4j.service.AiServices.builder'), ((11844, 11948), 'dev.langchain4j.service.AiServices.builder'), ((12538, 13421), 'java.util.Optional.ofNullable'), ((12538, 12675), 'java.util.Optional.ofNullable'), ((12538, 12653), 'java.util.Optional.ofNullable'), ((12538, 12623), 'java.util.Optional.ofNullable'), ((12758, 12954), 'java.util.Optional.ofNullable'), ((12758, 12908), 'java.util.Optional.ofNullable'), ((12758, 12853), 'java.util.Optional.ofNullable'), ((13067, 13279), 'java.util.Optional.ofNullable'), 
((13067, 13233), 'java.util.Optional.ofNullable'), ((13067, 13170), 'java.util.Optional.ofNullable')] |
package dev.langchain4j.model.qianfan;
import dev.langchain4j.internal.Utils;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.language.StreamingLanguageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.qianfan.client.QianfanClient;
import dev.langchain4j.model.qianfan.client.QianfanStreamingResponseBuilder;
import dev.langchain4j.model.qianfan.client.completion.CompletionRequest;
import dev.langchain4j.model.qianfan.client.SyncOrAsyncOrStreaming;
import dev.langchain4j.model.qianfan.client.completion.CompletionResponse;
import dev.langchain4j.model.qianfan.spi.QianfanStreamingLanguageModelBuilderFactory;
import lombok.Builder;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
/**
*
* see details here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
*/
/**
 * Streaming language model backed by Baidu Qianfan.
 * See details here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
 */
public class QianfanStreamingLanguageModel implements StreamingLanguageModel {
    private final QianfanClient client;
    private final String baseUrl;
    private final Double temperature;
    private final Double topP;
    private final String modelName;
    private final Double penaltyScore;
    // NOTE(review): maxRetries is stored but not used by generate() in this class.
    private final Integer maxRetries;
    private final Integer topK;
    private final String endpoint;
    @Builder
    public QianfanStreamingLanguageModel(String baseUrl,
                                         String apiKey,
                                         String secretKey,
                                         Double temperature,
                                         Integer maxRetries,
                                         Integer topK,
                                         Double topP,
                                         String modelName,
                                         String endpoint,
                                         Double penaltyScore,
                                         Boolean logRequests,
                                         Boolean logResponses
    ) {
        // Both credentials are mandatory for the Qianfan API.
        if (Utils.isNullOrBlank(apiKey) || Utils.isNullOrBlank(secretKey)) {
            throw new IllegalArgumentException(" api key and secret key must be defined. It can be generated here: https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application");
        }
        this.modelName = modelName;
        // An explicit endpoint wins; otherwise derive it from the model name.
        this.endpoint = Utils.isNullOrBlank(endpoint)
                ? QianfanLanguageModelNameEnum.getEndpoint(modelName)
                : endpoint;
        if (Utils.isNullOrBlank(this.endpoint)) {
            throw new IllegalArgumentException("Qianfan is no such model name. You can see model name here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu");
        }
        this.baseUrl = getOrDefault(baseUrl, "https://aip.baidubce.com");
        this.client = QianfanClient.builder()
                .baseUrl(this.baseUrl)
                .apiKey(apiKey)
                .secretKey(secretKey)
                .logRequests(logRequests)
                .logStreamingResponses(logResponses)
                .build();
        this.temperature = getOrDefault(temperature, 0.7);
        this.maxRetries = getOrDefault(maxRetries, 3);
        this.topP = topP;
        this.topK = topK;
        this.penaltyScore = penaltyScore;
    }
    @Override
    public void generate(String prompt, StreamingResponseHandler<String> handler) {
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(prompt)
                .topK(topK)
                .topP(topP)
                .temperature(temperature)
                .penaltyScore(penaltyScore)
                .build();
        QianfanStreamingResponseBuilder responseBuilder = new QianfanStreamingResponseBuilder(null);
        SyncOrAsyncOrStreaming<CompletionResponse> streaming = client.completion(completionRequest, true, endpoint);
        streaming.onPartialResponse(partial -> {
                    // Accumulate the chunk and forward it to the caller.
                    responseBuilder.append(partial);
                    handle(partial, handler);
                })
                .onComplete(() -> handler.onComplete(responseBuilder.build(null)))
                .onError(handler::onError)
                .execute();
    }
    // Forwards a non-blank chunk of generated text to the handler.
    private static void handle(CompletionResponse partialResponse,
                               StreamingResponseHandler<String> handler) {
        String result = partialResponse.getResult();
        if (!Utils.isNullOrBlank(result)) {
            handler.onNext(result);
        }
    }
    public static QianfanStreamingLanguageModelBuilder builder() {
        // SPI-provided builder factories take precedence over the default builder.
        for (QianfanStreamingLanguageModelBuilderFactory factory : loadFactories(QianfanStreamingLanguageModelBuilderFactory.class)) {
            return factory.get();
        }
        return new QianfanStreamingLanguageModelBuilder();
    }
    public static class QianfanStreamingLanguageModelBuilder {
        public QianfanStreamingLanguageModelBuilder() {
            // Public so the Lombok-generated builder can be extended;
            // by default Lombok would make this constructor package-private.
        }
    }
}
| [
"dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder",
"dev.langchain4j.model.qianfan.client.QianfanClient.builder"
] | [((2840, 3092), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 3067), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 3014), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2972), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2934), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2902), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((3444, 3670), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3645), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3601), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3559), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3531), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3503), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder')] |
package io.quarkiverse.langchain4j.sample.chatbot;
import java.util.function.Supplier;
import jakarta.enterprise.context.ApplicationScoped;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
@ApplicationScoped
public class AugmentorExample implements Supplier<RetrievalAugmentor> {
    // Retriever over the injected store/model pair, built once at construction.
    private final EmbeddingStoreContentRetriever contentRetriever;
    // NOTE(review): 'store' is injected as a raw EmbeddingStore; consider
    // EmbeddingStore<TextSegment> to avoid the unchecked conversion.
    AugmentorExample(EmbeddingStore store, EmbeddingModel model) {
        this.contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(store)
                .embeddingModel(model)
                .maxResults(20)
                .build();
    }
    /** Supplies a retrieval augmentor that injects up to 20 relevant segments. */
    @Override
    public RetrievalAugmentor get() {
        return DefaultRetrievalAugmentor.builder()
                .contentRetriever(contentRetriever)
                .build();
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((672, 847), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 822), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 790), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 751), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((923, 1028), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((923, 1003), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
import dev.langchain4j.data.image.Image;
import dev.langchain4j.model.azure.AzureOpenAiImageModel;
import dev.langchain4j.model.output.Response;
public class AzureOpenAIDallEExample {

    public static void main(String[] args) {
        // Azure endpoint, deployment and key all come from environment variables.
        AzureOpenAiImageModel imageModel = AzureOpenAiImageModel.builder()
                .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                .deploymentName(System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"))
                .apiKey(System.getenv("AZURE_OPENAI_KEY"))
                .logRequestsAndResponses(true)
                .build();

        Response<Image> response = imageModel.generate("A coffee mug in Paris, France");
        System.out.println(response.toString());

        Image generatedImage = response.content();
        System.out.println("The remote image is here:" + generatedImage.url());
    }
}
| [
"dev.langchain4j.model.azure.AzureOpenAiImageModel.builder"
] | [((268, 575), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 550), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 503), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 444), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 365), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder')] |
package dev.langchain4j.model.openai;
import dev.ai4j.openai4j.OpenAiClient;
import dev.ai4j.openai4j.image.GenerateImagesRequest;
import dev.ai4j.openai4j.image.GenerateImagesResponse;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.openai.spi.OpenAiImageModelBuilderFactory;
import dev.langchain4j.model.output.Response;
import lombok.Builder;
import lombok.NonNull;
import java.net.Proxy;
import java.nio.file.Path;
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
import static dev.langchain4j.internal.RetryUtils.withRetry;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.model.openai.InternalOpenAiHelper.DEFAULT_USER_AGENT;
import static dev.langchain4j.model.openai.InternalOpenAiHelper.OPENAI_URL;
import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
import static java.time.Duration.ofSeconds;
/**
 * Represents the OpenAI DALL·E models used to generate artistic images. Versions 2 and 3 (default) are supported.
 */
public class OpenAiImageModel implements ImageModel {

    // Request parameters forwarded verbatim to the OpenAI images API on every call.
    private final String modelName;
    private final String size;
    private final String quality;
    private final String style;
    private final String user;
    private final String responseFormat;
    // HTTP client shared by all requests made through this model instance.
    private final OpenAiClient client;
    // Total attempts per request; transient failures are retried via withRetry().
    private final Integer maxRetries;

    /**
     * Instantiates OpenAI DALL·E image processing model.
     * Find the parameters description <a href="https://platform.openai.com/docs/api-reference/images/create">here</a>.
     *
     * @param modelName dall-e-3 is default one
     * @param persistTo specifies the local path where the generated image will be downloaded to (in case provided).
     *                  The URL within <code>dev.ai4j.openai4j.image.GenerateImagesResponse</code> will contain
     *                  the URL to local images then.
     * @param withPersisting generated response will be persisted under <code>java.io.tmpdir</code>.
     *                       The URL within <code>dev.ai4j.openai4j.image.GenerateImagesResponse</code> will contain
     *                       the URL to local images then.
     */
    @Builder
    @SuppressWarnings("rawtypes")
    public OpenAiImageModel(
            String baseUrl,
            @NonNull String apiKey,
            String organizationId,
            String modelName,
            String size,
            String quality,
            String style,
            String user,
            String responseFormat,
            Duration timeout,
            Integer maxRetries,
            Proxy proxy,
            Boolean logRequests,
            Boolean logResponses,
            Boolean withPersisting,
            Path persistTo
    ) {
        // All four HTTP timeouts default to 60 seconds when none is supplied.
        timeout = getOrDefault(timeout, ofSeconds(60));

        OpenAiClient.Builder cBuilder = OpenAiClient
                .builder()
                .baseUrl(getOrDefault(baseUrl, OPENAI_URL))
                .openAiApiKey(apiKey)
                .organizationId(organizationId)
                .callTimeout(timeout)
                .connectTimeout(timeout)
                .readTimeout(timeout)
                .writeTimeout(timeout)
                .proxy(proxy)
                .logRequests(getOrDefault(logRequests, false))
                .logResponses(getOrDefault(logResponses, false))
                .userAgent(DEFAULT_USER_AGENT)
                .persistTo(persistTo);
        // withPersisting is a tri-state Boolean: only an explicit TRUE enables local persistence.
        if (withPersisting != null && withPersisting) {
            cBuilder.withPersisting();
        }
        this.client = cBuilder.build();

        this.maxRetries = getOrDefault(maxRetries, 3);
        this.modelName = modelName;
        this.size = size;
        this.quality = quality;
        this.style = style;
        this.user = user;
        this.responseFormat = responseFormat;
    }

    /**
     * Generates a single image for the given prompt.
     * Only the first entry of the API response's data list is returned.
     */
    @Override
    public Response<Image> generate(String prompt) {
        GenerateImagesRequest request = requestBuilder(prompt).build();

        GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute();

        return Response.from(fromImageData(response.data().get(0)));
    }

    /**
     * Generates {@code n} images for the given prompt, mapping every response entry to an {@link Image}.
     */
    @Override
    public Response<List<Image>> generate(String prompt, int n) {
        GenerateImagesRequest request = requestBuilder(prompt).n(n).build();

        GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute();

        return Response.from(
                response.data().stream().map(OpenAiImageModel::fromImageData).collect(Collectors.toList())
        );
    }

    /**
     * Returns a builder, preferring an SPI-provided factory (first one found wins)
     * and falling back to the plain builder when no factory is registered.
     */
    public static OpenAiImageModelBuilder builder() {
        for (OpenAiImageModelBuilderFactory factory : loadFactories(OpenAiImageModelBuilderFactory.class)) {
            return factory.get();
        }
        return new OpenAiImageModelBuilder();
    }

    /**
     * Hand-written extension of the Lombok-generated builder; adds overloads Lombok cannot express.
     * The fields it assigns (modelName, withPersisting) are generated by Lombok's @Builder.
     */
    public static class OpenAiImageModelBuilder {
        public OpenAiImageModelBuilder() {
            // This is public so it can be extended
            // By default with Lombok it becomes package private
        }

        public OpenAiImageModelBuilder modelName(String modelName) {
            this.modelName = modelName;
            return this;
        }

        // Convenience overload accepting the typed model-name enum.
        public OpenAiImageModelBuilder modelName(OpenAiImageModelName modelName) {
            this.modelName = modelName.toString();
            return this;
        }

        // Shorthand for withPersisting(true).
        public OpenAiImageModelBuilder withPersisting() {
            return withPersisting(true);
        }

        public OpenAiImageModelBuilder withPersisting(Boolean withPersisting) {
            this.withPersisting = withPersisting;
            return this;
        }
    }

    /**
     * Creates a model with only an API key; every other setting keeps its default.
     */
    public static OpenAiImageModel withApiKey(String apiKey) {
        return builder().apiKey(apiKey).build();
    }

    // Maps one API response entry to the langchain4j Image type.
    private static Image fromImageData(GenerateImagesResponse.ImageData data) {
        return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build();
    }

    // Pre-populates a request builder with this instance's configured parameters.
    private GenerateImagesRequest.Builder requestBuilder(String prompt) {
        GenerateImagesRequest.Builder requestBuilder = GenerateImagesRequest
                .builder()
                .prompt(prompt)
                .size(size)
                .quality(quality)
                .style(style)
                .user(user)
                .responseFormat(responseFormat);

        // DALL·E 2 must be named explicitly on the request; DALL·E 3 is the API default.
        if (DALL_E_2.equals(modelName)) {
            requestBuilder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2);
        }

        return requestBuilder;
    }
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((6031, 6133), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6125), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6089), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6062), 'dev.langchain4j.data.image.Image.builder')] |
package dev.langchain4j.model.openai;
import dev.ai4j.openai4j.chat.*;
import dev.ai4j.openai4j.shared.Usage;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.data.message.Content;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.message.*;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import java.util.Collection;
import java.util.List;
import static dev.ai4j.openai4j.chat.ContentType.IMAGE_URL;
import static dev.ai4j.openai4j.chat.ContentType.TEXT;
import static dev.ai4j.openai4j.chat.ToolType.FUNCTION;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static dev.langchain4j.internal.Exceptions.illegalArgument;
import static dev.langchain4j.internal.Utils.isNullOrEmpty;
import static dev.langchain4j.model.output.FinishReason.*;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
public class InternalOpenAiHelper {

    static final String OPENAI_URL = "https://api.openai.com/v1";

    static final String OPENAI_DEMO_API_KEY = "demo";
    static final String OPENAI_DEMO_URL = "http://langchain4j.dev/demo/openai/v1";

    static final String DEFAULT_USER_AGENT = "langchain4j-openai";

    /**
     * Maps langchain4j chat messages to their OpenAI wire-format equivalents, preserving order.
     */
    public static List<Message> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .map(InternalOpenAiHelper::toOpenAiMessage)
                .collect(toList());
    }

    /**
     * Maps a single langchain4j {@link ChatMessage} to the OpenAI wire type.
     * AiMessages with tool-execution requests are encoded on two paths: requests without
     * an id use the legacy single function_call form; requests with ids use tool_calls.
     *
     * @throws dev.langchain4j.exception.IllegalConfigurationException style runtime exception
     *         for unknown message types (via illegalArgument).
     */
    public static Message toOpenAiMessage(ChatMessage message) {
        if (message instanceof SystemMessage) {
            return dev.ai4j.openai4j.chat.SystemMessage.from(((SystemMessage) message).text());
        }

        if (message instanceof UserMessage) {
            UserMessage userMessage = (UserMessage) message;

            // Plain text goes through as-is; multi-modal content is mapped element by element.
            if (userMessage.hasSingleText()) {
                return dev.ai4j.openai4j.chat.UserMessage.builder()
                        .content(userMessage.text())
                        .name(userMessage.name())
                        .build();
            } else {
                return dev.ai4j.openai4j.chat.UserMessage.builder()
                        .content(userMessage.contents().stream()
                                .map(InternalOpenAiHelper::toOpenAiContent)
                                .collect(toList()))
                        .name(userMessage.name())
                        .build();
            }
        }

        if (message instanceof AiMessage) {
            AiMessage aiMessage = (AiMessage) message;

            if (!aiMessage.hasToolExecutionRequests()) {
                return AssistantMessage.from(aiMessage.text());
            }

            // Legacy path: a request without an id is encoded as a single function_call.
            // NOTE(review): only the first request is inspected here — presumably legacy
            // function-calling never produces more than one; confirm.
            ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
            if (toolExecutionRequest.id() == null) {
                FunctionCall functionCall = FunctionCall.builder()
                        .name(toolExecutionRequest.name())
                        .arguments(toolExecutionRequest.arguments())
                        .build();

                return AssistantMessage.builder()
                        .functionCall(functionCall)
                        .build();
            }

            // Modern path: every request becomes one tool_call entry of type FUNCTION.
            List<ToolCall> toolCalls = aiMessage.toolExecutionRequests().stream()
                    .map(it -> ToolCall.builder()
                            .id(it.id())
                            .type(FUNCTION)
                            .function(FunctionCall.builder()
                                    .name(it.name())
                                    .arguments(it.arguments())
                                    .build())
                            .build())
                    .collect(toList());

            return AssistantMessage.builder()
                    .toolCalls(toolCalls)
                    .build();
        }

        if (message instanceof ToolExecutionResultMessage) {
            ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) message;
            // Results of legacy (id-less) calls are sent back as function messages,
            // otherwise as tool messages referencing the originating call id.
            if (toolExecutionResultMessage.id() == null) {
                return FunctionMessage.from(toolExecutionResultMessage.toolName(), toolExecutionResultMessage.text());
            }
            return ToolMessage.from(toolExecutionResultMessage.id(), toolExecutionResultMessage.text());
        }

        throw illegalArgument("Unknown message type: " + message.type());
    }

    // Dispatches on the concrete Content subtype; only text and images are supported.
    private static dev.ai4j.openai4j.chat.Content toOpenAiContent(Content content) {
        if (content instanceof TextContent) {
            return toOpenAiContent((TextContent) content);
        } else if (content instanceof ImageContent) {
            return toOpenAiContent((ImageContent) content);
        } else {
            throw illegalArgument("Unknown content type: " + content);
        }
    }

    private static dev.ai4j.openai4j.chat.Content toOpenAiContent(TextContent content) {
        return dev.ai4j.openai4j.chat.Content.builder()
                .type(TEXT)
                .text(content.text())
                .build();
    }

    private static dev.ai4j.openai4j.chat.Content toOpenAiContent(ImageContent content) {
        return dev.ai4j.openai4j.chat.Content.builder()
                .type(IMAGE_URL)
                .imageUrl(ImageUrl.builder()
                        .url(toUrl(content.image()))
                        .detail(toDetail(content.detailLevel()))
                        .build())
                .build();
    }

    // Prefers the remote URL; falls back to an RFC 2397 data URL built from the base64 payload.
    private static String toUrl(Image image) {
        if (image.url() != null) {
            return image.url().toString();
        }
        return format("data:%s;base64,%s", image.mimeType(), image.base64Data());
    }

    // Enum names match 1:1 between the two libraries, so valueOf is safe here.
    private static ImageDetail toDetail(ImageContent.DetailLevel detailLevel) {
        if (detailLevel == null) {
            return null;
        }
        return ImageDetail.valueOf(detailLevel.name());
    }

    /**
     * Maps tool specifications to OpenAI {@code tools} entries.
     */
    public static List<Tool> toTools(Collection<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(InternalOpenAiHelper::toTool)
                .collect(toList());
    }

    private static Tool toTool(ToolSpecification toolSpecification) {
        Function function = Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toOpenAiParameters(toolSpecification.parameters()))
                .build();
        return Tool.from(function);
    }

    /**
     * @deprecated Functions are deprecated by OpenAI, use {@link #toTools(Collection)} instead
     */
    @Deprecated
    public static List<Function> toFunctions(Collection<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(InternalOpenAiHelper::toFunction)
                .collect(toList());
    }

    /**
     * @deprecated Functions are deprecated by OpenAI, use {@link #toTool(ToolSpecification)} ()} instead
     */
    @Deprecated
    private static Function toFunction(ToolSpecification toolSpecification) {
        return Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toOpenAiParameters(toolSpecification.parameters()))
                .build();
    }

    // null tool parameters map to an empty (but non-null) parameters object.
    private static dev.ai4j.openai4j.chat.Parameters toOpenAiParameters(ToolParameters toolParameters) {
        if (toolParameters == null) {
            return dev.ai4j.openai4j.chat.Parameters.builder().build();
        }
        return dev.ai4j.openai4j.chat.Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }

    /**
     * Maps the first choice of an OpenAI chat completion back to an {@link AiMessage}.
     * tool_calls take precedence over a legacy function_call, which takes precedence over text.
     */
    public static AiMessage aiMessageFrom(ChatCompletionResponse response) {
        AssistantMessage assistantMessage = response.choices().get(0).message();

        List<ToolCall> toolCalls = assistantMessage.toolCalls();
        if (!isNullOrEmpty(toolCalls)) {
            // Non-FUNCTION tool calls are silently dropped here.
            List<ToolExecutionRequest> toolExecutionRequests = toolCalls.stream()
                    .filter(toolCall -> toolCall.type() == FUNCTION)
                    .map(InternalOpenAiHelper::toToolExecutionRequest)
                    .collect(toList());
            return aiMessage(toolExecutionRequests);
        }

        FunctionCall functionCall = assistantMessage.functionCall();
        if (functionCall != null) {
            // Legacy function_call carries no id.
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(functionCall.name())
                    .arguments(functionCall.arguments())
                    .build();
            return aiMessage(toolExecutionRequest);
        }

        return aiMessage(assistantMessage.content());
    }

    private static ToolExecutionRequest toToolExecutionRequest(ToolCall toolCall) {
        FunctionCall functionCall = toolCall.function();
        return ToolExecutionRequest.builder()
                .id(toolCall.id())
                .name(functionCall.name())
                .arguments(functionCall.arguments())
                .build();
    }

    /**
     * Maps OpenAI usage statistics to the langchain4j {@link TokenUsage}; null-safe.
     */
    public static TokenUsage tokenUsageFrom(Usage openAiUsage) {
        if (openAiUsage == null) {
            return null;
        }
        return new TokenUsage(
                openAiUsage.promptTokens(),
                openAiUsage.completionTokens(),
                openAiUsage.totalTokens()
        );
    }

    /**
     * Maps OpenAI finish-reason strings to {@link FinishReason}; unknown values yield null.
     */
    public static FinishReason finishReasonFrom(String openAiFinishReason) {
        if (openAiFinishReason == null) {
            return null;
        }
        switch (openAiFinishReason) {
            case "stop":
                return STOP;
            case "length":
                return LENGTH;
            case "tool_calls":
            case "function_call":
                return TOOL_EXECUTION;
            case "content_filter":
                return CONTENT_FILTER;
            default:
                return null;
        }
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
"dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests"
] | [((2947, 2987), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3939), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3900), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3504), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((8748, 8911), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((8748, 8882), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((8748, 8825), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9379), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9354), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9301), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9258), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package com.johnsosoka.selfdiscover.config;
import com.johnsosoka.selfdiscover.agent.SelfDiscovery;
import com.johnsosoka.selfdiscover.agent.Solving;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class SelfDiscoveryAgentConfig {

    /**
     * Exposes the {@code Solving} AI service. Its prompts solve a given task by
     * following the self-composed reasoning structure produced by the
     * SelfDiscovery agent.
     *
     * @param chatLanguageModel the underlying chat model
     * @return the Solving AI service
     */
    @Bean
    public Solving solverAgent(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(Solving.class)
                .chatLanguageModel(chatLanguageModel)
                .build();
    }

    /**
     * Exposes the {@code SelfDiscovery} AI service. Its prompts implement the
     * SELF-DISCOVER algorithm, which self-composes reasoning structures.
     *
     * @param chatLanguageModel the underlying chat model
     * @return the SelfDiscovery AI service
     */
    @Bean
    public SelfDiscovery selfDiscoveryAgent(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(SelfDiscovery.class)
                .chatLanguageModel(chatLanguageModel)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((755, 873), 'dev.langchain4j.service.AiServices.builder'), ((755, 848), 'dev.langchain4j.service.AiServices.builder'), ((1263, 1375), 'dev.langchain4j.service.AiServices.builder'), ((1263, 1350), 'dev.langchain4j.service.AiServices.builder')] |
package dev.langchain4j.model.qianfan;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.internal.Utils;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.qianfan.client.QianfanClient;
import dev.langchain4j.model.qianfan.client.QianfanStreamingResponseBuilder;
import dev.langchain4j.model.qianfan.client.SyncOrAsyncOrStreaming;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse;
import dev.langchain4j.model.qianfan.spi.QianfanStreamingChatModelBuilderFactory;
import lombok.Builder;
import static dev.langchain4j.model.qianfan.InternalQianfanHelper.*;
import java.util.List;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
/**
 * Streaming chat model backed by Baidu Qianfan (ERNIE).
 * See details here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
 */
public class QianfanStreamingChatModel implements StreamingChatLanguageModel {

    private final QianfanClient client;
    private final String baseUrl;
    // Sampling parameters sent with every request.
    private final Double temperature;
    private final Double topP;
    private final String modelName;
    // Qianfan addresses models by endpoint path; derived from modelName when not given.
    private final String endpoint;
    private final Double penaltyScore;
    private final String responseFormat;

    /**
     * @param apiKey    Qianfan API key (required).
     * @param secretKey Qianfan secret key (required).
     * @param endpoint  explicit endpoint; when blank, it is looked up from the model name.
     * @throws IllegalArgumentException when credentials are missing or the model name is unknown.
     */
    @Builder
    public QianfanStreamingChatModel(String baseUrl,
                                     String apiKey,
                                     String secretKey,
                                     Double temperature,
                                     Double topP,
                                     String modelName,
                                     String endpoint,
                                     String responseFormat,
                                     Double penaltyScore,
                                     Boolean logRequests,
                                     Boolean logResponses
                             ) {
        if (Utils.isNullOrBlank(apiKey)||Utils.isNullOrBlank(secretKey)) {
            throw new IllegalArgumentException(" api key and secret key must be defined. It can be generated here: https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application");
        }
        this.modelName=modelName;
        // Explicit endpoint wins; otherwise resolve it from the model-name table.
        this.endpoint=Utils.isNullOrBlank(endpoint)? QianfanChatModelNameEnum.getEndpoint(modelName):endpoint;

        if (Utils.isNullOrBlank(this.endpoint)) {
            throw new IllegalArgumentException("Qianfan is no such model name. You can see model name here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu");
        }

        this.baseUrl = getOrDefault(baseUrl, "https://aip.baidubce.com");

        this.client = QianfanClient.builder()
                .baseUrl(this.baseUrl)
                .apiKey(apiKey)
                .secretKey(secretKey)
                .logRequests(logRequests)
                .logStreamingResponses(logResponses)
                .build();
        this.temperature = getOrDefault(temperature, 0.7);
        this.topP = topP;
        this.penaltyScore = penaltyScore;
        this.responseFormat = responseFormat;
    }

    @Override
    public void generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler) {
        generate(messages, null, null, handler);
    }

    @Override
    public void generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications, StreamingResponseHandler<AiMessage> handler) {
        generate(messages, toolSpecifications, null, handler);
    }

    /**
     * Forcing a single specific tool is not supported by this implementation.
     */
    @Override
    public void generate(List<ChatMessage> messages, ToolSpecification toolSpecification, StreamingResponseHandler<AiMessage> handler) {
        throw new RuntimeException("Not supported");
    }

    // Common request/stream wiring. toolThatMustBeExecuted is accepted for signature
    // symmetry but never used (the public overload that would set it throws).
    private void generate(List<ChatMessage> messages,
                          List<ToolSpecification> toolSpecifications,
                          ToolSpecification toolThatMustBeExecuted,
                          StreamingResponseHandler<AiMessage> handler
    ) {
        ChatCompletionRequest.Builder builder = ChatCompletionRequest.builder()
                .messages(InternalQianfanHelper.toOpenAiMessages(messages))
                .temperature(temperature)
                .topP(topP)
                .system(getSystemMessage(messages))
                .responseFormat(responseFormat)
                .penaltyScore(penaltyScore);

        if (toolSpecifications != null && !toolSpecifications.isEmpty()) {
            builder.functions(InternalQianfanHelper.toFunctions(toolSpecifications));
        }

        ChatCompletionRequest request = builder.build();

        // Accumulates the streamed chunks so the final Response can be assembled on completion.
        QianfanStreamingResponseBuilder responseBuilder = new QianfanStreamingResponseBuilder(null);

        SyncOrAsyncOrStreaming<ChatCompletionResponse> response = client.chatCompletion(request, endpoint);

        response.onPartialResponse(partialResponse -> {
            responseBuilder.append(partialResponse);
            handle(partialResponse, handler);
        })
                .onComplete(() -> {
                    Response<AiMessage> messageResponse = responseBuilder.build();
                    handler.onComplete(messageResponse);
                })
                .onError(handler::onError
                )
                .execute();
    }

    // Forwards a non-blank partial chunk to the caller; blank chunks are skipped.
    private static void handle(ChatCompletionResponse partialResponse,
                               StreamingResponseHandler<AiMessage> handler) {
        String result = partialResponse.getResult();
        if (Utils.isNullOrBlank(result)) {
            return;
        }
        handler.onNext(result);
    }

    /**
     * Returns a builder, preferring an SPI-provided factory when one is registered.
     */
    public static QianfanStreamingChatModelBuilder builder() {
        for (QianfanStreamingChatModelBuilderFactory factory : loadFactories(QianfanStreamingChatModelBuilderFactory.class)) {
            return factory.get();
        }
        return new QianfanStreamingChatModelBuilder();
    }

    /**
     * Extension point over the Lombok-generated builder.
     */
    public static class QianfanStreamingChatModelBuilder {
        public QianfanStreamingChatModelBuilder() {
            // This is public so it can be extended
            // By default with Lombok it becomes package private
        }
    }
}
| [
"dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder",
"dev.langchain4j.model.qianfan.client.QianfanClient.builder"
] | [((2941, 3193), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3168), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3115), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3073), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3035), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3003), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((4305, 4626), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4582), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4534), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4482), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4454), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4412), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder')] |
package dev.langchain4j.data.message;
import dev.langchain4j.data.image.Image;
import java.net.URI;
import java.util.Objects;
import static dev.langchain4j.data.message.ContentType.IMAGE;
import static dev.langchain4j.data.message.ImageContent.DetailLevel.LOW;
import static dev.langchain4j.internal.ValidationUtils.ensureNotBlank;
import static dev.langchain4j.internal.ValidationUtils.ensureNotNull;
/**
 * An image {@link Content} element, paired with the {@link DetailLevel}
 * at which the model should process it.
 */
public class ImageContent implements Content {

    /**
     * The detail level of an {@code Image}.
     */
    public enum DetailLevel {

        /** Low detail. */
        LOW,

        /** High detail. */
        HIGH,

        /** Auto detail. */
        AUTO
    }

    private final Image image;
    private final DetailLevel detailLevel;

    /**
     * Creates an {@link ImageContent} for the given url, at {@code DetailLevel.LOW} detail.
     *
     * @param url the url of the image.
     */
    public ImageContent(URI url) {
        this(url, LOW);
    }

    /**
     * Creates an {@link ImageContent} for the given url, at {@code DetailLevel.LOW} detail.
     *
     * @param url the url of the image.
     */
    public ImageContent(String url) {
        this(URI.create(url));
    }

    /**
     * Creates an {@link ImageContent} for the given url and detail level.
     *
     * @param url         the url of the image.
     * @param detailLevel the detail level of the image.
     */
    public ImageContent(URI url, DetailLevel detailLevel) {
        this(Image.builder().url(ensureNotNull(url, "url")).build(), detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given url and detail level.
     *
     * @param url         the url of the image.
     * @param detailLevel the detail level of the image.
     */
    public ImageContent(String url, DetailLevel detailLevel) {
        this(URI.create(url), detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given base64 data and mime type,
     * at {@code DetailLevel.LOW} detail.
     *
     * @param base64Data the base64 data of the image.
     * @param mimeType   the mime type of the image.
     */
    public ImageContent(String base64Data, String mimeType) {
        this(base64Data, mimeType, LOW);
    }

    /**
     * Creates an {@link ImageContent} for the given base64 data, mime type and detail level.
     *
     * @param base64Data  the base64 data of the image.
     * @param mimeType    the mime type of the image.
     * @param detailLevel the detail level of the image.
     */
    public ImageContent(String base64Data, String mimeType, DetailLevel detailLevel) {
        this(Image.builder()
                        .base64Data(ensureNotBlank(base64Data, "base64Data"))
                        .mimeType(ensureNotBlank(mimeType, "mimeType"))
                        .build(),
                detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given image, at {@code DetailLevel.LOW} detail.
     *
     * @param image the image.
     */
    public ImageContent(Image image) {
        this(image, LOW);
    }

    /**
     * Creates an {@link ImageContent} for the given image and detail level.
     * Both arguments must be non-null.
     *
     * @param image       the image.
     * @param detailLevel the detail level of the image.
     */
    public ImageContent(Image image, DetailLevel detailLevel) {
        this.image = ensureNotNull(image, "image");
        this.detailLevel = ensureNotNull(detailLevel, "detailLevel");
    }

    /**
     * Returns the {@code Image}.
     */
    public Image image() {
        return image;
    }

    /**
     * Returns the {@code DetailLevel}.
     */
    public DetailLevel detailLevel() {
        return detailLevel;
    }

    @Override
    public ContentType type() {
        return IMAGE;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ImageContent other = (ImageContent) o;
        return Objects.equals(image, other.image)
                && Objects.equals(detailLevel, other.detailLevel);
    }

    @Override
    public int hashCode() {
        return Objects.hash(image, detailLevel);
    }

    @Override
    public String toString() {
        return "ImageContent { image = " + image + " detailLevel = " + detailLevel + " }";
    }

    /**
     * Creates an {@link ImageContent} for the given url, at {@code DetailLevel.LOW} detail.
     *
     * @param url the url of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(URI url) {
        return new ImageContent(url);
    }

    /**
     * Creates an {@link ImageContent} for the given url, at {@code DetailLevel.LOW} detail.
     *
     * @param url the url of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(String url) {
        return new ImageContent(url);
    }

    /**
     * Creates an {@link ImageContent} for the given url and detail level.
     *
     * @param url         the url of the image.
     * @param detailLevel the detail level of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(URI url, DetailLevel detailLevel) {
        return new ImageContent(url, detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given url and detail level.
     *
     * @param url         the url of the image.
     * @param detailLevel the detail level of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(String url, DetailLevel detailLevel) {
        return new ImageContent(url, detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given base64 data and mime type,
     * at {@code DetailLevel.LOW} detail.
     *
     * @param base64Data the base64 data of the image.
     * @param mimeType   the mime type of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(String base64Data, String mimeType) {
        return new ImageContent(base64Data, mimeType);
    }

    /**
     * Creates an {@link ImageContent} for the given base64 data, mime type and detail level.
     *
     * @param base64Data  the base64 data of the image.
     * @param mimeType    the mime type of the image.
     * @param detailLevel the detail level of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(String base64Data, String mimeType, DetailLevel detailLevel) {
        return new ImageContent(base64Data, mimeType, detailLevel);
    }

    /**
     * Creates an {@link ImageContent} for the given image, at {@code DetailLevel.LOW} detail.
     *
     * @param image the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(Image image) {
        return new ImageContent(image);
    }

    /**
     * Creates an {@link ImageContent} for the given image and detail level.
     *
     * @param image       the image.
     * @param detailLevel the detail level of the image.
     * @return the new {@link ImageContent}.
     */
    public static ImageContent from(Image image, DetailLevel detailLevel) {
        return new ImageContent(image, detailLevel);
    }
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((1691, 1779), 'dev.langchain4j.data.image.Image.builder'), ((1691, 1754), 'dev.langchain4j.data.image.Image.builder'), ((2889, 3063), 'dev.langchain4j.data.image.Image.builder'), ((2889, 3038), 'dev.langchain4j.data.image.Image.builder'), ((2889, 2974), 'dev.langchain4j.data.image.Image.builder')] |
package org.agoncal.fascicle.langchain4j.vectordb.chroma;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore;
import java.util.List;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useQdrantToStoreEmbeddings();
  }

  // NOTE(review): despite the "Qdrant" in the method name, this sample uses the
  // *Chroma* embedding store (see the package and ChromaEmbeddingStore below) —
  // the name looks copied from a sibling sample; renaming would change the public
  // API of the sample, so it is only flagged here. Confirm with the book sources.
  public void useQdrantToStoreEmbeddings() {
    System.out.println("### useQdrantToStoreEmbeddings");

    // tag::adocSnippet[]
    // Store built with defaults only; presumably the Chroma server location comes
    // from the builder's defaults — TODO confirm against the sample's docker setup.
    EmbeddingStore<TextSegment> embeddingStore =
      ChromaEmbeddingStore.builder()
        .build();

    EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

    // Index two example segments.
    TextSegment segment1 = TextSegment.from("I've been to France twice.");
    Embedding embedding1 = embeddingModel.embed(segment1).content();
    embeddingStore.add(embedding1, segment1);

    TextSegment segment2 = TextSegment.from("New Delhi is the capital of India.");
    Embedding embedding2 = embeddingModel.embed(segment2).content();
    embeddingStore.add(embedding2, segment2);

    // Query for the single most similar segment and print its score and text.
    Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content();
    List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1);
    EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0);

    System.out.println(embeddingMatch.score());
    System.out.println(embeddingMatch.embedded().text());
    // end::adocSnippet[]
  }
}
| [
"dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder"
] | [((977, 1024), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder')] |
package com.kchandrakant;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.List;
import static java.time.Duration.ofSeconds;
import static java.util.Arrays.asList;
public class StructuredPrompt {
@dev.langchain4j.model.input.structured.StructuredPrompt({
"Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}.",
"Structure your answer in the following way:",
"Recipe name: ...",
"Description: ...",
"Preparation time: ...",
"Required ingredients:",
"- ...",
"- ...",
"Instructions:",
"- ...",
"- ..."
})
static class CreateRecipePrompt {
String dish;
List<String> ingredients;
CreateRecipePrompt(String dish, List<String> ingredients) {
this.dish = dish;
this.ingredients = ingredients;
}
}
public static void main(String[] args) {
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.timeout(ofSeconds(60))
.build();
CreateRecipePrompt createRecipePrompt = new CreateRecipePrompt(
"salad",
asList("cucumber", "tomato", "feta", "onion", "olives")
);
Prompt prompt = StructuredPromptProcessor.toPrompt(createRecipePrompt);
String recipe = model.generate(prompt.text());
System.out.println(recipe);
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1201, 1339), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1201, 1314), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1201, 1274), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.bam.deployment;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
import java.util.List;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tomakehurst.wiremock.WireMockServer;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkiverse.langchain4j.bam.BamRestApi;
import io.quarkiverse.langchain4j.bam.Message;
import io.quarkiverse.langchain4j.bam.Parameters;
import io.quarkiverse.langchain4j.bam.TextGenerationRequest;
import io.quarkiverse.langchain4j.bam.runtime.config.LangChain4jBamConfig;
import io.quarkus.test.QuarkusUnitTest;
public class AiChatServiceTest {

    static WireMockServer wireMockServer;
    static ObjectMapper mapper;
    static WireMockUtil mockServers;

    /**
     * Canned BAM chat-completion payload returned by every stub below: one result
     * whose generated text is "AI Response", with 20 generated / 146 input tokens
     * and stop_reason "max_tokens". (Previously duplicated in three tests.)
     */
    private static final String AI_RESPONSE_JSON = """
            {
                "results": [
                    {
                        "generated_token_count": 20,
                        "input_token_count": 146,
                        "stop_reason": "max_tokens",
                        "seed": 40268626,
                        "generated_text": "AI Response"
                    }
                ]
            }
            """;

    @RegisterExtension
    static QuarkusUnitTest unitTest = new QuarkusUnitTest()
            .overrideRuntimeConfigKey("quarkus.langchain4j.bam.base-url", WireMockUtil.URL)
            .overrideRuntimeConfigKey("quarkus.langchain4j.bam.api-key", WireMockUtil.API_KEY)
            .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(WireMockUtil.class));

    @BeforeAll
    static void beforeAll() {
        wireMockServer = new WireMockServer(options().port(WireMockUtil.PORT));
        wireMockServer.start();

        mapper = BamRestApi.objectMapper(new ObjectMapper());
        mockServers = new WireMockUtil(wireMockServer);
    }

    @AfterAll
    static void afterAll() {
        wireMockServer.stop();
    }

    @RegisterAiService
    @Singleton
    interface NewAIService {

        @SystemMessage("This is a systemMessage")
        @UserMessage("This is a userMessage {text}")
        String chat(String text);
    }

    @Inject
    NewAIService service;

    @Inject
    ChatLanguageModel chatModel;

    @Inject
    LangChain4jBamConfig langchain4jBamConfig;

    /**
     * Builds the {@link Parameters} the runtime is expected to send, mirroring
     * the values of the default BAM chat-model configuration.
     */
    private Parameters expectedParameters() {
        var cfg = langchain4jBamConfig.defaultConfig().chatModel();
        return Parameters.builder()
                .decodingMethod(cfg.decodingMethod())
                .temperature(cfg.temperature())
                .minNewTokens(cfg.minNewTokens())
                .maxNewTokens(cfg.maxNewTokens())
                .build();
    }

    /**
     * Stubs the chat endpoint to expect a request containing exactly
     * {@code messages} (with the configured model id and parameters) and to
     * answer with {@link #AI_RESPONSE_JSON}.
     *
     * @throws Exception if the expected request body cannot be serialized
     */
    private void stubChatCall(List<Message> messages) throws Exception {
        var modelId = langchain4jBamConfig.defaultConfig().chatModel().modelId();
        var body = new TextGenerationRequest(modelId, messages, expectedParameters());

        mockServers
                .mockBuilder(WireMockUtil.URL_CHAT_API, 200)
                .body(mapper.writeValueAsString(body))
                .response(AI_RESPONSE_JSON)
                .build();
    }

    @Test
    void chat() throws Exception {
        // The AI service templates expand to one system and one user message.
        stubChatCall(List.of(
                new Message("system", "This is a systemMessage"),
                new Message("user", "This is a userMessage Hello")));

        assertEquals("AI Response", service.chat("Hello"));
    }

    @Test
    void chat_test_generate_1() throws Exception {
        // generate(String) must send a single user message.
        stubChatCall(List.of(
                new Message("user", "Hello")));

        assertEquals("AI Response", chatModel.generate("Hello"));
    }

    @Test
    void chat_test_generate_2() throws Exception {
        // Full message history: system + user + assistant roles are mapped 1:1.
        stubChatCall(List.of(
                new Message("system", "This is a systemMessage"),
                new Message("user", "This is a userMessage"),
                new Message("assistant", "This is a assistantMessage")));

        var expected = Response.from(AiMessage.from("AI Response"), new TokenUsage(146, 20, 166), FinishReason.LENGTH);

        // Both the List overload and the varargs overload must produce the same response.
        assertEquals(expected, chatModel.generate(List.of(
                new dev.langchain4j.data.message.SystemMessage("This is a systemMessage"),
                new dev.langchain4j.data.message.UserMessage("This is a userMessage"),
                new dev.langchain4j.data.message.AiMessage("This is a assistantMessage"))));

        assertEquals(expected, chatModel.generate(
                new dev.langchain4j.data.message.SystemMessage("This is a systemMessage"),
                new dev.langchain4j.data.message.UserMessage("This is a userMessage"),
                new dev.langchain4j.data.message.AiMessage("This is a assistantMessage")));
    }

    @Test
    void chat_test_tool_specification() throws Exception {
        // Tools are not supported by this model: both overloads must reject them up front.
        assertThrowsExactly(
                IllegalArgumentException.class,
                () -> chatModel.generate(List.of(), ToolSpecification.builder().build()));

        assertThrowsExactly(
                IllegalArgumentException.class,
                () -> chatModel.generate(List.of(), List.of(ToolSpecification.builder().build())));
    }
}
| [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((1945, 2010), 'org.jboss.shrinkwrap.api.ShrinkWrap.create'), ((2915, 3222), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3197), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3132), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3067), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3004), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4779), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4754), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4689), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4624), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4561), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6254), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6229), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6164), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6099), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6036), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((8283, 8318), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((8460, 8495), 'dev.langchain4j.agent.tool.ToolSpecification.builder')] |
package com.exoreaction.quadim;
import static java.time.Duration.ofSeconds;
import com.exoreaction.quadim.service.ApiKeys;
import com.fasterxml.jackson.core.json.JsonReadFeature;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import java.util.List;
import java.util.Random;
import org.junit.jupiter.api.Test;
public class AiAssistedHRAssistantTest {

  // Jackson mapper made lenient (unknown fields, raw control chars, backslash
  // escapes) so the LLM's JSON answers can be pretty-printed without blowing up.
  public static final ObjectMapper mapper = new ObjectMapper()
      .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
      .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
      .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
      .configure(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature(), true)
      .enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature())
      .findAndRegisterModules();

  // Integration test against the live OpenAI API (requires MY_OPENAI_API_KEY).
  // Simulates one turn of an HR-assistant chat, then runs a second LLM pass to
  // extract a structured Skill object from the free-text answer.
  @Test
  public void testChatWithHRAI() throws Exception {
    Random r = new Random();
    // Random memory id so repeated runs do not share chat history.
    int userNo = r.nextInt(100);

    // Plan
    ChatLanguageModel model = OpenAiChatModel.builder()
        .apiKey(ApiKeys.MY_OPENAI_API_KEY)
        .modelName(OpenAiModelName.GPT_3_5_TURBO)
        .timeout(ofSeconds(900))
        .temperature(0.9)
        .build();

    // Assistant with a 10-message sliding-window memory per memory id.
    Assistant assistant = AiServices.builder(Assistant.class)
        .chatLanguageModel(model)
        .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
        .build();

    // a) create types for retrieving skills and experience objects from responses
    SkillExtractor skillExtractor = AiServices.create(SkillExtractor.class, model);

    // b) simulate a chat — appendPrompt steers tone and output structure.
    String appendPrompt = "Answer acting as a friendly HR Consultant helping the user with his/her competence mapping, focussing on skills and projects."+
        "Structure the answer friendly and selling with bullets for discovered or suggested supporting skills and potential typical projects"+
        "where the user may have used those skills. " +
        "Limit answer to the most relevant 5 skills and top 8 projects";

    String q1 = "Yes, I do work with Java and java microservices on the backend ";
    System.out.println("me: " + q1);
    String res1 = assistant.chat(userNo, q1 + appendPrompt);
    System.out.println(res1);

    // Second pass: map the free-text answer into the Skill structure and print it.
    Skill extractedSkills1 = skillExtractor.extractSkillFrom(res1);
    System.out.println("\n\n1. Skill mapped:" + mapper.writerWithDefaultPrettyPrinter().writeValueAsString(extractedSkills1) + "\n\n");
  }

  // Chat entry point; @MemoryId selects the per-user conversation memory.
  interface Assistant {
    String chat(@MemoryId int memoryId, @UserMessage String userMessage);
  }

  // Target type for structured extraction; @Description texts guide the LLM.
  static class Skill {

    @Description("the name of this skill")
    private String name;

    @Description("description of this skill. please make it selling and not more than 10 lines of text")
    private String description;

    @Description("list of suggested skills which correlate to this skill")
    private List<SkillReference> listOfCandidateSkillDefinitions;

    public String getName() {
      return name;
    }

    public void setName(String name) {
      this.name = name;
    }

    public String getDescription() {
      return description;
    }

    public void setDescription(String description) {
      this.description = description;
    }

    public List<SkillReference> getListOfCandidateSkillDefinitions() {
      return listOfCandidateSkillDefinitions;
    }

    public void setListOfCandidateSkillDefinitions(List<SkillReference> listOfCandidateSkillDefinitions) {
      this.listOfCandidateSkillDefinitions = listOfCandidateSkillDefinitions;
    }

    @Override
    public String toString() {
      return "Skill{" +
          "skillName='" + name + '\'' +
          ", skillDescription='" + description + '\'' +
          ", listOfCandidateSkillDefinitions=" + listOfCandidateSkillDefinitions +
          '}';
    }
  }

  // Nested reference type: a correlated skill suggested by the model.
  static class SkillReference {

    @Description("the name of this skill")
    private String name;

    @Description("description of this skill. please make it selling and not more than 10 lines of text")
    private String description;

    public String getName() {
      return name;
    }

    public void setName(String name) {
      this.name = name;
    }

    public String getDescription() {
      return description;
    }

    public void setDescription(String description) {
      this.description = description;
    }

    @Override
    public String toString() {
      return "SkillReference{" +
          "skillName='" + name + '\'' +
          ", skillDescription='" + description + '\'' +
          '}';
    }
  }

  // Extraction service: {{it}} is replaced with the chat answer text.
  interface SkillExtractor {
    @UserMessage("Extract information about a skill from {{it}}")
    Skill extractSkillFrom(String text);
  }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1181, 1242), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature'), ((1264, 1334), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature'), ((1534, 1728), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1711), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1685), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1652), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1602), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1756, 1927), 'dev.langchain4j.service.AiServices.builder'), ((1756, 1910), 'dev.langchain4j.service.AiServices.builder'), ((1756, 1825), 'dev.langchain4j.service.AiServices.builder')] |
package dev.langchain4j.model.openai;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.TestStreamingResponseHandler;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import org.assertj.core.data.Percentage;
import org.junit.jupiter.api.Test;
import java.util.Base64;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER;
import static dev.langchain4j.data.message.ToolExecutionResultMessage.from;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.internal.Utils.readBytes;
import static dev.langchain4j.model.openai.OpenAiChatModelIT.CAT_IMAGE_URL;
import static dev.langchain4j.model.openai.OpenAiChatModelIT.DICE_IMAGE_URL;
import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_4_VISION_PREVIEW;
import static dev.langchain4j.model.output.FinishReason.STOP;
import static dev.langchain4j.model.output.FinishReason.TOOL_EXECUTION;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.Percentage.withPercentage;
class OpenAiStreamingChatModelIT {
    // Default chat model: deterministic (temperature 0) with request/response logging.
    StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder()
            .baseUrl(System.getenv("OPENAI_BASE_URL"))
            .apiKey(System.getenv("OPENAI_API_KEY"))
            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
            .temperature(0.0)
            .logRequests(true)
            .logResponses(true)
            .build();

    // Same setup, pinned to the vision-capable model for the image tests below.
    StreamingChatLanguageModel visionModel = OpenAiStreamingChatModel.builder()
            .baseUrl(System.getenv("OPENAI_BASE_URL"))
            .apiKey(System.getenv("OPENAI_API_KEY"))
            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
            .modelName(GPT_4_VISION_PREVIEW)
            .temperature(0.0)
            .logRequests(true)
            .logResponses(true)
            .build();

    // Tool with two integer parameters, shared by all tool-calling tests.
    ToolSpecification calculator = ToolSpecification.builder()
            .name("calculator")
            .description("returns a sum of two numbers")
            .addParameter("first", INTEGER)
            .addParameter("second", INTEGER)
            .build();

    // Tolerance for token-count assertions (counts may drift slightly per model/tokenizer).
    Percentage tokenizerPrecision = withPercentage(5);
    @Test
    void should_stream_answer() throws Exception {

        // Collect the streamed tokens and the final response via futures,
        // since the handler callbacks run on the client's worker thread.
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        model.generate("What is the capital of Germany?", new StreamingResponseHandler<AiMessage>() {

            // Accumulates the answer token by token.
            private final StringBuilder answerBuilder = new StringBuilder();

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
                answerBuilder.append(token);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                futureAnswer.complete(answerBuilder.toString());
                futureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                // Fail both futures so get() below rethrows instead of timing out.
                futureAnswer.completeExceptionally(error);
                futureResponse.completeExceptionally(error);
            }
        });

        String answer = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        assertThat(answer).contains("Berlin");
        // The concatenation of streamed tokens must equal the final message text.
        assertThat(response.content().text()).isEqualTo(answer);

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isEqualTo(14);
        assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP);
    }
    // Round 1: the model must answer with a tool-execution request (no text tokens);
    // round 2: feeding back the tool result must yield a streamed text answer.
    @Test
    void should_execute_a_tool_then_stream_answer() throws Exception {

        // given
        UserMessage userMessage = userMessage("2+2=?");
        List<ToolSpecification> toolSpecifications = singletonList(calculator);

        // when
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        model.generate(singletonList(userMessage), toolSpecifications, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
                // A tool call must not produce text tokens — fail the future if it does.
                Exception e = new IllegalStateException("onNext() should never be called when tool is executed");
                futureResponse.completeExceptionally(e);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                futureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                futureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> response = futureResponse.get(30, SECONDS);
        AiMessage aiMessage = response.content();

        // then
        assertThat(aiMessage.text()).isNull();

        List<ToolExecutionRequest> toolExecutionRequests = aiMessage.toolExecutionRequests();
        assertThat(toolExecutionRequests).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = toolExecutionRequests.get(0);
        assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
        assertThat(toolExecutionRequest.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}");

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(53, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(14, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(TOOL_EXECUTION);

        // given — feed the tool's result ("4") back into the conversation
        ToolExecutionResultMessage toolExecutionResultMessage = from(toolExecutionRequest, "4");
        List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage);

        // when
        CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>();

        model.generate(messages, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                secondFutureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                secondFutureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS);
        AiMessage secondAiMessage = secondResponse.content();

        // then — a plain text answer containing the tool result, no further tool calls
        assertThat(secondAiMessage.text()).contains("4");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(33, tokenizerPrecision);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(STOP);
    }
    // Same flow as above, but the tool is FORCED by passing it as the dedicated
    // argument (tool_choice) instead of as a list the model may pick from.
    @Test
    void should_execute_tool_forcefully_then_stream_answer() throws Exception {

        // given
        UserMessage userMessage = userMessage("2+2=?");

        // when
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        model.generate(singletonList(userMessage), calculator, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
                // A forced tool call must not produce text tokens.
                Exception e = new IllegalStateException("onNext() should never be called when tool is executed");
                futureResponse.completeExceptionally(e);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                futureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                futureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> response = futureResponse.get(30, SECONDS);
        AiMessage aiMessage = response.content();

        // then
        assertThat(aiMessage.text()).isNull();

        List<ToolExecutionRequest> toolExecutionRequests = aiMessage.toolExecutionRequests();
        assertThat(toolExecutionRequests).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = toolExecutionRequests.get(0);
        assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
        assertThat(toolExecutionRequest.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}");

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(59, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(9, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP); // not sure if a bug in OpenAI or stop is expected here

        // given — feed the tool's result ("4") back into the conversation
        ToolExecutionResultMessage toolExecutionResultMessage = from(toolExecutionRequest, "4");
        List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage);

        // when
        CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>();

        model.generate(messages, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                secondFutureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                secondFutureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS);
        AiMessage secondAiMessage = secondResponse.content();

        // then — a plain text answer, no further tool calls
        assertThat(secondAiMessage.text()).contains("4");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(33, tokenizerPrecision);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(STOP);
    }
    // Parallel tool calling: one user message triggers TWO calculator calls in a
    // single response; both results are fed back before the final streamed answer.
    @Test
    void should_execute_multiple_tools_in_parallel_then_stream_answer() throws Exception {

        // given — a dedicated model, since parallel function calling needs 1106+
        StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder()
                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                .modelName(GPT_3_5_TURBO_1106) // supports parallel function calling
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .build();

        UserMessage userMessage = userMessage("2+2=? 3+3=?");
        List<ToolSpecification> toolSpecifications = singletonList(calculator);

        // when
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        model.generate(singletonList(userMessage), toolSpecifications, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
                // Tool calls must not produce text tokens.
                Exception e = new IllegalStateException("onNext() should never be called when tool is executed");
                futureResponse.completeExceptionally(e);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                futureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                futureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> response = futureResponse.get(30, SECONDS);
        AiMessage aiMessage = response.content();

        // then — two independent tool-execution requests, one per sub-question
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(2);

        ToolExecutionRequest toolExecutionRequest1 = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest1.name()).isEqualTo("calculator");
        assertThat(toolExecutionRequest1.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}");

        ToolExecutionRequest toolExecutionRequest2 = aiMessage.toolExecutionRequests().get(1);
        assertThat(toolExecutionRequest2.name()).isEqualTo("calculator");
        assertThat(toolExecutionRequest2.arguments()).isEqualToIgnoringWhitespace("{\"first\": 3, \"second\": 3}");

        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(57, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(51, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(TOOL_EXECUTION);

        // given — feed BOTH tool results back into the conversation
        ToolExecutionResultMessage toolExecutionResultMessage1 = from(toolExecutionRequest1, "4");
        ToolExecutionResultMessage toolExecutionResultMessage2 = from(toolExecutionRequest2, "6");

        List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage1, toolExecutionResultMessage2);

        // when
        CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>();

        model.generate(messages, new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                secondFutureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                secondFutureResponse.completeExceptionally(error);
            }
        });

        Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS);
        AiMessage secondAiMessage = secondResponse.content();

        // then — final answer mentions both results, no further tool calls
        assertThat(secondAiMessage.text()).contains("4", "6");
        assertThat(secondAiMessage.toolExecutionRequests()).isNull();

        TokenUsage secondTokenUsage = secondResponse.tokenUsage();
        assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(83, tokenizerPrecision);
        assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
        assertThat(secondTokenUsage.totalTokenCount())
                .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());

        assertThat(secondResponse.finishReason()).isEqualTo(STOP);
    }
    // JSON mode: despite the prompt nudging the model to add a joke, the streamed
    // output must be exactly the requested JSON object and nothing else.
    @Test
    void should_stream_valid_json() throws Exception {

        //given
        String userMessage = "Return JSON with two fields: name and surname of Klaus Heisler. " +
                "Before returning, tell me a joke."; // nudging it to say something additionally to json

        StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder()
                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                .modelName(GPT_3_5_TURBO_1106) // supports response_format = 'json_object'
                .responseFormat("json_object")
                .logRequests(true)
                .logResponses(true)
                .build();

        // when — collect both the concatenated tokens and the final response
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        model.generate(userMessage, new StreamingResponseHandler<AiMessage>() {

            private final StringBuilder answerBuilder = new StringBuilder();

            @Override
            public void onNext(String token) {
                System.out.println("onNext: '" + token + "'");
                answerBuilder.append(token);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("onComplete: '" + response + "'");
                futureAnswer.complete(answerBuilder.toString());
                futureResponse.complete(response);
            }

            @Override
            public void onError(Throwable error) {
                futureAnswer.completeExceptionally(error);
                futureResponse.completeExceptionally(error);
            }
        });

        String json = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        // then — pure JSON, and streamed tokens match the final message text
        assertThat(json).isEqualToIgnoringWhitespace("{\"name\": \"Klaus\", \"surname\": \"Heisler\"}");
        assertThat(response.content().text()).isEqualTo(json);
    }
@Test
void should_accept_image_url() {

    // given: a user message carrying only an image, referenced by URL
    UserMessage message = UserMessage.from(ImageContent.from(CAT_IMAGE_URL));

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    visionModel.generate(singletonList(message), responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then: the model recognizes the cat; the image is billed at the expected token count
    assertThat(answer.content().text()).containsIgnoringCase("cat");
    assertThat(answer.tokenUsage().inputTokenCount()).isEqualTo(92);
}
@Test
void should_accept_base64_image() {

    // given: the same picture inlined as base64-encoded PNG data instead of a URL
    String encodedImage = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL));
    UserMessage message = UserMessage.from(ImageContent.from(encodedImage, "image/png"));

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    visionModel.generate(singletonList(message), responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then
    assertThat(answer.content().text()).containsIgnoringCase("cat");
    assertThat(answer.tokenUsage().inputTokenCount()).isEqualTo(92);
}
@Test
void should_accept_text_and_image() {

    // given: a prompt combining a textual question with an image
    UserMessage message = UserMessage.from(
            TextContent.from("What do you see? Reply in one word."),
            ImageContent.from(CAT_IMAGE_URL));

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    visionModel.generate(singletonList(message), responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then
    assertThat(answer.content().text()).containsIgnoringCase("cat");
    assertThat(answer.tokenUsage().inputTokenCount()).isEqualTo(102);
}
@Test
void should_accept_text_and_multiple_images() {

    // given: one question plus two images in the same user message
    UserMessage message = UserMessage.from(
            TextContent.from("What do you see? Reply with one word per image."),
            ImageContent.from(CAT_IMAGE_URL),
            ImageContent.from(DICE_IMAGE_URL));

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    visionModel.generate(singletonList(message), responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then: both images must be described
    assertThat(answer.content().text())
            .containsIgnoringCase("cat")
            .containsIgnoringCase("dice");
    assertThat(answer.tokenUsage().inputTokenCount()).isEqualTo(189);
}
@Test
void should_accept_text_and_multiple_images_from_different_sources() {

    // given: mixed image sources — one URL-referenced, one inlined as base64 PNG
    UserMessage message = UserMessage.from(
            ImageContent.from(CAT_IMAGE_URL),
            ImageContent.from(Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)), "image/png"),
            TextContent.from("What do you see? Reply with one word per image."));

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    visionModel.generate(singletonList(message), responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then: both images must be described, regardless of how they were supplied
    assertThat(answer.content().text())
            .containsIgnoringCase("cat")
            .containsIgnoringCase("dice");
    assertThat(answer.tokenUsage().inputTokenCount()).isEqualTo(189);
}
@Test
void should_use_enum_as_model_name() {

    // given: the model name passed as the GPT_3_5_TURBO enum constant
    // rather than a raw string
    OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
            .baseUrl(System.getenv("OPENAI_BASE_URL"))
            .apiKey(System.getenv("OPENAI_API_KEY"))
            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
            .modelName(GPT_3_5_TURBO)
            .logRequests(true)
            .logResponses(true)
            .build();

    // when
    TestStreamingResponseHandler<AiMessage> responseHandler = new TestStreamingResponseHandler<>();
    model.generate("What is the capital of Germany?", responseHandler);
    Response<AiMessage> answer = responseHandler.get();

    // then
    assertThat(answer.content().text()).containsIgnoringCase("Berlin");
}
} | [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((2550, 2776), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2755), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2710), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2666), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2609), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((20008, 20068), 'java.util.Base64.getEncoder'), ((22330, 22391), 'java.util.Base64.getEncoder')] |
package dev.langchain4j.model.vertexai;
import com.google.cloud.vertexai.api.FunctionCall;
import com.google.cloud.vertexai.api.FunctionDeclaration;
import com.google.cloud.vertexai.api.Schema;
import com.google.cloud.vertexai.api.Tool;
import com.google.cloud.vertexai.api.Type;
import com.google.protobuf.ListValue;
import com.google.protobuf.NullValue;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import dev.langchain4j.agent.tool.JsonSchemaProperty;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import static dev.langchain4j.model.vertexai.FunctionCallHelper.unwrapProtoValue;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@code FunctionCallHelper}: round-tripping between protobuf
 * {@code Value}/{@code Struct}/{@code FunctionCall} types used by Vertex AI and the
 * langchain4j tool abstractions ({@code ToolSpecification}, {@code ToolExecutionRequest}).
 */
public class FunctionCallHelperTest {

    @Test
    void should_unwrap_proto_values() {
        // check basic values
        // Scalar proto Values unwrap to their plain Java equivalents;
        // NULL_VALUE unwraps to Java null.
        assertThat(unwrapProtoValue(Value.newBuilder().setStringValue("hello").build())).isEqualTo("hello");
        assertThat(unwrapProtoValue(Value.newBuilder().setBoolValue(false).build())).isEqualTo(false);
        assertThat(unwrapProtoValue(Value.newBuilder().setNumberValue(1.23).build())).isEqualTo(1.23);
        assertThat(unwrapProtoValue(Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())).isEqualTo(null);

        // check list unwrapping
        // A heterogeneous ListValue unwraps element-by-element into a java.util.List.
        ListValue listValue = ListValue.newBuilder()
            .addValues(Value.newBuilder().setStringValue("hello"))
            .addValues(Value.newBuilder().setBoolValue(true))
            .addValues(Value.newBuilder().setNumberValue(3.14))
            .build();
        assertThat(unwrapProtoValue(Value.newBuilder().setListValue(listValue).build())).isEqualTo(
            Arrays.asList("hello", true, 3.14)
        );

        // check struct unwrapping
        // A nested Struct (including a list-valued field) unwraps into a Map.
        // Note: proto numbers are doubles, so numberOfKids comes back as 2.0.
        Struct struct = Struct.newBuilder()
            .putFields("name", Value.newBuilder().setStringValue("Guillaume").build())
            .putFields("numberOfKids", Value.newBuilder().setNumberValue(2).build())
            .putFields("kids", Value.newBuilder().setListValue(ListValue.newBuilder()
                .addValues(Value.newBuilder().setStringValue("Marion").build())
                .addValues(Value.newBuilder().setStringValue("Érine").build())
                .build()).build())
            .putFields("flag", Value.newBuilder().setBoolValue(false).build())
            .build();
        HashMap<Object, Object> map = new HashMap<>();
        map.put("name", "Guillaume");
        map.put("numberOfKids", 2.0);
        map.put("kids", Arrays.asList("Marion", "Érine"));
        map.put("flag", false);
        assertThat(unwrapProtoValue(Value.newBuilder().setStructValue(struct).build())).isEqualTo(map);
    }

    @Test
    void should_convert_tool_specs() {
        // given
        // A tool spec with one required parameter ("location") and one
        // optional parameter ("days"), each carrying a description.
        ToolSpecification toolSpec = ToolSpecification.builder()
            .description("Give the weather forecast for a location")
            .name("getWeatherForecast")
            .addParameter("location", JsonSchemaProperty.STRING,
                JsonSchemaProperty.description("the location to get the weather forecast for"))
            .addOptionalParameter("days", JsonSchemaProperty.INTEGER,
                JsonSchemaProperty.description("the number of days in the forecast"))
            .build();

        // when
        Tool tool = FunctionCallHelper.convertToolSpecifications(Collections.singletonList(toolSpec));

        // then
        assertThat(tool.getFunctionDeclarationsCount()).isEqualTo(1);

        FunctionDeclaration funDecl = tool.getFunctionDeclarations(0);
        assertThat(funDecl.getDescription()).isEqualTo("Give the weather forecast for a location");
        assertThat(funDecl.getName()).isEqualTo("getWeatherForecast");

        // Parameter schema must keep types, descriptions, and required/optional status.
        Schema parameters = funDecl.getParameters();
        assertThat(parameters.getPropertiesCount()).isEqualTo(2);
        assertThat(parameters.getPropertiesMap().get("location").getType()).isEqualTo(Type.STRING);
        assertThat(parameters.getPropertiesMap().get("location").getDescription())
            .isEqualTo("the location to get the weather forecast for");
        // Only "location" is required; "days" was added as optional.
        assertThat(parameters.getRequiredCount()).isEqualTo(1);
        assertThat(parameters.getRequired(0)).isEqualTo("location");
        assertThat(parameters.getPropertiesMap().get("days").getType()).isEqualTo(Type.INTEGER);
    }

    @Test
    void should_convert_function_calls_to_tool_execution_requests_and_back() {
        // given
        // A Vertex AI FunctionCall with a single string argument.
        FunctionCall functionCall = FunctionCall.newBuilder()
            .setName("getWeatherForecast")
            .setArgs(
                Struct.newBuilder()
                    .putFields("location", Value.newBuilder().setStringValue("Paris").build())
                    .build()
            )
            .build();

        // when
        // Round-trip: FunctionCall -> ToolExecutionRequest -> FunctionCall.
        List<ToolExecutionRequest> toolExecutionRequest =
            FunctionCallHelper.fromFunctionCalls(Collections.singletonList(functionCall));
        FunctionCall sameFunctionCall = FunctionCallHelper.fromToolExecutionRequest(toolExecutionRequest.get(0));

        // then
        assertThat(functionCall).isEqualTo(sameFunctionCall);

        // given
        // The reverse round-trip, starting from a langchain4j request with JSON arguments.
        ToolExecutionRequest newExecutionRequest = ToolExecutionRequest.builder()
            .name("getWeatherForecast")
            .arguments("{\"location\":\"Paris\"}")
            .build();

        // when
        FunctionCall newFunctionCall = FunctionCallHelper.fromToolExecutionRequest(newExecutionRequest);
        ToolExecutionRequest sameExecutionRequest =
            FunctionCallHelper.fromFunctionCalls(Collections.singletonList(newFunctionCall)).get(0);

        // then
        assertThat(newExecutionRequest).isEqualTo(sameExecutionRequest);
    }
}
| [
"dev.langchain4j.agent.tool.ToolSpecification.builder",
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((1025, 1075), 'com.google.protobuf.Value.newBuilder'), ((1025, 1067), 'com.google.protobuf.Value.newBuilder'), ((1134, 1180), 'com.google.protobuf.Value.newBuilder'), ((1134, 1172), 'com.google.protobuf.Value.newBuilder'), ((1237, 1284), 'com.google.protobuf.Value.newBuilder'), ((1237, 1276), 'com.google.protobuf.Value.newBuilder'), ((1340, 1401), 'com.google.protobuf.Value.newBuilder'), ((1340, 1393), 'com.google.protobuf.Value.newBuilder'), ((1485, 1721), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1700), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1636), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1574), 'com.google.protobuf.ListValue.newBuilder'), ((1531, 1573), 'com.google.protobuf.Value.newBuilder'), ((1598, 1635), 'com.google.protobuf.Value.newBuilder'), ((1660, 1699), 'com.google.protobuf.Value.newBuilder'), ((1759, 1809), 'com.google.protobuf.Value.newBuilder'), ((1759, 1801), 'com.google.protobuf.Value.newBuilder'), ((1941, 2513), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2492), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2413), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2132), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2047), 'com.google.protobuf.Struct.newBuilder'), ((1992, 2046), 'com.google.protobuf.Value.newBuilder'), ((1992, 2038), 'com.google.protobuf.Value.newBuilder'), ((2087, 2131), 'com.google.protobuf.Value.newBuilder'), ((2087, 2123), 'com.google.protobuf.Value.newBuilder'), ((2164, 2412), 'com.google.protobuf.Value.newBuilder'), ((2164, 2404), 'com.google.protobuf.Value.newBuilder'), ((2196, 2403), 'com.google.protobuf.ListValue.newBuilder'), ((2196, 2378), 'com.google.protobuf.ListValue.newBuilder'), ((2196, 2298), 'com.google.protobuf.ListValue.newBuilder'), ((2246, 2297), 'com.google.protobuf.Value.newBuilder'), ((2246, 2289), 'com.google.protobuf.Value.newBuilder'), ((2326, 2377), 'com.google.protobuf.Value.newBuilder'), ((2326, 2369), 'com.google.protobuf.Value.newBuilder'), 
((2445, 2491), 'com.google.protobuf.Value.newBuilder'), ((2445, 2483), 'com.google.protobuf.Value.newBuilder'), ((2774, 2823), 'com.google.protobuf.Value.newBuilder'), ((2774, 2815), 'com.google.protobuf.Value.newBuilder'), ((2952, 3426), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3405), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3249), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3088), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3048), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((4632, 4917), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4632, 4896), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4632, 4700), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4739, 4882), 'com.google.protobuf.Struct.newBuilder'), ((4739, 4853), 'com.google.protobuf.Struct.newBuilder'), ((4802, 4852), 'com.google.protobuf.Value.newBuilder'), ((4802, 4844), 'com.google.protobuf.Value.newBuilder'), ((5347, 5489), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5347, 5468), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5347, 5417), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package dev.langchain4j.data.message;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import org.assertj.core.api.WithAssertions;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
class AiMessageTest implements WithAssertions {
@Test
public void test_accessors() {
    // A text-only AiMessage: type is AI, no tool execution requests attached.
    AiMessage textMessage = new AiMessage("text");
    assertThat(textMessage.type()).isEqualTo(ChatMessageType.AI);
    assertThat(textMessage.text()).isEqualTo("text");
    assertThat(textMessage.toolExecutionRequests()).isNull();
    assertThat(textMessage.hasToolExecutionRequests()).isFalse();
    assertThat(textMessage).hasToString("AiMessage { text = \"text\" toolExecutionRequests = null }");

    // A tool-request-only AiMessage: text is null, both requests are retained in order.
    AiMessage toolMessage = new AiMessage(Arrays.asList(
            ToolExecutionRequest.builder()
                    .id("foo")
                    .build(),
            ToolExecutionRequest.builder()
                    .id("bar")
                    .build()));
    assertThat(toolMessage.type()).isEqualTo(ChatMessageType.AI);
    assertThat(toolMessage.text()).isNull();
    assertThat(toolMessage.toolExecutionRequests()).hasSize(2);
    assertThat(toolMessage.hasToolExecutionRequests()).isTrue();
    assertThat(toolMessage).hasToString("AiMessage { text = null toolExecutionRequests = [ToolExecutionRequest { id = \"foo\", name = null, arguments = null }, ToolExecutionRequest { id = \"bar\", name = null, arguments = null }] }");
}
@Test
public void test_equals_and_hashCode() {
    // Two text messages with identical content are equal and hash alike;
    // different text breaks both equality and hash equality.
    AiMessage first = new AiMessage("text");
    AiMessage second = new AiMessage("text");

    assertThat(first)
            .isEqualTo(first)
            .isNotEqualTo(null)
            .isNotEqualTo(new Object())
            .isEqualTo(second)
            .hasSameHashCodeAs(second);

    AiMessage other = new AiMessage("different");
    assertThat(first)
            .isNotEqualTo(other)
            .doesNotHaveSameHashCodeAs(other);

    // The same contract holds for tool-request messages: equality is by
    // request content, and a tool-request message never equals a text message.
    AiMessage toolMessage = AiMessage.from(
            ToolExecutionRequest.builder()
                    .id("foo")
                    .build(),
            ToolExecutionRequest.builder()
                    .id("bar")
                    .build());
    AiMessage equalToolMessage = AiMessage.from(
            ToolExecutionRequest.builder()
                    .id("foo")
                    .build(),
            ToolExecutionRequest.builder()
                    .id("bar")
                    .build());

    assertThat(toolMessage)
            .isNotEqualTo(first)
            .doesNotHaveSameHashCodeAs(first)
            .isEqualTo(equalToolMessage)
            .hasSameHashCodeAs(equalToolMessage);
}
@Test
public void test_from() {
    // Every factory overload must produce an equivalent message.
    ToolExecutionRequest[] requests = new ToolExecutionRequest[]{
            ToolExecutionRequest.builder()
                    .id("foo")
                    .build(),
            ToolExecutionRequest.builder()
                    .id("bar")
                    .build()};

    // Varargs factories: text is null, all requests retained.
    AiMessage fromVarargs = AiMessage.from(requests);
    assertThat(fromVarargs.text()).isNull();
    assertThat(fromVarargs.toolExecutionRequests()).containsOnly(requests);

    AiMessage aiMessageVarargs = AiMessage.aiMessage(requests);
    assertThat(aiMessageVarargs.text()).isNull();
    assertThat(aiMessageVarargs.toolExecutionRequests()).containsOnly(requests);

    // List factories behave identically to the varargs ones.
    AiMessage fromList = AiMessage.from(Arrays.asList(requests));
    assertThat(fromList.text()).isNull();
    assertThat(fromList.toolExecutionRequests()).containsOnly(requests);

    AiMessage aiMessageList = AiMessage.aiMessage(Arrays.asList(requests));
    assertThat(aiMessageList.text()).isNull();
    assertThat(aiMessageList.toolExecutionRequests()).containsOnly(requests);

    // Text factories: requests are null.
    AiMessage fromText = AiMessage.from("text");
    assertThat(fromText.text()).isEqualTo("text");
    assertThat(fromText.toolExecutionRequests()).isNull();

    AiMessage aiMessageText = AiMessage.aiMessage("text");
    assertThat(aiMessageText.text()).isEqualTo("text");
    assertThat(aiMessageText.toolExecutionRequests()).isNull();
}
} | [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((790, 896), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((790, 859), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((918, 1024), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((918, 987), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2076, 2174), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2076, 2141), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2192, 2290), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2192, 2257), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2348, 2446), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2348, 2413), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2464, 2562), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2464, 2529), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2874, 2972), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2874, 2939), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2990, 3088), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2990, 3055), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package dev.langchain4j.service;
import dev.langchain4j.agent.tool.P;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import org.assertj.core.data.Percentage;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO_0613;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106;
import static dev.langchain4j.model.output.FinishReason.STOP;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.Percentage.withPercentage;
import static org.mockito.Mockito.*;
/**
 * Integration tests for streaming AI services: plain streaming, chat memory,
 * and tool execution (sequential and parallel function calling).
 *
 * Fix: the tests that await two futures previously registered only
 * {@code futureAnswer::completeExceptionally} in {@code onError}, leaving
 * {@code futureResponse} forever incomplete on a streaming failure. Both
 * futures are now failed, matching the handler pattern used by the other
 * streaming tests in this code base.
 */
public class StreamingAiServicesIT {

    StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder()
            .baseUrl(System.getenv("OPENAI_BASE_URL"))
            .apiKey(System.getenv("OPENAI_API_KEY"))
            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
            .temperature(0.0)
            .logRequests(true)
            .logResponses(true)
            .build();

    // Token-count assertions tolerate a 5% deviation across tokenizer versions.
    Percentage tokenizerPrecision = withPercentage(5);

    interface Assistant {

        TokenStream chat(String userMessage);
    }

    @Test
    void should_stream_answer() throws Exception {

        Assistant assistant = AiServices.create(Assistant.class, streamingChatModel);

        StringBuilder answerBuilder = new StringBuilder();
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        assistant.chat("What is the capital of Germany?")
                .onNext(answerBuilder::append)
                .onComplete(response -> {
                    futureAnswer.complete(answerBuilder.toString());
                    futureResponse.complete(response);
                })
                .onError(error -> {
                    // fail both futures so neither get() below waits out its timeout
                    futureAnswer.completeExceptionally(error);
                    futureResponse.completeExceptionally(error);
                })
                .start();

        String answer = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        assertThat(answer).contains("Berlin");
        assertThat(response.content().text()).isEqualTo(answer);

        assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(14);
        assertThat(response.tokenUsage().outputTokenCount()).isGreaterThan(1);
        assertThat(response.tokenUsage().totalTokenCount()).isGreaterThan(15);

        assertThat(response.finishReason()).isEqualTo(STOP);
    }

    @Test
    void should_stream_answers_with_memory() throws Exception {

        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(streamingChatModel)
                .chatMemory(chatMemory)
                .build();

        // First turn: introduce a name the model should remember.
        String firstUserMessage = "Hi, my name is Klaus";
        CompletableFuture<Response<AiMessage>> firstResultFuture = new CompletableFuture<>();

        assistant.chat(firstUserMessage)
                .onNext(System.out::println)
                .onComplete(firstResultFuture::complete)
                .onError(firstResultFuture::completeExceptionally)
                .start();

        Response<AiMessage> firstResponse = firstResultFuture.get(30, SECONDS);
        assertThat(firstResponse.content().text()).contains("Klaus");

        // Second turn: the answer must come from chat memory.
        String secondUserMessage = "What is my name?";
        CompletableFuture<Response<AiMessage>> secondResultFuture = new CompletableFuture<>();

        assistant.chat(secondUserMessage)
                .onNext(System.out::println)
                .onComplete(secondResultFuture::complete)
                .onError(secondResultFuture::completeExceptionally)
                .start();

        Response<AiMessage> secondResponse = secondResultFuture.get(30, SECONDS);
        assertThat(secondResponse.content().text()).contains("Klaus");

        // Memory must contain both turns in order: user, ai, user, ai.
        List<ChatMessage> messages = chatMemory.messages();
        assertThat(messages).hasSize(4);

        assertThat(messages.get(0)).isInstanceOf(UserMessage.class);
        assertThat(messages.get(0).text()).isEqualTo(firstUserMessage);

        assertThat(messages.get(1)).isInstanceOf(AiMessage.class);
        assertThat(messages.get(1)).isEqualTo(firstResponse.content());

        assertThat(messages.get(2)).isInstanceOf(UserMessage.class);
        assertThat(messages.get(2).text()).isEqualTo(secondUserMessage);

        assertThat(messages.get(3)).isInstanceOf(AiMessage.class);
        assertThat(messages.get(3)).isEqualTo(secondResponse.content());
    }

    /** Simple tool exposed to the assistant; spied on to verify invocations. */
    static class Calculator {

        @Tool("calculates the square root of the provided number")
        double squareRoot(@P("number to operate on") double number) {
            return Math.sqrt(number);
        }
    }

    @Test
    void should_execute_a_tool_then_stream_answer() throws Exception {

        Calculator calculator = spy(new Calculator());

        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(streamingChatModel)
                .chatMemory(chatMemory)
                .tools(calculator)
                .build();

        StringBuilder answerBuilder = new StringBuilder();
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        String userMessage = "What is the square root of 485906798473894056 in scientific notation?";

        assistant.chat(userMessage)
                .onNext(answerBuilder::append)
                .onComplete(response -> {
                    futureAnswer.complete(answerBuilder.toString());
                    futureResponse.complete(response);
                })
                .onError(error -> {
                    // fail both futures so neither get() below waits out its timeout
                    futureAnswer.completeExceptionally(error);
                    futureResponse.completeExceptionally(error);
                })
                .start();

        String answer = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        assertThat(answer).contains("6.97");
        assertThat(response.content().text()).isEqualTo(answer);

        // Token usage covers both LLM round trips (tool call + final answer).
        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(72 + 110, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(21 + 28, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP);

        verify(calculator).squareRoot(485906798473894056.0);
        verifyNoMoreInteractions(calculator);

        // Memory must record: user, tool request, tool result, final answer.
        List<ChatMessage> messages = chatMemory.messages();
        assertThat(messages).hasSize(4);

        assertThat(messages.get(0)).isInstanceOf(UserMessage.class);
        assertThat(messages.get(0).text()).isEqualTo(userMessage);

        AiMessage aiMessage = (AiMessage) messages.get(1);
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest.id()).isNotBlank();
        assertThat(toolExecutionRequest.name()).isEqualTo("squareRoot");
        assertThat(toolExecutionRequest.arguments())
                .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}");

        ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2);
        assertThat(toolExecutionResultMessage.id()).isEqualTo(toolExecutionRequest.id());
        assertThat(toolExecutionResultMessage.toolName()).isEqualTo("squareRoot");
        assertThat(toolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8");

        assertThat(messages.get(3)).isInstanceOf(AiMessage.class);
        assertThat(messages.get(3).text()).contains("6.97");
    }

    @Test
    void should_execute_multiple_tools_sequentially_then_answer() throws Exception {

        StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder()
                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                .modelName(GPT_3_5_TURBO_0613) // this model can only call tools sequentially
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .build();

        Calculator calculator = spy(new Calculator());

        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(streamingChatModel)
                .chatMemory(chatMemory)
                .tools(calculator)
                .build();

        StringBuilder answerBuilder = new StringBuilder();
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        String userMessage = "What is the square root of 485906798473894056 and 97866249624785 in scientific notation?";

        assistant.chat(userMessage)
                .onNext(answerBuilder::append)
                .onComplete(response -> {
                    futureAnswer.complete(answerBuilder.toString());
                    futureResponse.complete(response);
                })
                .onError(error -> {
                    // fail both futures so neither get() below waits out its timeout
                    futureAnswer.completeExceptionally(error);
                    futureResponse.completeExceptionally(error);
                })
                .start();

        String answer = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        assertThat(answer).contains("6.97", "9.89");
        assertThat(response.content().text()).isEqualTo(answer);

        // Token usage covers three LLM round trips (two tool calls + final answer).
        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(79 + 117 + 152, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(21 + 20 + 53, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP);

        verify(calculator).squareRoot(485906798473894056.0);
        verify(calculator).squareRoot(97866249624785.0);
        verifyNoMoreInteractions(calculator);

        // Sequential calling: each tool request/result pair is a separate exchange.
        List<ChatMessage> messages = chatMemory.messages();
        assertThat(messages).hasSize(6);

        assertThat(messages.get(0)).isInstanceOf(dev.langchain4j.data.message.UserMessage.class);
        assertThat(messages.get(0).text()).isEqualTo(userMessage);

        AiMessage aiMessage = (AiMessage) messages.get(1);
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(1);

        ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
        assertThat(toolExecutionRequest.id()).isNotBlank();
        assertThat(toolExecutionRequest.name()).isEqualTo("squareRoot");
        assertThat(toolExecutionRequest.arguments())
                .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}");

        ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2);
        assertThat(toolExecutionResultMessage.id()).isEqualTo(toolExecutionRequest.id());
        assertThat(toolExecutionResultMessage.toolName()).isEqualTo("squareRoot");
        assertThat(toolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8");

        AiMessage secondAiMessage = (AiMessage) messages.get(3);
        assertThat(secondAiMessage.text()).isNull();
        assertThat(secondAiMessage.toolExecutionRequests()).hasSize(1);

        ToolExecutionRequest secondToolExecutionRequest = secondAiMessage.toolExecutionRequests().get(0);
        assertThat(secondToolExecutionRequest.id()).isNotBlank();
        assertThat(secondToolExecutionRequest.name()).isEqualTo("squareRoot");
        assertThat(secondToolExecutionRequest.arguments())
                .isEqualToIgnoringWhitespace("{\"arg0\": 97866249624785}");

        ToolExecutionResultMessage secondToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(4);
        assertThat(secondToolExecutionResultMessage.id()).isEqualTo(secondToolExecutionRequest.id());
        assertThat(secondToolExecutionResultMessage.toolName()).isEqualTo("squareRoot");
        assertThat(secondToolExecutionResultMessage.text()).isEqualTo("9892737.215997653");

        assertThat(messages.get(5)).isInstanceOf(AiMessage.class);
        assertThat(messages.get(5).text()).contains("6.97", "9.89");
    }

    @Test
    void should_execute_multiple_tools_in_parallel_then_answer() throws Exception {

        Calculator calculator = spy(new Calculator());

        StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder()
                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                .modelName(GPT_3_5_TURBO_1106)
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .build();

        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(streamingChatModel)
                .chatMemory(chatMemory)
                .tools(calculator)
                .build();

        StringBuilder answerBuilder = new StringBuilder();
        CompletableFuture<String> futureAnswer = new CompletableFuture<>();
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        String userMessage = "What is the square root of 485906798473894056 and 97866249624785 in scientific notation?";

        assistant.chat(userMessage)
                .onNext(answerBuilder::append)
                .onComplete(response -> {
                    futureAnswer.complete(answerBuilder.toString());
                    futureResponse.complete(response);
                })
                .onError(error -> {
                    // fail both futures so neither get() below waits out its timeout
                    futureAnswer.completeExceptionally(error);
                    futureResponse.completeExceptionally(error);
                })
                .start();

        String answer = futureAnswer.get(30, SECONDS);
        Response<AiMessage> response = futureResponse.get(30, SECONDS);

        assertThat(answer).contains("6.97", "9.89");
        assertThat(response.content().text()).isEqualTo(answer);

        // Token usage covers two LLM round trips (both tool calls issued at once).
        TokenUsage tokenUsage = response.tokenUsage();
        assertThat(tokenUsage.inputTokenCount()).isCloseTo(79 + 160, tokenizerPrecision);
        assertThat(tokenUsage.outputTokenCount()).isCloseTo(54 + 58, tokenizerPrecision);
        assertThat(tokenUsage.totalTokenCount())
                .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());

        assertThat(response.finishReason()).isEqualTo(STOP);

        verify(calculator).squareRoot(485906798473894056.0);
        verify(calculator).squareRoot(97866249624785.0);
        verifyNoMoreInteractions(calculator);

        // Parallel calling: a single AiMessage carries both tool requests.
        List<ChatMessage> messages = chatMemory.messages();
        assertThat(messages).hasSize(5);

        assertThat(messages.get(0)).isInstanceOf(dev.langchain4j.data.message.UserMessage.class);
        assertThat(messages.get(0).text()).isEqualTo(userMessage);

        AiMessage aiMessage = (AiMessage) messages.get(1);
        assertThat(aiMessage.text()).isNull();
        assertThat(aiMessage.toolExecutionRequests()).hasSize(2);

        ToolExecutionRequest firstToolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
        assertThat(firstToolExecutionRequest.id()).isNotBlank();
        assertThat(firstToolExecutionRequest.name()).isEqualTo("squareRoot");
        assertThat(firstToolExecutionRequest.arguments())
                .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}");

        ToolExecutionRequest secondToolExecutionRequest = aiMessage.toolExecutionRequests().get(1);
        assertThat(secondToolExecutionRequest.id()).isNotBlank();
        assertThat(secondToolExecutionRequest.name()).isEqualTo("squareRoot");
        assertThat(secondToolExecutionRequest.arguments())
                .isEqualToIgnoringWhitespace("{\"arg0\": 97866249624785}");

        ToolExecutionResultMessage firstToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2);
        assertThat(firstToolExecutionResultMessage.id()).isEqualTo(firstToolExecutionRequest.id());
        assertThat(firstToolExecutionResultMessage.toolName()).isEqualTo("squareRoot");
        assertThat(firstToolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8");

        ToolExecutionResultMessage secondToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(3);
        assertThat(secondToolExecutionResultMessage.id()).isEqualTo(secondToolExecutionRequest.id());
        assertThat(secondToolExecutionResultMessage.toolName()).isEqualTo("squareRoot");
        assertThat(secondToolExecutionResultMessage.text()).isEqualTo("9892737.215997653");

        assertThat(messages.get(4)).isInstanceOf(AiMessage.class);
        assertThat(messages.get(4).text()).contains("6.97", "9.89");
    }
}
| [
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder"
] | [((1370, 1695), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1674), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1642), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1611), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1581), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1512), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1459), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9266), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9241), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9205), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9170), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9089), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9042), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 8969), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 8912), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14102), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14077), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14041), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14006), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13972), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13925), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13852), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13795), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder')] |
package com.redhat;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
@ApplicationScoped
public class IngestorExample {

    /**
     * Vector store (the database) that holds the document embeddings.
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;

    /**
     * Model that computes the embedding vector of a document.
     * The bean is provided by the configured LLM (e.g. openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    /**
     * Loads the insurance documents from disk and ingests them (split, embed,
     * store) when the application starts.
     *
     * @param event Quarkus startup event that triggers the ingestion
     */
    public void ingest(@Observes StartupEvent event) {
        System.out.printf("Ingesting documents...%n");

        // Sibling sample corpora (bank, museum, halffoods) live next to this
        // directory; point sourceDir elsewhere to index a different data set.
        File sourceDir = new File("src/main/resources/insurance");
        List<Document> documents =
                FileSystemDocumentLoader.loadDocuments(sourceDir.toPath(), new TextDocumentParser());

        // Split every document into 500-character segments with no overlap
        // before embedding and persisting them.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(500, 0))
                .build();
        ingestor.ingest(documents);

        System.out.printf("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1785, 1982), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1957), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1904), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1856), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.*;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.List;
import static java.util.Arrays.asList;
/**
 * A collection of runnable examples showing how langchain4j {@code AiServices}
 * turns annotated interfaces into LLM-backed implementations: extracting enums,
 * numbers, dates, and POJOs, plus system/user message templating.
 * Each nested class is an independent example with its own {@code main}.
 */
public class OtherServiceExamples {

    // Shared model used by most examples below; reads the key from ApiKeys.
    static ChatLanguageModel chatLanguageModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);

    /** Classifies free text into an enum value via a prompt template. */
    static class Sentiment_Extracting_AI_Service_Example {

        enum Sentiment {
            POSITIVE, NEUTRAL, NEGATIVE;
        }

        interface SentimentAnalyzer {

            // {{it}} is replaced with the single method argument.
            @UserMessage("Analyze sentiment of {{it}}")
            Sentiment analyzeSentimentOf(String text);

            @UserMessage("Does {{it}} have a positive sentiment?")
            boolean isPositive(String text);
        }

        public static void main(String[] args) {

            SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, chatLanguageModel);

            Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is good!");
            System.out.println(sentiment); // POSITIVE

            boolean positive = sentimentAnalyzer.isPositive("It is bad!");
            System.out.println(positive); // false
        }
    }

    /** Extracts the same number from text into every Java numeric type. */
    static class Number_Extracting_AI_Service_Example {

        interface NumberExtractor {

            @UserMessage("Extract number from {{it}}")
            int extractInt(String text);

            @UserMessage("Extract number from {{it}}")
            long extractLong(String text);

            @UserMessage("Extract number from {{it}}")
            BigInteger extractBigInteger(String text);

            @UserMessage("Extract number from {{it}}")
            float extractFloat(String text);

            @UserMessage("Extract number from {{it}}")
            double extractDouble(String text);

            @UserMessage("Extract number from {{it}}")
            BigDecimal extractBigDecimal(String text);
        }

        public static void main(String[] args) {

            NumberExtractor extractor = AiServices.create(NumberExtractor.class, chatLanguageModel);

            String text = "After countless millennia of computation, the supercomputer Deep Thought finally announced " +
                    "that the answer to the ultimate question of life, the universe, and everything was forty two.";

            int intNumber = extractor.extractInt(text);
            System.out.println(intNumber); // 42

            long longNumber = extractor.extractLong(text);
            System.out.println(longNumber); // 42

            BigInteger bigIntegerNumber = extractor.extractBigInteger(text);
            System.out.println(bigIntegerNumber); // 42

            float floatNumber = extractor.extractFloat(text);
            System.out.println(floatNumber); // 42.0

            double doubleNumber = extractor.extractDouble(text);
            System.out.println(doubleNumber); // 42.0

            BigDecimal bigDecimalNumber = extractor.extractBigDecimal(text);
            System.out.println(bigDecimalNumber); // 42.0
        }
    }

    /** Extracts java.time values (date, time, date-time) from prose. */
    static class Date_and_Time_Extracting_AI_Service_Example {

        interface DateTimeExtractor {

            @UserMessage("Extract date from {{it}}")
            LocalDate extractDateFrom(String text);

            @UserMessage("Extract time from {{it}}")
            LocalTime extractTimeFrom(String text);

            @UserMessage("Extract date and time from {{it}}")
            LocalDateTime extractDateTimeFrom(String text);
        }

        public static void main(String[] args) {

            DateTimeExtractor extractor = AiServices.create(DateTimeExtractor.class, chatLanguageModel);

            String text = "The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight," +
                    " following the celebrations of Independence Day.";

            LocalDate date = extractor.extractDateFrom(text);
            System.out.println(date); // 1968-07-04

            LocalTime time = extractor.extractTimeFrom(text);
            System.out.println(time); // 23:45

            LocalDateTime dateTime = extractor.extractDateTimeFrom(text);
            System.out.println(dateTime); // 1968-07-04T23:45
        }
    }

    /** Populates a plain POJO from unstructured text, using OpenAI json mode. */
    static class POJO_Extracting_AI_Service_Example {

        static class Person {

            private String firstName;
            private String lastName;
            private LocalDate birthDate;

            @Override
            public String toString() {
                return "Person {" +
                        " firstName = \"" + firstName + "\"" +
                        ", lastName = \"" + lastName + "\"" +
                        ", birthDate = " + birthDate +
                        " }";
            }
        }

        interface PersonExtractor {

            @UserMessage("Extract information about a person from {{it}}")
            Person extractPersonFrom(String text);
        }

        public static void main(String[] args) {

            // NOTE: builds its own model (shadowing the shared one) so that
            // the json_object response format can be enabled.
            ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
                    .apiKey(System.getenv("OPENAI_API_KEY"))
                    // When extracting POJOs with the LLM that supports the "json mode" feature
                    // (e.g., OpenAI, Azure OpenAI, Ollama, etc.), it is advisable to use it to get more reliable results.
                    // When using this feature, LLM will be forced to output a valid JSON.
                    // Please note that this feature is not (yet) supported when using "demo" key.
                    .responseFormat("json_object")
                    .build();

            PersonExtractor extractor = AiServices.create(PersonExtractor.class, chatLanguageModel);

            String text = "In 1968, amidst the fading echoes of Independence Day, "
                    + "a child named John arrived under the calm evening sky. "
                    + "This newborn, bearing the surname Doe, marked the start of a new journey.";

            Person person = extractor.extractPersonFrom(text);
            System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 }
        }
    }

    /**
     * Same POJO extraction, but field-level {@code @Description} annotations
     * and a {@code @StructuredPrompt} template steer the model's output.
     */
    static class POJO_With_Descriptions_Extracting_AI_Service_Example {

        static class Recipe {

            @Description("short title, 3 words maximum")
            private String title;

            @Description("short description, 2 sentences maximum")
            private String description;

            @Description("each step should be described in 4 words, steps should rhyme")
            private List<String> steps;

            private Integer preparationTimeMinutes;

            @Override
            public String toString() {
                return "Recipe {" +
                        " title = \"" + title + "\"" +
                        ", description = \"" + description + "\"" +
                        ", steps = " + steps +
                        ", preparationTimeMinutes = " + preparationTimeMinutes +
                        " }";
            }
        }

        // Fields of this class fill the {{dish}} and {{ingredients}} template variables.
        @StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
        static class CreateRecipePrompt {

            private String dish;
            private List<String> ingredients;
        }

        interface Chef {

            Recipe createRecipeFrom(String... ingredients);

            Recipe createRecipe(CreateRecipePrompt prompt);
        }

        public static void main(String[] args) {

            Chef chef = AiServices.create(Chef.class, chatLanguageModel);

            Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives");

            System.out.println(recipe);
            // Recipe {
            // title = "Greek Salad",
            // description = "A refreshing mix of veggies and feta cheese in a zesty dressing.",
            // steps = [
            // "Chop cucumber and tomato",
            // "Add onion and olives",
            // "Crumble feta on top",
            // "Drizzle with dressing and enjoy!"
            // ],
            // preparationTimeMinutes = 10
            // }

            CreateRecipePrompt prompt = new CreateRecipePrompt();
            prompt.dish = "salad";
            prompt.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives");

            Recipe anotherRecipe = chef.createRecipe(prompt);
            System.out.println(anotherRecipe);
            // Recipe ...
        }
    }

    /** Adds a fixed persona to every call via {@code @SystemMessage}. */
    static class AI_Service_with_System_Message_Example {

        interface Chef {

            @SystemMessage("You are a professional chef. You are friendly, polite and concise.")
            String answer(String question);
        }

        public static void main(String[] args) {

            Chef chef = AiServices.create(Chef.class, chatLanguageModel);

            String answer = chef.answer("How long should I grill chicken?");
            System.out.println(answer); // Grilling chicken usually takes around 10-15 minutes per side, depending on ...
        }
    }

    /** Combines system + user templates with named {@code @V} variables. */
    static class AI_Service_with_System_and_User_Messages_Example {

        interface TextUtils {

            @SystemMessage("You are a professional translator into {{language}}")
            @UserMessage("Translate the following text: {{text}}")
            String translate(@V("text") String text, @V("language") String language);

            // @UserMessage on a parameter marks it as the message body itself.
            @SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
            List<String> summarize(@UserMessage String text, @V("n") int n);
        }

        public static void main(String[] args) {

            TextUtils utils = AiServices.create(TextUtils.class, chatLanguageModel);

            String translation = utils.translate("Hello, how are you?", "italian");
            System.out.println(translation); // Ciao, come stai?

            String text = "AI, or artificial intelligence, is a branch of computer science that aims to create " +
                    "machines that mimic human intelligence. This can range from simple tasks such as recognizing " +
                    "patterns or speech to more complex tasks like making decisions or predictions.";

            List<String> bulletPoints = utils.summarize(text, 3);
            System.out.println(bulletPoints);
            // [
            // "- AI is a branch of computer science",
            // "- It aims to create machines that mimic human intelligence",
            // "- It can perform simple or complex tasks"
            // ]
        }
    }

    /** Passes the caller's name into the conversation via {@code @UserName}. */
    static class AI_Service_with_UserName_Example {

        interface Assistant {

            String chat(@UserName String name, @UserMessage String message);
        }

        public static void main(String[] args) {

            Assistant assistant = AiServices.create(Assistant.class, chatLanguageModel);

            String answer = assistant.chat("Klaus", "Hi, tell me my name if you see it.");
            System.out.println(answer); // Hello! Your name is Klaus. How can I assist you today?
        }
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((5313, 5888), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5859), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5399), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
Subsets and Splits