code
stringlengths 419
47.9k
| apis
sequencelengths 1
7
| extract_api
stringlengths 67
6.13k
|
---|---|---|
package ai.equity.salt;
import dev.langchain4j.model.openai.OpenAiLanguageModel;
import dev.langchain4j.model.language.LanguageModel;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import static org.junit.Assert.assertThrows;
class OpenAiApiTest {
private static final String FAKE_API_KEY = "asdfghjkl";
LanguageModel modelNoApiKey = OpenAiLanguageModel.builder()
.apiKey(FAKE_API_KEY)
.logRequests(true)
.logResponses(true)
.build();
@Test
void testExceptionMessage() {
String prompt = "What is the capital of Germany?";
Exception exception = assertThrows(RuntimeException.class, () -> {
modelNoApiKey.generate(prompt);
});
String expectedMessage = "Incorrect API key provided: " + FAKE_API_KEY +
". You can find your API key at https://platform.openai.com/account/api-keys.";
String actualMessage = exception.getMessage();
Assertions.assertTrue(actualMessage.contains(expectedMessage));
}
} | [
"dev.langchain4j.model.openai.OpenAiLanguageModel.builder"
] | [((377, 524), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 503), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 471), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 440), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder')] |
/****************************************************************************************
Copyright © 2003-2012 hbasesoft Corporation. All rights reserved. Reproduction or <br>
transmission in whole or in part, in any form or by any means, electronic, mechanical <br>
or otherwise, is prohibited without the prior written consent of the copyright owner. <br>
****************************************************************************************/
package com.hbasesoft.framework.langchain4j.demo;
import java.io.IOException;
import com.hbasesoft.framework.common.utils.PropertyHolder;
import com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel;
import dev.langchain4j.chain.ConversationalChain;
/**
* <Description> <br>
*
* @author 王伟<br>
* @version 1.0<br>
* @taskId <br>
* @CreateDate 2023年10月26日 <br>
* @since V1.0<br>
* @see com.hbasesoft.framework.langchain4j.demo <br>
*/
public class ChatMemoryExamples {

    /**
     * Description: demonstrates a ConversationalChain with its built-in
     * default chat memory — the second question can only be answered
     * because the chain remembers the first exchange. <br>
     *
     * @author 王伟<br>
     * @taskId <br>
     * @param args command-line arguments (unused)
     * @throws IOException <br>
     */
    public static void main(final String[] args) throws IOException {
        QwenChatModel model = QwenChatModel.builder()
            .apiKey(PropertyHolder.getProperty("qwen.apikey"))
            .build();

        ConversationalChain chain = ConversationalChain.builder()
            .chatLanguageModel(model)
            // a custom ChatMemory could be supplied here to override the default
            .build();

        // Hello Klaus! How can I assist you today?
        System.out.println(chain.execute("Hello, my name is Klaus"));
        // Your name is Klaus.
        System.out.println(chain.execute("What is my name?"));
    }
}
| [
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((1211, 1444), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1211, 1354), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1272, 1353), 'com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel.builder'), ((1272, 1345), 'com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel.builder')] |
package net.savantly.mainbot.config;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import lombok.RequiredArgsConstructor;
@Configuration
@RequiredArgsConstructor
public class EmbeddingModelConfig {

    private final OpenAIConfig openAIConfig;

    /**
     * Exposes the OpenAI text-embedding-ada-002 model as a Spring bean.
     * The API key is taken from {@code OpenAIConfig}
     * (see https://platform.openai.com/account/api-keys).
     */
    @Bean
    public EmbeddingModel embeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(openAIConfig.getApiKey())
                .modelName(TEXT_EMBEDDING_ADA_002)
                .timeout(ofSeconds(30))
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((651, 895), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 870), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 830), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 731), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package org.agoncal.fascicle.langchain4j.context;
// tag::adocSnippet[]
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.model.azure.AzureOpenAiChatModel;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

    public static void main(String[] args) throws InterruptedException {
        MusicianService musicianService = new MusicianService();
        // musicianService.useNoMemory();
        musicianService.useConversationalChain();
        // musicianService.useConversationalChainWithMemory();
    }

    private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
    private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
    private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");

    private static final String PROMPT = "When was the first Beatles album released?";

    /**
     * Builds the Azure OpenAI chat model shared by all three demos.
     * Previously this identical builder chain was copy-pasted three times,
     * differing only in the logging flag.
     *
     * @param logRequestsAndResponses whether to log HTTP traffic
     */
    private static AzureOpenAiChatModel buildModel(boolean logRequestsAndResponses) {
        return AzureOpenAiChatModel.builder()
            .apiKey(AZURE_OPENAI_KEY)
            .endpoint(AZURE_OPENAI_ENDPOINT)
            .deploymentName(AZURE_OPENAI_DEPLOYMENT_NAME)
            .temperature(0.7)
            .logRequestsAndResponses(logRequestsAndResponses)
            .build();
    }

    /**
     * Prints {@code text} framed in a '#' banner, reproducing the original
     * hand-written banners exactly (border width = text length + 4,
     * preceded by a blank line pair).
     */
    private static void printBanner(String text) {
        StringBuilder border = new StringBuilder();
        for (int i = 0; i < text.length() + 4; i++) {
            border.append('#');
        }
        System.out.println("\n\n" + border);
        System.out.println("# " + text + " #");
        System.out.println(border);
    }

    /** Sends one question through the chain and prints the banner plus answer. */
    private static void ask(ConversationalChain chain, String question) {
        printBanner(question);
        System.out.println(">>>>" + chain.execute(question));
    }

    // #################
    // ### NO MEMORY ###
    // #################
    public void useNoMemory() throws InterruptedException {
        System.out.println("### useNoMemory");

        AzureOpenAiChatModel model = buildModel(false);

        // Each generate() call is independent: the model keeps no memory of
        // earlier turns, so the follow-up questions cannot be answered.
        System.out.println(">>>>" + model.generate("My name is Antonio Goncalves"));
        Thread.sleep(5000);
        System.out.println(">>>>" + model.generate("When was the first Rolling Stones album released?"));
        Thread.sleep(5000);
        System.out.println(">>>>" + model.generate("What's the name of the singer?"));
        Thread.sleep(5000);
        System.out.println(">>>>" + model.generate("What is my name?"));
    }

    // ################################
    // ### USE CONVERSATIONAL CHAIN ###
    // ################################
    public void useConversationalChain() throws InterruptedException {
        System.out.println("### useConversationalChain");

        ConversationalChain chain = ConversationalChain.builder()
            .chatLanguageModel(buildModel(true))
            .build();

        ask(chain, "My name is Antonio Goncalves");
        Thread.sleep(5000);
        ask(chain, "When was the first Rolling Stones album released?");
        Thread.sleep(5000);
        ask(chain, "What's the name of the singer?");
        Thread.sleep(5000);
        ask(chain, "What is my name?");
    }

    // ############################################
    // ### USE CONVERSATIONAL CHAIN WITH MEMORY ###
    // ############################################
    public void useConversationalChainWithMemory() throws InterruptedException {
        System.out.println("### useConversationalChainWithMemory");

        // Small sliding window: only the last 2 messages are remembered.
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(2);

        ConversationalChain chain = ConversationalChain.builder()
            .chatLanguageModel(buildModel(true))
            .chatMemory(chatMemory)
            .build();

        ask(chain, "My name is Antonio Goncalves");
        Thread.sleep(5000);
        ask(chain, "When was the first Rolling Stones album released?");
        Thread.sleep(5000);
        ask(chain, "What's the name of the singer?");
        Thread.sleep(5000);
        ask(chain, "What is my name?");
    }
}
// end::adocSnippet[]
| [
"dev.langchain4j.model.azure.AzureOpenAiChatModel.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((1289, 1519), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1504), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1466), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1442), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1390), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1351), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2434), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2419), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2382), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2358), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2306), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2267), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2469, 2545), 'dev.langchain4j.chain.ConversationalChain.builder'), ((2469, 2530), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4017, 4246), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4231), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4194), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4170), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4118), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4079), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4354, 4460), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4354, 4445), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4354, 4415), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package ru.vzotov.ai;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import ru.vzotov.accounting.domain.model.PersistentPropertyRepository;
import ru.vzotov.ai.application.PurchaseCategoryIndexer;
import ru.vzotov.ai.application.PurchaseCategoryProcessor;
import ru.vzotov.ai.interfaces.facade.AIFacade;
import ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl;
import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryRepository;
import ru.vzotov.langchain4j.gigachat.spring.AutoConfig;
import ru.vzotov.purchases.domain.model.PurchaseRepository;
@ConditionalOnProperty(prefix = AIModuleProperties.PREFIX, name = "enabled")
@Configuration
@ImportAutoConfiguration(AutoConfig.class)
public class AIModule {

    private static final Logger log = LoggerFactory.getLogger(AIModule.class);

    /** Configuration properties backing the whole AI module. */
    @Bean
    AIModuleProperties aiModuleProperties() {
        return new AIModuleProperties();
    }

    /** Postgres/pgvector-backed embedding store, fully driven by configuration. */
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(AIModuleProperties properties) {
        PgVectorConfigProperties pgVector = properties.getPgvector();
        return PgVectorEmbeddingStore.builder()
                .host(pgVector.getHost())
                .port(pgVector.getPort())
                .database(pgVector.getDatabase())
                .user(pgVector.getUser())
                .password(pgVector.getPassword())
                .dimension(pgVector.getDimension())
                .table(pgVector.getTable())
                .createTable(pgVector.getCreate())
                .dropTableFirst(pgVector.getDrop())
                .useIndex(true)
                .indexListSize(pgVector.getIndexListSize())
                .build();
    }

    /** Batch processor that embeds purchases for category matching. */
    @Bean
    PurchaseCategoryProcessor processor(EmbeddingStore<TextSegment> embeddingStore,
                                        EmbeddingModel embeddingModel,
                                        AIModuleProperties properties) {
        PurchasesConfigProperties purchases = properties.getPurchases();
        return PurchaseCategoryProcessor.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .partitionSize(purchases.getPartitionSize())
                .build();
    }

    /** Facade exposing the AI features over the purchase/category repositories. */
    @Bean
    AIFacade facade(
            PurchaseCategoryRepository purchaseCategoryRepository,
            PurchaseRepository purchaseRepository,
            EmbeddingStore<TextSegment> embeddingStore,
            EmbeddingModel embeddingModel,
            ChatLanguageModel chatLanguageModel,
            ObjectMapper objectMapper) {
        return AIFacadeImpl.builder()
                .purchaseCategoryRepository(purchaseCategoryRepository)
                .purchaseRepository(purchaseRepository)
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .chatLanguageModel(chatLanguageModel)
                .objectMapper(objectMapper)
                .build();
    }

    /** Indexer bean; only created when a PersistentPropertyRepository exists. */
    @Bean
    @ConditionalOnBean(PersistentPropertyRepository.class)
    PurchaseCategoryIndexer indexer(AIModuleProperties properties,
                                    ObjectMapper objectMapper,
                                    PurchaseRepository purchaseRepository,
                                    PersistentPropertyRepository propertyRepository,
                                    PurchaseCategoryProcessor processor) {
        return PurchaseCategoryIndexer.builder()
                .modelType(properties.getModelType())
                .objectMapper(objectMapper)
                .purchaseRepository(purchaseRepository)
                .propertyRepository(propertyRepository)
                .processor(processor)
                .build();
    }
}
| [
"dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder"
] | [((1751, 2305), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2280), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2222), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2190), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2140), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2091), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2049), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1999), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1951), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1911), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1863), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1823), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((2637, 2851), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2826), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2768), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2720), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((3213, 3582), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3557), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3513), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3459), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3411), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3363), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3307), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((4040, 4346), 
'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4321), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4283), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4227), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4171), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4127), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder')] |
package fr.anthonyquere.talkwithme.core.ai.langchain;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
import fr.anthonyquere.talkwithme.core.ai.langchain.services.Summary;
import fr.anthonyquere.talkwithme.core.ai.langchain.services.TalkWithCompanion;
import fr.anthonyquere.talkwithme.core.crud.companions.Companion;
import fr.anthonyquere.talkwithme.core.crud.message.MessageRepository;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class CompanionAiService {

  /**
   * Wires the companion chat assistant; each companion gets its own
   * persistent memory backed by the message repository.
   */
  @Bean
  public TalkWithCompanion buildAiCompanionService(
    ChatLanguageModel model,
    MessageRepository messageRepository
  ) {
    AiServices<TalkWithCompanion> builder = AiServices.builder(TalkWithCompanion.class);
    builder.chatLanguageModel(model);
    builder.chatMemoryProvider(
      companion -> new CompanionChatMemory((Companion) companion, messageRepository));
    return builder.build();
  }

  /** Wires the stateless summarization service. */
  @Bean
  public Summary buildAiSummaryService(
    ChatLanguageModel model
  ) {
    AiServices<Summary> builder = AiServices.builder(Summary.class);
    builder.chatLanguageModel(model);
    return builder.build();
  }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((749, 945), 'dev.langchain4j.service.AiServices.builder'), ((749, 930), 'dev.langchain4j.service.AiServices.builder'), ((749, 824), 'dev.langchain4j.service.AiServices.builder'), ((1045, 1125), 'dev.langchain4j.service.AiServices.builder'), ((1045, 1110), 'dev.langchain4j.service.AiServices.builder')] |
package com.revolvingSolutions.aicvgeneratorbackend.service;
import com.revolvingSolutions.aicvgeneratorbackend.agent.*;
import com.revolvingSolutions.aicvgeneratorbackend.constants.StaticValues;
import com.revolvingSolutions.aicvgeneratorbackend.model.aimodels.*;
import com.revolvingSolutions.aicvgeneratorbackend.model.user.User;
import com.revolvingSolutions.aicvgeneratorbackend.request.AI.ChatRequest;
import com.revolvingSolutions.aicvgeneratorbackend.request.AI.ExtractionRequest;
import com.revolvingSolutions.aicvgeneratorbackend.request.AI.GenerationRequest;
import com.revolvingSolutions.aicvgeneratorbackend.request.AI.UrlExtractionRequest;
import com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse;
import com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse;
import com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse;
import dev.langchain4j.classification.TextClassifier;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import lombok.RequiredArgsConstructor;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import static dev.langchain4j.data.message.SystemMessage.systemMessage;
@Service
@RequiredArgsConstructor
public class LangChainService {

    /** Horizontal rule framing the console interaction logs. */
    private static final String SEPARATOR =
        "==========================================================================================";

    /** When true, AI calls are skipped and canned static values are returned. */
    @Value("${app.api.blockAI}")
    private Boolean block;

    private final UserService userService;

    /**
     * Generates CV content from the supplied data. When AI is blocked via
     * configuration, the input is echoed back with a static description;
     * otherwise the description is produced by the description agent.
     */
    public GenerationResponse GenerateCV(
            GenerationRequest request
    ) {
        if (block) {
            // AI disabled: echo the input back with the canned description.
            // (An unused list of static employment descriptions was previously
            // built here; it was never read and has been removed.)
            return GenerationResponse.builder()
                    .data(
                            CVData.builder()
                                    .firstname(request.getData().getFirstname())
                                    .lastname(request.getData().getLastname())
                                    .phoneNumber(request.getData().getPhoneNumber())
                                    .email(request.getData().getEmail())
                                    .location(request.getData().getLocation())
                                    .description(StaticValues.description)
                                    .employmenthistory(request.getData().getExperience())
                                    .qualifications(request.getData().getQualifications())
                                    .links(request.getData().getLinks())
                                    .references(request.getData().getReferences())
                                    .skills(request.getData().getSkills())
                                    .build()
                    )
                    .build();
        }
        String description = interact(descriptionAgent(chatLanguageModel()),
                createProfessionalSummaryModel(request.getData()).toString());
        if (description == null) description = "Description";
        if (request.getData().getExperience() == null) request.getData().setExperience(new ArrayList<>());
        if (request.getData().getQualifications() == null) request.getData().setQualifications(new ArrayList<>());
        return GenerationResponse.builder()
                .data(
                        CVData.builder()
                                .firstname(request.getData().getFirstname())
                                .lastname(request.getData().getLastname())
                                .phoneNumber(request.getData().getPhoneNumber())
                                .email(request.getData().getEmail())
                                .location(request.getData().getLocation())
                                .description(description)
                                .employmenthistory(request.getData().getExperience())
                                .qualifications(request.getData().getQualifications())
                                .links(request.getData().getLinks())
                                .skills(request.getData().getSkills())
                                .references(request.getData().getReferences())
                                .build()
                )
                .build();
    }

    /** Maps the raw input data onto the prompt model used by the description agent. */
    private ProfessionalSummaryModel createProfessionalSummaryModel(AIInputData data) {
        return ProfessionalSummaryModel.builder()
                .firstname(data.getFirstname())
                .lastname(data.getLastname())
                .description(data.getDescription())
                .location(data.getLocation())
                .experience(data.getExperience())
                .qualifications(data.getQualifications())
                .skills(data.getSkills())
                .build();
    }

    /**
     * Fills every null field of the extracted data with a safe placeholder
     * so downstream rendering never sees nulls.
     *
     * Bug fix: the original (duplicated in extractData and extractUrlData)
     * checked {@code getSkills() == null} but then called
     * {@code setReferences(...)} — skills were never defaulted and
     * references were only defaulted when skills happened to be null.
     * Both are now defaulted independently.
     */
    private static void applyDefaults(AIInputData data) {
        if (data.getFirstname() == null) data.setFirstname("First Name");
        if (data.getLastname() == null) data.setLastname("Last Name");
        if (data.getEmail() == null) data.setEmail("Email");
        if (data.getLocation() == null) data.setLocation("Location");
        if (data.getPhoneNumber() == null) data.setPhoneNumber("Phone number");
        if (data.getDescription() == null) data.setDescription("Description");
        if (data.getExperience() == null) data.setExperience(new ArrayList<>());
        if (data.getQualifications() == null) data.setQualifications(new ArrayList<>());
        if (data.getLinks() == null) data.setLinks(new ArrayList<>());
        if (data.getSkills() == null) data.setSkills(new ArrayList<>());
        if (data.getReferences() == null) data.setReferences(new ArrayList<>());
    }

    /**
     * Extracts structured CV data from free text via the extraction agent.
     *
     * @throws Exception if the text exceeds the 1000-word limit
     */
    public ExtractionResponse extractData(
            ExtractionRequest request
    ) throws Exception {
        // Crude whitespace-split word count guards token cost.
        if (request.getText().split(" ").length > 1000) {
            throw new Exception("Word Limit!!", null);
        }
        AIInputData data = extractionAgent(extractionChatLanguageModel()).extractPersonFrom(request.getText());
        applyDefaults(data);
        return ExtractionResponse.builder()
                .data(data)
                .build();
    }

    /**
     * Fetches a web page and extracts structured CV data from its HTML.
     *
     * @throws IOException if the page cannot be fetched
     */
    public ExtractionResponse extractUrlData(
            UrlExtractionRequest request
    ) throws IOException {
        Document doc = Jsoup.connect(request.getUrl()).get();
        AIInputData data = urlExtractionAgent(extractionChatLanguageModel()).extractPersonFrom(doc.toString());
        applyDefaults(data);
        return ExtractionResponse.builder()
                .data(data)
                .build();
    }

    /** Returns the current user stripped to AI-safe fields. */
    public User getAISafeModel() {
        return userService.getUser().getUser();
    }

    /** Runs one chat-bot turn and appends both sides to the message history. */
    public ChatResponse chatBotInteract(ChatRequest request) {
        ChatBotAgent chatBot = chatBotAgent(chatBotLanguageModel(), request.getMessages());
        String response = chatBot.chat(0, request.getUserMessage());
        request.getMessages().add(request.getUserMessage());
        request.getMessages().add(response);
        return ChatResponse.builder()
                .messages(request.getMessages())
                .build();
    }

    /** Logs the user side of an exchange framed by separators. */
    private static void logUser(String userMessage) {
        System.out.println(SEPARATOR);
        System.out.println("[User]: " + userMessage);
        System.out.println(SEPARATOR);
    }

    /** Logs the agent side of an exchange framed by separators. */
    private static void logAgent(String agentLabel, String agentAnswer) {
        System.out.println(SEPARATOR);
        System.out.println("[" + agentLabel + "]: " + agentAnswer);
        System.out.println(SEPARATOR);
    }

    public static String interact(DescriptionAgent agent, String userMessage) {
        logUser(userMessage);
        String agentAnswer = agent.chat(userMessage);
        logAgent("DescriptionAgent", agentAnswer);
        return agentAnswer;
    }

    private static String interact(EmploymentHistoryExpander agent, String userMessage) {
        logUser(userMessage);
        String agentAnswer = agent.chat(userMessage);
        logAgent("EmploymentHistoryExpander", agentAnswer);
        return agentAnswer;
    }

    private static String interact(EducationDescriptionAgent agent, String userMessage) {
        logUser(userMessage);
        String agentAnswer = agent.chat(userMessage);
        logAgent("EducationDescriptionAgent", agentAnswer);
        return agentAnswer;
    }

    private static String interact(ChatBotAgent agent, String userMessage) {
        logUser(userMessage);
        String agentAnswer = agent.chat(0, userMessage);
        // Fixed copy-pasted label: this overload logs the ChatBotAgent,
        // not the EducationDescriptionAgent.
        logAgent("ChatBotAgent", agentAnswer);
        return agentAnswer;
    }

    @Value("${langchain4j.chat-model.openai.api-key}")
    private String apikey;

    @Value("${langchain4j.chat-model.openai.model-name}")
    private String modelName;

    @Value("${langchain4j.chat-model.openai.temperature}")
    private Double temperature;

    private final Retriever<TextSegment> retriever;

    private final ModerationModel moderationModel;

    /**
     * Shared OpenAI chat-model factory. The four model variants below were
     * previously four copy-pasted builder chains differing only in model
     * name, temperature, max tokens, and timeout.
     */
    private ChatLanguageModel buildChatModel(String name, Double temp, int maxTokens, Duration timeout) {
        return OpenAiChatModel.builder()
                .modelName(name)
                .apiKey(apikey)
                .temperature(temp)
                .logRequests(false)
                .logResponses(false)
                .maxRetries(2)
                .maxTokens(maxTokens)
                .topP(1.0)
                .timeout(timeout)
                .frequencyPenalty(0.0)
                .presencePenalty(0.0)
                .build();
    }

    private ChatLanguageModel chatLanguageModel() {
        return buildChatModel(modelName, temperature, 1000, Duration.ofMinutes(2));
    }

    private ChatLanguageModel educationDescriptionChatModel() {
        return buildChatModel(modelName, 0.4, 1000, Duration.ofMinutes(2));
    }

    private ChatLanguageModel extractionChatLanguageModel() {
        return buildChatModel(modelName, temperature, 1000, Duration.ofMinutes(3));
    }

    private ChatLanguageModel chatBotLanguageModel() {
        return buildChatModel("gpt-4", 0.0, 500, Duration.ofMinutes(3));
    }

    /** Shared AiServices factory with a sliding-window chat memory. */
    private static <T> T buildAgent(Class<T> agentType, ChatLanguageModel model, int maxMessages) {
        return AiServices.builder(agentType)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(maxMessages))
                .build();
    }

    private DescriptionAgent descriptionAgent(ChatLanguageModel chatLanguageModel) {
        return buildAgent(DescriptionAgent.class, chatLanguageModel, 3);
    }

    private EmploymentHistoryExpander employmentHistoryExpander(ChatLanguageModel chatLanguageModel) {
        return buildAgent(EmploymentHistoryExpander.class, chatLanguageModel, 3);
    }

    private EducationDescriptionAgent educationDescriptionAgent(ChatLanguageModel chatLanguageModel) {
        return buildAgent(EducationDescriptionAgent.class, chatLanguageModel, 3);
    }

    public ExtractionAgent extractionAgent(ChatLanguageModel extractionChatLanguageModel) {
        return buildAgent(ExtractionAgent.class, extractionChatLanguageModel, 5);
    }

    public UrlExtractionAgent urlExtractionAgent(ChatLanguageModel extractionChatLanguageModel) {
        return buildAgent(UrlExtractionAgent.class, extractionChatLanguageModel, 5);
    }

    /**
     * Builds the chat-bot agent seeded with the user's AI-safe profile and the
     * prior conversation. Messages alternate user/assistant starting with the
     * user, mirroring how chatBotInteract appends them.
     */
    public ChatBotAgent chatBotAgent(ChatLanguageModel chatLanguageModel, List<String> messages) {
        List<ChatMessage> history = new ArrayList<>();
        history.add(
                systemMessage(
                        "The user has the following information: " + getAISafeModel().toString()
                )
        );
        boolean user = true;
        for (String message : messages) {
            if (user) {
                history.add(new UserMessage(message));
            } else {
                history.add(new AiMessage(message));
            }
            user = !user;
        }
        PersistentChatMemoryStore store = new PersistentChatMemoryStore(history);
        return AiServices.builder(ChatBotAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemoryProvider(
                        memoryId -> MessageWindowChatMemory.builder()
                                .chatMemoryStore(store)
                                .maxMessages(100)
                                .build()
                )
                .moderationModel(moderationModel)
                .retriever(retriever)
                .build();
    }
}
class PersistentChatMemoryStore implements ChatMemoryStore {
public PersistentChatMemoryStore(List<ChatMessage> messages) {
this.messages = messages;
}
private List<ChatMessage> messages;
@Override
public List<ChatMessage> getMessages(Object memoryId) {
return messages;
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
this.messages = messages;
}
@Override
public void deleteMessages(Object memoryId) {
messages = new ArrayList<>();
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2489, 3569), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((2489, 3540), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((4022, 5025), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((4022, 5000), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((6662, 6781), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((6662, 6756), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((6927, 6964), 'org.jsoup.Jsoup.connect'), ((7847, 7970), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((7847, 7945), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((8463, 8559), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse.builder'), ((8463, 8534), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse.builder'), ((12156, 12607), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12582), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12544), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12505), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12457), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12430), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12397), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12366), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12329), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12293), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12251), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12219), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13138), 
'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13113), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13075), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13036), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12988), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12961), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12928), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12897), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12860), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12824), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12790), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12758), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13675), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13650), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13612), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13573), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13525), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13498), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13465), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13434), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13397), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13361), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13319), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13287), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14194), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14169), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14131), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14092), 
'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14044), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14017), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13985), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13954), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13917), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13881), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13847), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13815), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((14302, 14495), 'dev.langchain4j.service.AiServices.builder'), ((14302, 14470), 'dev.langchain4j.service.AiServices.builder'), ((14302, 14398), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14824), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14799), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14727), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15153), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15128), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15056), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15471), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15446), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15374), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15798), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15773), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15701), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17069), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17044), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17006), 'dev.langchain4j.service.AiServices.builder'), ((16593, 16956), 'dev.langchain4j.service.AiServices.builder'), ((16593, 16685), 'dev.langchain4j.service.AiServices.builder'), ((16758, 16938), 
'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((16758, 16897), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((16758, 16847), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package com.learning.ai.config;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import com.zaxxer.hikari.HikariDataSource;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore;
import java.io.IOException;
import java.net.URI;
import javax.sql.DataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
@Configuration(proxyBeanMethods = false)
public class AIConfig {

    /**
     * AI customer-support agent wired with the chat model and a
     * retrieval-augmented content retriever; chat memory is capped at the
     * last 20 messages.
     */
    @Bean
    AICustomerSupportAgent customerSupportAgent(
            ChatLanguageModel chatLanguageModel,
            // ChatTools bookingTools,
            ContentRetriever contentRetriever) {
        return AiServices.builder(AICustomerSupportAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                // .tools(bookingTools)
                .contentRetriever(contentRetriever)
                .build();
    }

    /**
     * Content retriever that returns at most one segment per query, and only
     * when its similarity score is at least 0.6.
     */
    @Bean
    ContentRetriever contentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        // You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
        // - The nature of your data
        // - The embedding model you are using
        int maxResults = 1;
        double minScore = 0.6;
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(maxResults)
                .minScore(minScore)
                .build();
    }

    /** In-process MiniLM embedding model (384-dimensional vectors, per the note below). */
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Builds a pgvector-backed embedding store and eagerly ingests the bundled
     * medicaid-wa-faqs.pdf into it while the bean is created.
     *
     * @throws IOException if the bundled PDF cannot be read
     */
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(
            EmbeddingModel embeddingModel, ResourceLoader resourceLoader, DataSource dataSource) throws IOException {
        // Normally, you would already have your embedding store filled with your data.
        // However, for the purpose of this demonstration, we will:
        HikariDataSource hikariDataSource = (HikariDataSource) dataSource;
        String jdbcUrl = hikariDataSource.getJdbcUrl();
        // Strip the leading "jdbc:" (5 chars) so java.net.URI can parse host/port/path.
        URI uri = URI.create(jdbcUrl.substring(5));
        String host = uri.getHost();
        int dbPort = uri.getPort();
        String path = uri.getPath();
        // 1. Create an postgres embedding store
        // dimension of the embedding is 384 (all-minilm) and 1536 (openai)
        EmbeddingStore<TextSegment> embeddingStore = PgVectorEmbeddingStore.builder()
                .host(host)
                .port(dbPort != -1 ? dbPort : 5432) // -1 means no port in the URL: use the Postgres default
                .user(hikariDataSource.getUsername())
                .password(hikariDataSource.getPassword())
                .database(path.substring(1)) // drop the leading '/' of the URI path
                .table("ai_vector_store")
                .dimension(384) // must match embeddingModel() above (MiniLM = 384)
                .build();
        // 2. Load an example document (medicaid-wa-faqs.pdf)
        // NOTE(review): pdfResource.getFile() requires the resource to exist on the
        // filesystem; this fails when running from a packaged jar — confirm deployment.
        Resource pdfResource = resourceLoader.getResource("classpath:medicaid-wa-faqs.pdf");
        Document document = loadDocument(pdfResource.getFile().toPath(), new ApachePdfBoxDocumentParser());
        // 3. Split the document into segments 500 tokens each
        // 4. Convert segments into embeddings
        // 5. Store embeddings into embedding store
        // All this can be done manually, but we will use EmbeddingStoreIngestor to automate this:
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(500, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);
        return embeddingStore;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((1815, 2122), 'dev.langchain4j.service.AiServices.builder'), ((1815, 2097), 'dev.langchain4j.service.AiServices.builder'), ((1815, 1990), 'dev.langchain4j.service.AiServices.builder'), ((1815, 1917), 'dev.langchain4j.service.AiServices.builder'), ((2536, 2773), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2748), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2712), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2672), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2624), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3692, 4060), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 4035), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 4003), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3961), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3916), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3858), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3804), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3752), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((4747, 4952), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4927), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4879), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4831), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.azure.migration.java.copilot.rag;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.azure.AzureOpenAiModelName.GPT_4_32K;
@Configuration
public class Configure {

    /** Local MiniLM model used to embed document chunks. */
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Ingestor that splits documents into 100-token chunks (no overlap, GPT-4-32k
     * tokenizer), embeds them, and stores the vectors in Azure AI Search.
     */
    @Bean
    EmbeddingStoreIngestor ingestor(AzureAiSearchEmbeddingStore azureAiSearchEmbeddingStore, EmbeddingModel embeddingModel) {
        return EmbeddingStoreIngestor.builder()
                .embeddingStore(azureAiSearchEmbeddingStore)
                .embeddingModel(embeddingModel)
                .documentSplitter(DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_4_32K)))
                .build();
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1088, 1306), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1281), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1233), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1172), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package net.herhoffer.mensabot.rag;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.loader.UrlDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class MensaIngestor {

    private static final Logger LOG = LoggerFactory.getLogger(MensaIngestor.class);

    @Inject
    RedisEmbeddingStore store;

    @Inject
    EmbeddingModel embeddingModel;

    /**
     * On application startup, loads the mensa feed from the remote URL and
     * ingests it into the Redis embedding store as 500-token chunks (no overlap).
     *
     * @param event Quarkus startup event that triggers the ingestion
     */
    public void ingest(@Observes StartupEvent event) {
        LOG.info("ingesting documents");
        Document menuDocument = UrlDocumentLoader.load("https://neuland.app/api/mensa/", new TextDocumentParser());
        EmbeddingStoreIngestor.builder()
                .documentSplitter(recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(store)
                .build()
                .ingest(menuDocument);
        LOG.info("Documents successfully ingested");
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1248, 1393), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1381), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1341), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1306), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package my.samples;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
public class LoadFINRARuletoES {

    /**
     * One-shot loader: reads the bundled FINRA rules PDF, splits it into
     * 300-token segments, embeds them with MiniLM, and writes the vectors into
     * a local Elasticsearch index.
     */
    public static void main(String[] args) {
        EmbeddingStore<TextSegment> esStore = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .indexName("finra-rules-embeddings")
                .dimension(384) // MiniLM embedding size
                .build();
        EmbeddingModel miniLm = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(miniLm)
                .embeddingStore(esStore)
                .build();
        Path pdfPath = toPath("example-files/FINRARULES.pdf");
        Document document = FileSystemDocumentLoader.loadDocument(pdfPath, new ApachePdfBoxDocumentParser());
        // Tag the document so its origin can be filtered on later.
        document.metadata().add("fileName", pdfPath.getFileName().toString());
        document.metadata().add("filePath", pdfPath.toString());
        document.metadata().add("source", "FINRA");
        document.metadata().add("category", "FINANCE");
        ingestor.ingest(document);
        System.out.println("Document ingested successfully");
    }

    /**
     * Resolves a classpath resource name to a filesystem {@link Path}.
     *
     * @throws RuntimeException if the resource is missing or its URI is malformed
     */
    private static Path toPath(String fileName) {
        // Corrected path assuming files are in src/main/resources/example-files
        URL resourceUrl = LoadFINRARuletoES.class.getClassLoader().getResource(fileName);
        if (resourceUrl == null) {
            throw new RuntimeException("Resource not found: " + fileName);
        }
        try {
            return Paths.get(resourceUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((909, 1108), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 1083), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 1051), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 998), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1228, 1452), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1427), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1379), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1331), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package dev.langchain4j.model.zhipu;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.internal.Utils;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.model.zhipu.chat.AssistantMessage;
import dev.langchain4j.model.zhipu.chat.ChatCompletionResponse;
import dev.langchain4j.model.zhipu.chat.Function;
import dev.langchain4j.model.zhipu.chat.FunctionCall;
import dev.langchain4j.model.zhipu.chat.Message;
import dev.langchain4j.model.zhipu.chat.Parameters;
import dev.langchain4j.model.zhipu.chat.Tool;
import dev.langchain4j.model.zhipu.chat.ToolCall;
import dev.langchain4j.model.zhipu.chat.ToolMessage;
import dev.langchain4j.model.zhipu.chat.ToolType;
import dev.langchain4j.model.zhipu.embedding.EmbeddingResponse;
import dev.langchain4j.model.zhipu.shared.Usage;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import static dev.langchain4j.internal.Exceptions.illegalArgument;
import static dev.langchain4j.internal.Utils.isNullOrEmpty;
import static dev.langchain4j.model.output.FinishReason.LENGTH;
import static dev.langchain4j.model.output.FinishReason.OTHER;
import static dev.langchain4j.model.output.FinishReason.STOP;
import static dev.langchain4j.model.output.FinishReason.TOOL_EXECUTION;
/**
 * Conversion helpers between langchain4j types and the ZhipuAI wire format:
 * embeddings, tool specifications, chat messages, token usage, finish reasons.
 */
class DefaultZhipuAiHelper {

    /** Converts a ZhipuAI embedding response into one {@link Embedding} per data entry. */
    public static List<Embedding> toEmbed(EmbeddingResponse response) {
        return response.getData().stream()
                .map(zhipuAiEmbedding -> Embedding.from(zhipuAiEmbedding.getEmbedding()))
                .collect(Collectors.toList());
    }

    /**
     * Extracts the text to embed from the given segments.
     * NOTE: only the FIRST segment's text is returned; any further segments are
     * silently ignored. Returns {@code null} when the list is empty.
     */
    public static String toEmbedTexts(List<TextSegment> textSegments) {
        List<String> embedText = textSegments.stream()
                .map(TextSegment::text)
                .collect(Collectors.toList());
        if (Utils.isNullOrEmpty(embedText)) {
            return null;
        }
        return embedText.get(0);
    }

    /** Maps langchain4j tool specifications onto ZhipuAI {@link Tool} descriptors. */
    public static List<Tool> toTools(List<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(toolSpecification -> Tool.from(toFunction(toolSpecification)))
                .collect(Collectors.toList());
    }

    // Builds the ZhipuAI function descriptor (name, description, parameter schema).
    private static Function toFunction(ToolSpecification toolSpecification) {
        return Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toFunctionParameters(toolSpecification.parameters()))
                .build();
    }

    // Copies the properties/required lists of the tool schema into ZhipuAI Parameters.
    private static Parameters toFunctionParameters(ToolParameters toolParameters) {
        return Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }

    /** Converts a whole langchain4j conversation into ZhipuAI wire messages. */
    public static List<Message> toZhipuAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .map(DefaultZhipuAiHelper::toZhipuAiMessage)
                .collect(Collectors.toList());
    }

    /**
     * Converts a single langchain4j message into its ZhipuAI counterpart,
     * dispatching on the concrete message type. AI messages carrying tool
     * execution requests are mapped to assistant messages with tool calls.
     *
     * @throws RuntimeException (via {@code illegalArgument}) for unknown message types
     */
    private static Message toZhipuAiMessage(ChatMessage message) {
        if (message instanceof SystemMessage) {
            SystemMessage systemMessage = (SystemMessage) message;
            // Fully qualified: ZhipuAI has its own SystemMessage class shadowed by the import.
            return dev.langchain4j.model.zhipu.chat.SystemMessage.builder()
                    .content(systemMessage.text())
                    .build();
        }
        if (message instanceof UserMessage) {
            UserMessage userMessage = (UserMessage) message;
            return dev.langchain4j.model.zhipu.chat.UserMessage.builder()
                    .content(userMessage.text())
                    .build();
        }
        if (message instanceof AiMessage) {
            AiMessage aiMessage = (AiMessage) message;
            if (!aiMessage.hasToolExecutionRequests()) {
                // Plain assistant text — no tool calls to translate.
                return AssistantMessage.builder()
                        .content(aiMessage.text())
                        .build();
            }
            // Translate each tool execution request into a ZhipuAI function tool call.
            List<ToolCall> toolCallArrayList = new ArrayList<>();
            for (ToolExecutionRequest executionRequest : aiMessage.toolExecutionRequests()) {
                toolCallArrayList.add(ToolCall.builder()
                        .function(
                                FunctionCall.builder()
                                        .name(executionRequest.name())
                                        .arguments(executionRequest.arguments())
                                        .build()
                        )
                        .type(ToolType.FUNCTION)
                        .id(executionRequest.id())
                        .build()
                );
            }
            return AssistantMessage.builder()
                    .content(aiMessage.text())
                    .toolCalls(toolCallArrayList)
                    .build();
        }
        if (message instanceof ToolExecutionResultMessage) {
            ToolExecutionResultMessage resultMessage = (ToolExecutionResultMessage) message;
            return ToolMessage.builder()
                    .content(resultMessage.text())
                    .build();
        }
        throw illegalArgument("Unknown message type: " + message.type());
    }

    /**
     * Builds an {@link AiMessage} from the first choice of a completion response:
     * plain text when there are no tool calls, otherwise the converted tool
     * execution requests.
     */
    public static AiMessage aiMessageFrom(ChatCompletionResponse response) {
        Message message = response.getChoices().get(0).getMessage();
        AssistantMessage assistantMessage = (AssistantMessage) message;
        if (isNullOrEmpty(assistantMessage.getToolCalls())) {
            return AiMessage.from(assistantMessage.getContent());
        }
        return AiMessage.from(specificationsFrom(assistantMessage.getToolCalls()));
    }

    /** Converts ZhipuAI tool calls back into langchain4j tool execution requests. */
    public static List<ToolExecutionRequest> specificationsFrom(List<ToolCall> toolCalls) {
        List<ToolExecutionRequest> specifications = new ArrayList<>(toolCalls.size());
        for (ToolCall toolCall : toolCalls) {
            specifications.add(
                    ToolExecutionRequest.builder()
                            .id(toolCall.getId())
                            .name(toolCall.getFunction().getName())
                            .arguments(toolCall.getFunction().getArguments())
                            .build()
            );
        }
        return specifications;
    }

    /** Maps ZhipuAI usage counters onto a {@link TokenUsage}; {@code null} passes through. */
    public static TokenUsage tokenUsageFrom(Usage zhipuUsage) {
        if (zhipuUsage == null) {
            return null;
        }
        return new TokenUsage(
                zhipuUsage.getPromptTokens(),
                zhipuUsage.getCompletionTokens(),
                zhipuUsage.getTotalTokens()
        );
    }

    /**
     * Maps ZhipuAI finish-reason strings onto {@link FinishReason} values.
     * Unrecognised (non-null) strings fall back to {@code OTHER}.
     */
    public static FinishReason finishReasonFrom(String finishReason) {
        if (finishReason == null) {
            return null;
        }
        switch (finishReason) {
            case "stop":
                return STOP;
            case "length":
                return LENGTH;
            case "tool_calls":
                return TOOL_EXECUTION;
            default:
                return OTHER;
        }
    }
}
| [
"dev.langchain4j.model.zhipu.chat.ToolMessage.builder",
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
"dev.langchain4j.model.zhipu.chat.Function.builder",
"dev.langchain4j.model.zhipu.chat.AssistantMessage.builder",
"dev.langchain4j.model.zhipu.chat.FunctionCall.builder",
"dev.langchain4j.model.zhipu.chat.Parameters.builder",
"dev.langchain4j.model.zhipu.chat.ToolCall.builder"
] | [((2790, 3025), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 3000), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 2918), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 2856), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((3133, 3288), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((3133, 3263), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((3133, 3210), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((4327, 4437), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((4327, 4404), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((4651, 5119), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 5086), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 5035), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 4986), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4737, 4960), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((4737, 4911), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((4737, 4830), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((5172, 5324), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5172, 5295), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5172, 5245), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5510, 5611), 'dev.langchain4j.model.zhipu.chat.ToolMessage.builder'), ((5510, 5582), 'dev.langchain4j.model.zhipu.chat.ToolMessage.builder'), ((6430, 6693), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6656), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6578), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6510), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
import dev.langchain4j.model.azure.AzureOpenAiChatModel;
public class AzureOpenAiChatModelExamples {

    static class Simple_Prompt {

        /**
         * Minimal example: build an Azure OpenAI chat model from environment
         * variables and print the completion for a single prompt.
         */
        public static void main(String[] args) {
            // Credentials and deployment come from the environment so the
            // example never hard-codes secrets.
            AzureOpenAiChatModel model = AzureOpenAiChatModel.builder()
                    .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
                    .apiKey(System.getenv("AZURE_OPENAI_KEY"))
                    .deploymentName(System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"))
                    .temperature(0.3)
                    .logRequestsAndResponses(true)
                    .build();
            String prompt = "Provide 3 short bullet points explaining why Java is awesome";
            System.out.println(model.generate(prompt));
        }
    }
}
| [
"dev.langchain4j.model.azure.AzureOpenAiChatModel.builder"
] | [((228, 592), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 563), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 512), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 474), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 391), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 321), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder')] |
package ma.enset.projet.Dao;
import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore;
import java.time.Duration;
/**
 * DAO that opens connections to a Chroma vector store. Private fields use
 * camelCase; the original CHROMA_*-named public accessors are kept so the
 * external API is unchanged.
 */
public class VectorBd implements Dao<ChromaEmbeddingStore> {

    // Chroma server base URL, e.g. http://localhost:8000
    private String baseUrl;
    // Name of the target Chroma collection.
    private String collectionName;

    public VectorBd(String CHROMA_BASE_URL, String CHROMA_COLLECTION_NAME) {
        this.baseUrl = CHROMA_BASE_URL;
        this.collectionName = CHROMA_COLLECTION_NAME;
    }

    public VectorBd() {
    }

    public String getCHROMA_BASE_URL() {
        return baseUrl;
    }

    public void setCHROMA_BASE_URL(String CHROMA_BASE_URL) {
        this.baseUrl = CHROMA_BASE_URL;
    }

    public String getCHROMA_COLLECTION_NAME() {
        return collectionName;
    }

    public void setCHROMA_COLLECTION_NAME(String CHROMA_COLLECTION_NAME) {
        this.collectionName = CHROMA_COLLECTION_NAME;
    }

    /** Opens a new Chroma embedding store connection with a 6-second timeout. */
    @Override
    public ChromaEmbeddingStore getConnection() {
        return ChromaEmbeddingStore.builder()
                .timeout(Duration.ofSeconds(6))
                .baseUrl(baseUrl)
                .collectionName(collectionName)
                .build();
    }
}
| [
"dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder"
] | [((1005, 1206), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1181), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1133), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1077), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder')] |
package de.htwg.rag.ingestor;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.pgvector.PgVectorEmbeddingStore;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {

    @Inject
    PgVectorEmbeddingStore store;

    @Inject
    AllMiniLmL6V2QuantizedEmbeddingModel embeddingModel;

    /**
     * Splits, embeds and stores the given documents in the pgvector store.
     * Documents are chunked recursively into 300-token segments with a
     * 20-token overlap; adapt the overlap or splitter for better retrieval.
     *
     * @param documents documents to ingest; must not be empty
     * @throws IllegalArgumentException if {@code documents} is empty
     */
    public void ingest(List<Document> documents) {
        // Fail fast on empty input before constructing the ingestor
        // (previously validation only happened after the builder ran).
        if (documents.isEmpty()) {
            throw new IllegalArgumentException("No documents to ingest");
        }
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(300, 20))
                .build();
        ingestor.ingest(documents);
    }

    public void setStore(PgVectorEmbeddingStore store) {
        this.store = store;
    }

    public void setEmbeddingModel(AllMiniLmL6V2QuantizedEmbeddingModel embeddingModel) {
        this.embeddingModel = embeddingModel;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((896, 1094), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 1069), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 1015), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 967), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.magicrepokit.chat.component;
import cn.hutool.core.util.StrUtil;
import com.magicrepokit.chat.constant.GptModel;
import com.magicrepokit.langchain.config.ConfigProperties;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
@Component
@AllArgsConstructor
@Slf4j
public class LangchainComponent {

    private ConfigProperties langchainConfigProperties;

    // TODO(security): this API key is committed to source control — rotate it and
    // load it from configuration/environment instead of hardcoding it here.
    private static final String OPENAI_API_KEY = "sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd";

    // The two base URLs differ on purpose: the embedding endpoint uses the "/v1" suffix.
    private static final String EMBEDDING_BASE_URL = "https://api.chatanywhere.tech/v1";
    private static final String CHAT_BASE_URL = "https://api.chatanywhere.tech/";

    // Vector dimension of the OpenAI embedding model stored in Elasticsearch.
    private static final int EMBEDDING_DIMENSION = 1536;

    /**
     * Builds an Elasticsearch-backed embedding store for the given index.
     *
     * @param indexName Elasticsearch index name
     * @return the store, or {@code null} when Elasticsearch support is disabled
     *         (callers must handle the null case)
     */
    public ElasticsearchEmbeddingStore getDefaultElasticsearchEmbeddingStore(String indexName) {
        if (!langchainConfigProperties.getEnabled()) {
            log.error("未开启elasticsearch");
            return null;
        }
        String url = StrUtil.format("{}:{}",
                langchainConfigProperties.getElasticHost(),
                langchainConfigProperties.getElasticPort());
        return ElasticsearchEmbeddingStore.builder()
                .serverUrl(url)
                .userName(langchainConfigProperties.getElasticUsername())
                .password(langchainConfigProperties.getElasticPassword())
                .indexName(indexName)
                .dimension(EMBEDDING_DIMENSION)
                .build();
    }

    /**
     * Retrieves the text segments most relevant to the question using the
     * default limits (at most 5 results, minimum score 0.8).
     *
     * @param indexName Elasticsearch index name
     * @param question  query text
     * @return matching text segments
     */
    public List<TextSegment> findRelevant(String indexName, String question) {
        // Delegates to the 4-arg overload; 5 and 0.8 pass its >0 guards unchanged,
        // so behavior is identical to the previous duplicated implementation.
        return findRelevant(indexName, question, 5, 0.8);
    }

    /**
     * Retrieves the text segments most relevant to the question.
     *
     * @param indexName Elasticsearch index name
     * @param question  query text
     * @param maxResult maximum number of results; defaults to 5 when {@code <= 0}
     * @param minScore  minimum similarity score; defaults to 0.7 when {@code <= 0}
     * @return matching text segments
     */
    public List<TextSegment> findRelevant(String indexName, String question, int maxResult, double minScore) {
        if (maxResult <= 0) {
            maxResult = 5;
        }
        if (minScore <= 0) {
            minScore = 0.7;
        }
        EmbeddingStoreRetriever embeddingStoreRetriever = new EmbeddingStoreRetriever(
                getDefaultElasticsearchEmbeddingStore(indexName),
                getDefaultEmbeddingModel(),
                maxResult,
                minScore
        );
        return embeddingStoreRetriever.findRelevant(question);
    }

    /**
     * Returns the embedding model used to vectorize text.
     */
    public EmbeddingModel getDefaultEmbeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(OPENAI_API_KEY)
                .baseUrl(EMBEDDING_BASE_URL)
                .build();
    }

    /**
     * Returns a tokenizer matching the given GPT model.
     *
     * @param gptModel model whose actual model name selects the tokenizer
     * @return tokenizer for that model
     */
    public Tokenizer getOpenAiTokenizer(GptModel gptModel) {
        return new OpenAiTokenizer(gptModel.getAcutualModelName());
    }

    /**
     * Builds the default streaming chat model for the given GPT model.
     */
    private StreamingChatLanguageModel getStreamingDefaultChatLanguageModel(GptModel gptModel) {
        return OpenAiStreamingChatModel.builder()
                .apiKey(OPENAI_API_KEY)
                .baseUrl(CHAT_BASE_URL)
                .modelName(gptModel.getAcutualModelName())
                .build();
    }

    /**
     * Builds the default streaming chat model with an explicit temperature.
     */
    private StreamingChatLanguageModel getStreamingDefaultChatLanguageModel(GptModel gptModel, Double temperature) {
        return OpenAiStreamingChatModel.builder()
                .apiKey(OPENAI_API_KEY)
                .baseUrl(CHAT_BASE_URL)
                .modelName(gptModel.getAcutualModelName())
                .temperature(temperature)
                .build();
    }

    /**
     * Returns the streaming chat model for the given GPT model.
     *
     * @return StreamingChatLanguageModel
     */
    public StreamingChatLanguageModel getStreamingChatLanguageModel(GptModel gptModel) {
        // TODO: look up the current user's own API key and prefer it over the default.
        return getStreamingDefaultChatLanguageModel(gptModel);
    }

    public StreamingChatLanguageModel getStreamingChatLanguageModel(GptModel gptModel, Double temperature) {
        // TODO: look up the current user's own API key and prefer it over the default.
        return getStreamingDefaultChatLanguageModel(gptModel, temperature);
    }

    /**
     * Builds the default system prompt identifying the assistant as a
     * MagicRepoKit model.
     */
    public SystemMessage getDefalutSystemMessage(GptModel gptModel) {
        String modelName = gptModel.getModelName();
        String prompt = "你是MagicRepoKit研发的" + modelName + "模型。别人问你有关你的身份信息,你可以回答:我是MagicRepoKit研发的" + modelName + "模型。";
        return new SystemMessage(prompt);
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder",
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder"
] | [((1628, 1941), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1916), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1883), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1845), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1771), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1697), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((3351, 3495), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3351, 3487), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3351, 3443), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3933, 4189), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4164), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4105), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4046), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4627), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4602), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4560), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4501), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4442), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder')] |
package com.example.demo;
import java.time.Duration;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;
public class _03_StructuredPromptTemplates {

    @StructuredPrompt({
            "Design a medical insurance plan for {{coverageType}} with the following features:",
            "Structure your answer as follows:",
            "Insurance Plan Name: ...",
            "Description: ...",
            "Coverage Duration: ...",
            "Covered Medical Services:",
            "- ...",
            "- ...",
            "Financial Details:",
            "Premium: ...",
            "Deductible: ...",
            "Claims Process:",
            "- ...",
            "- ..."
    })
    static class CreateMedicalInsurancePlan{
        String coverageType;

        CreateMedicalInsurancePlan(String coverageType){
            this.coverageType = coverageType;
        }
    }

    public static void main(String[] args) {
        // Chat model with a generous 60-second timeout for the long structured answer.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .timeout(Duration.ofSeconds(60))
                .build();

        // Other coverage types to try:
        ///ComprehensiveFamilyPlan - DentalAndVisionInsurance - MaternityCoverage
        CreateMedicalInsurancePlan request = new CreateMedicalInsurancePlan("BasicHealthCoverage");

        // The @StructuredPrompt template is filled in from the object's fields.
        Prompt structuredPrompt = StructuredPromptProcessor.toPrompt(request);
        System.out.println(chatModel.generate(structuredPrompt.text()));
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1297, 1394), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1297, 1381), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1297, 1358), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package ai.equity.salt.openai.model;
import ai.equity.salt.openai.OpenAiProperties;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiLanguageModel;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106;
@Component
@RequiredArgsConstructor
public class OpenAiModelFactory {

    private final OpenAiProperties properties;

    /**
     * Builds a completion-style OpenAI language model with request/response
     * logging enabled.
     */
    public OpenAiLanguageModel create() {
        return OpenAiLanguageModel.builder()
                .logRequests(true)
                .logResponses(true)
                .apiKey(properties.key())
                .build();
    }

    /**
     * Builds the default chat model: GPT-3.5 Turbo 1106, fully deterministic
     * (temperature and topP both 0.0), capped at 1024 completion tokens,
     * with request/response logging enabled.
     */
    public OpenAiChatModel createDefaultChatModel() {
        return OpenAiChatModel.builder()
                .apiKey(properties.key())
                .modelName(GPT_3_5_TURBO_1106)
                .temperature(0.0)
                .topP(0.0)
                .maxTokens(1024)
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiLanguageModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((540, 707), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 682), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 646), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 611), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((785, 1089), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 1064), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 1028), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 993), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 966), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 932), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 899), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 857), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.openai;
import static dev.langchain4j.internal.RetryUtils.withRetry;
import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
import dev.ai4j.openai4j.image.GenerateImagesRequest;
import dev.ai4j.openai4j.image.GenerateImagesResponse;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.output.Response;
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
public class QuarkusOpenAiImageModel implements ImageModel {

    /**
     * OpenAI image-generation model backed by {@code QuarkusOpenAiClient}.
     * Optionally persists every generated image to a local directory and
     * rewrites the image URL to point at the local copy.
     */
    private final String modelName;
    private final String size;
    private final String quality;
    private final String style;
    private final Optional<String> user;
    private final String responseFormat;
    private final Integer maxRetries;
    // When present, generated images are downloaded/decoded into this directory.
    private final Optional<Path> persistDirectory;

    private final QuarkusOpenAiClient client;

    /**
     * Creates the model and eagerly builds the underlying HTTP client; the same
     * timeout is applied to call/connect/read/write.
     */
    public QuarkusOpenAiImageModel(String baseUrl, String apiKey, String organizationId, String modelName, String size,
            String quality, String style, Optional<String> user, String responseFormat, Duration timeout,
            Integer maxRetries, Boolean logRequests, Boolean logResponses,
            Optional<Path> persistDirectory) {
        this.modelName = modelName;
        this.size = size;
        this.quality = quality;
        this.style = style;
        this.user = user;
        this.responseFormat = responseFormat;
        this.maxRetries = maxRetries;
        this.persistDirectory = persistDirectory;

        this.client = QuarkusOpenAiClient.builder()
                .baseUrl(baseUrl)
                .openAiApiKey(apiKey)
                .organizationId(organizationId)
                .callTimeout(timeout)
                .connectTimeout(timeout)
                .readTimeout(timeout)
                .writeTimeout(timeout)
                .logRequests(logRequests)
                .logResponses(logResponses)
                .build();
    }

    /**
     * Generates a single image for the prompt, retrying up to {@code maxRetries}
     * times, and persists it locally if a persist directory is configured.
     */
    @Override
    public Response<Image> generate(String prompt) {
        GenerateImagesRequest request = requestBuilder(prompt).build();

        GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute();

        persistIfNecessary(response);

        return Response.from(fromImageData(response.data().get(0)));
    }

    /**
     * Generates {@code n} images for the prompt; otherwise identical to
     * {@link #generate(String)}.
     */
    @Override
    public Response<List<Image>> generate(String prompt, int n) {
        GenerateImagesRequest request = requestBuilder(prompt).n(n).build();

        GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute();

        persistIfNecessary(response);

        return Response.from(
                response.data().stream().map(QuarkusOpenAiImageModel::fromImageData).collect(Collectors.toList()));
    }

    /**
     * If a persist directory is configured, stores each returned image there
     * (downloading from its URL, or decoding the base64 payload when no URL is
     * present) and MUTATES the response in place so each image's URL points at
     * the local file. I/O failures surface as {@link UncheckedIOException}.
     */
    private void persistIfNecessary(GenerateImagesResponse response) {
        if (persistDirectory.isEmpty()) {
            return;
        }
        Path persistTo = persistDirectory.get();
        try {
            Files.createDirectories(persistTo);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        for (GenerateImagesResponse.ImageData data : response.data()) {
            try {
                data.url(
                        data.url() != null
                                ? FilePersistor.persistFromUri(data.url(), persistTo).toUri()
                                : FilePersistor.persistFromBase64String(data.b64Json(), persistTo).toUri());
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    }

    // Maps the wire-format image data onto the langchain4j Image type.
    private static Image fromImageData(GenerateImagesResponse.ImageData data) {
        return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build();
    }

    /**
     * Pre-populates a request builder with the configured size/quality/style/
     * response format. DALL-E 2 must be selected explicitly on the request;
     * the user tag is attached only when present.
     */
    private GenerateImagesRequest.Builder requestBuilder(String prompt) {
        var builder = GenerateImagesRequest
                .builder()
                .prompt(prompt)
                .size(size)
                .quality(quality)
                .style(style)
                .responseFormat(responseFormat);
        if (DALL_E_2.equals(modelName)) {
            builder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2);
        }
        if (user.isPresent()) {
            builder.user(user.get());
        }
        return builder;
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Fluent builder mirroring every constructor parameter of the model. */
    public static class Builder {
        private String baseUrl;
        private String apiKey;
        private String organizationId;
        private String modelName;
        private String size;
        private String quality;
        private String style;
        private Optional<String> user;
        private String responseFormat;
        private Duration timeout;
        private Integer maxRetries;
        private Boolean logRequests;
        private Boolean logResponses;
        private Optional<Path> persistDirectory;

        public Builder baseUrl(String baseUrl) {
            this.baseUrl = baseUrl;
            return this;
        }

        public Builder apiKey(String apiKey) {
            this.apiKey = apiKey;
            return this;
        }

        public Builder organizationId(String organizationId) {
            this.organizationId = organizationId;
            return this;
        }

        public Builder timeout(Duration timeout) {
            this.timeout = timeout;
            return this;
        }

        public Builder maxRetries(Integer maxRetries) {
            this.maxRetries = maxRetries;
            return this;
        }

        public Builder logRequests(Boolean logRequests) {
            this.logRequests = logRequests;
            return this;
        }

        public Builder logResponses(Boolean logResponses) {
            this.logResponses = logResponses;
            return this;
        }

        public Builder modelName(String modelName) {
            this.modelName = modelName;
            return this;
        }

        public Builder size(String size) {
            this.size = size;
            return this;
        }

        public Builder quality(String quality) {
            this.quality = quality;
            return this;
        }

        public Builder style(String style) {
            this.style = style;
            return this;
        }

        public Builder user(Optional<String> user) {
            this.user = user;
            return this;
        }

        public Builder responseFormat(String responseFormat) {
            this.responseFormat = responseFormat;
            return this;
        }

        public Builder persistDirectory(Optional<Path> persistDirectory) {
            this.persistDirectory = persistDirectory;
            return this;
        }

        public QuarkusOpenAiImageModel build() {
            return new QuarkusOpenAiImageModel(baseUrl, apiKey, organizationId, modelName, size, quality, style, user,
                    responseFormat, timeout, maxRetries, logRequests, logResponses,
                    persistDirectory);
        }
    }

    /**
     * Copied from {@code dev.ai4j.openai4j.FilePersistor}
     */
    private static class FilePersistor {

        // Downloads the resource at the URI into destinationFolder, keeping the
        // URI's file name; existing files with the same name are overwritten.
        static Path persistFromUri(URI uri, Path destinationFolder) {
            try {
                Path fileName = Paths.get(uri.getPath()).getFileName();
                Path destinationFilePath = destinationFolder.resolve(fileName);
                try (InputStream inputStream = uri.toURL().openStream()) {
                    java.nio.file.Files.copy(inputStream, destinationFilePath, StandardCopyOption.REPLACE_EXISTING);
                }
                return destinationFilePath;
            } catch (IOException e) {
                throw new RuntimeException("Error persisting file from URI: " + uri, e);
            }
        }

        // Decodes a base64 payload and writes it under a random 20-char file name.
        public static Path persistFromBase64String(String base64EncodedString, Path destinationFolder) throws IOException {
            byte[] decodedBytes = Base64.getDecoder().decode(base64EncodedString);
            Path destinationFile = destinationFolder.resolve(randomFileName());
            Files.write(destinationFile, decodedBytes, StandardOpenOption.CREATE);
            return destinationFile;
        }

        // UUID with dashes stripped, truncated to 20 characters.
        private static String randomFileName() {
            return UUID.randomUUID().toString().replaceAll("-", "").substring(0, 20);
        }
    }
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((4164, 4266), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4258), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4222), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4195), 'dev.langchain4j.data.image.Image.builder'), ((7813, 7851), 'java.nio.file.Paths.get'), ((8498, 8545), 'java.util.Base64.getDecoder'), ((8827, 8892), 'java.util.UUID.randomUUID'), ((8827, 8875), 'java.util.UUID.randomUUID'), ((8827, 8855), 'java.util.UUID.randomUUID')] |
package org.acme;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.model.output.Response;
import org.acme.persona.Persona;
import java.util.List;
import java.util.Scanner;
import java.util.concurrent.CountDownLatch;
public class StreamingChatRunner {

    public static final String MODEL_NAME = "dolphin-mixtral";
    public static final String BASE_URL = "http://localhost:11434";

    /**
     * Simple REPL against a local Ollama model: reads a line, streams the
     * model's reply to stdout, repeats until the user types "bye" (the final
     * "bye" message is still sent to the model, matching the original loop).
     */
    public static void main(String[] args) throws InterruptedException {
        // Build the streaming chat model.
        StreamingChatLanguageModel model = OllamaStreamingChatModel.builder().baseUrl(BASE_URL).modelName(MODEL_NAME)
                .temperature(0.7).repeatPenalty(1.5).build();

        // try-with-resources: the Scanner (and System.in) was previously leaked.
        try (var scanner = new Scanner(System.in)) {
            var input = "";
            do {
                System.out.print("> ");
                input = scanner.nextLine();
                // BUGFIX: a CountDownLatch cannot be reset, so the previous single
                // shared latch only blocked on the FIRST turn; every later await()
                // returned immediately and the next prompt interleaved with the
                // still-streaming reply. Create a fresh latch per turn instead.
                var latch = new CountDownLatch(1);
                converse(model, input, latch);
                latch.await();
            }
            while (!input.equals("bye"));
        }
    }

    /**
     * Sends one user message (plus the persona system message) to the model and
     * streams tokens to stdout, releasing {@code latch} when the response
     * completes or fails.
     */
    private static void converse(StreamingChatLanguageModel model, String message, CountDownLatch latch) {
        model.generate(
                List.of(Persona.ABUSIVE.getSystemMessage(), UserMessage.from(message)),
                new StreamingResponseHandler<>() {
                    @Override
                    public void onComplete(Response<AiMessage> response) {
                        System.out.println();
                        latch.countDown();
                    }

                    @Override
                    public void onNext(String token) {
                        System.out.print(token);
                    }

                    @Override
                    public void onError(Throwable error) {
                        error.printStackTrace();
                        // BUGFIX: without this, an error left main blocked forever
                        // in latch.await().
                        latch.countDown();
                    }
                }
        );
    }
}
| [
"dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder"
] | [((791, 926), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 918), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 899), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 865), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 843), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((1411, 1445), 'org.acme.persona.Persona.ABUSIVE.getSystemMessage')] |
package net.savantly.mainbot.service.languagetools;
import java.time.Duration;
import java.util.List;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import lombok.RequiredArgsConstructor;
import net.savantly.mainbot.config.OpenAIConfig;
@Configuration
@RequiredArgsConstructor
public class LanguageToolsConfiguration {

    private final OpenAIConfig openAIConfig;

    @Value("${language-tools.log-requests:false}")
    private boolean logRequests = false;

    @Value("${language-tools.log-responses:true}")
    private boolean logResponses = true;

    /**
     * Wraps an OpenAI chat model behind the {@link LanguageToolModel}
     * interface. Active only when {@code openai.enabled=true}.
     */
    @Bean
    @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "true")
    public LanguageToolModel getLanguageToolModel() {
        // API keys: https://platform.openai.com/account/api-keys
        var delegate = OpenAiChatModel.builder()
                .apiKey(openAIConfig.getApiKey())
                .modelName(openAIConfig.getChatModelId())
                .temperature(openAIConfig.getTemperature())
                .timeout(Duration.ofSeconds(openAIConfig.getTimeoutSeconds()))
                .logRequests(logRequests)
                .logResponses(logResponses)
                .build();
        // Thin adapter: every overload forwards straight to the delegate model.
        return new LanguageToolModel() {
            @Override
            public Response<AiMessage> generate(List<ChatMessage> messages) {
                return delegate.generate(messages);
            }

            @Override
            public Response<AiMessage> generate(List<ChatMessage> messages, ToolSpecification toolSpecification) {
                return delegate.generate(messages, toolSpecification);
            }

            @Override
            public Response<AiMessage> generate(List<ChatMessage> messages,
                    List<ToolSpecification> toolSpecifications) {
                return delegate.generate(messages, toolSpecifications);
            }
        };
    }

    /** Mock fallback used when the OpenAI integration is disabled or unset. */
    @Bean
    @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "false", matchIfMissing = true)
    public LanguageToolModel getMockLanguageToolModel() {
        return new MockChatLanguageModel();
    }

    /** Assembles the high-level {@link LanguageTools} AI service on top of the model. */
    @Bean
    public LanguageTools getLanguageTools(LanguageToolModel model) {
        return AiServices.builder(LanguageTools.class)
                .chatLanguageModel(model)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1275, 1688), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1663), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1584), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1542), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1498), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1438), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1332), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2717, 2823), 'dev.langchain4j.service.AiServices.builder'), ((2717, 2798), 'dev.langchain4j.service.AiServices.builder')] |
package de.htwg.rag.dataTools;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import jakarta.enterprise.context.ApplicationScoped;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ApplicationScoped
public class Summarizer {

    /**
     * Summarizes the given text by splitting it into line-based batches of 30,
     * summarizing each batch through the OpenAI chat model, and concatenating
     * the partial summaries.
     *
     * @param text the text to summarize; must be non-null and non-empty
     * @return the concatenated batch summaries
     * @throws IllegalArgumentException if {@code text} is null or empty
     */
    public String summarize(String text) {
        if (text == null || text.isEmpty()) {
            throw new IllegalArgumentException("Text is empty or null");
        }
        // @RegisterAiService cannot be used on TextSummarizer because it offers
        // no way to set chatMemory = null, so the model is wired up manually.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_APIKEY"))
                .modelName("gpt-3.5-turbo")
                .timeout(Duration.ofMinutes(5))
                .logRequests(true)
                .logResponses(true)
                .build();
        TextSummarizerService summarizer = AiServices.create(TextSummarizerService.class, model);

        List<String> lines = Arrays.asList(text.split("\n"));
        StringBuilder summarizedText = new StringBuilder();
        // Walk the lines in windows of 30, summarizing each window directly
        // instead of materializing an intermediate list of chunks.
        for (int start = 0; start < lines.size(); start += 30) {
            List<String> batch = lines.subList(start, Math.min(start + 30, lines.size()));
            summarizedText.append(summarizer.summarize(String.join("\n", batch)));
        }
        return summarizedText.toString();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((764, 1033), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 1008), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 972), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 937), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 889), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 845), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
public class _10_ServiceWithToolsExample {

    // See also CustomerSupportApplication and CustomerSupportApplicationTest
    // in the spring-boot-example module.

    // Tool methods the LLM may invoke; names, parameter names and descriptions
    // are part of the tool specification sent to the model.
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            System.out.println("Called stringLength() with s='" + s + "'");
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            System.out.println("Called add() with a=" + a + ", b=" + b);
            return a + b;
        }

        @Tool("Calculates the square root of a number")
        double sqrt(int x) {
            System.out.println("Called sqrt() with x=" + x);
            return Math.sqrt(x);
        }
    }

    interface Assistant {

        String chat(String userMessage);
    }

    public static void main(String[] args) {
        // Wire a GPT chat model that is allowed to call the Calculator tools.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .logRequests(false)
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(chatModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(new Calculator())
                .build();

        String question = "What is the square root of the sum of the numbers of letters in the words \"hello\" and \"world\"?";
        System.out.println(assistant.chat(question));
        // Expected along the lines of: "The square root of the sum of the number
        // of letters in the words \"hello\" and \"world\" is approximately 3.162."
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1202, 1336), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1202, 1311), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1202, 1275), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1369, 1585), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1560), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1487), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1446), 'dev.langchain4j.service.AiServices.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
@Slf4j
public class OnePassSqlGeneration implements SqlGeneration, InitializingBean {

    // Dedicated logger tracing the key steps of the text-to-SQL pipeline.
    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");

    @Autowired
    private ChatLanguageModel chatLanguageModel;

    @Autowired
    private SqlExamplarLoader sqlExampleLoader;

    @Autowired
    private OptimizationConfig optimizationConfig;

    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;

    /**
     * Generates an S2SQL result for the request in a single LLM pass:
     * retrieves few-shot SQL examples, builds one combined schema-linking +
     * SQL prompt, calls the chat model once, and parses schema links and SQL
     * out of the raw response.
     *
     * @param llmReq    the text-to-SQL request (query text, schema, etc.)
     * @param dataSetId id of the data set being queried (used for logging here)
     * @return LLMResp with the original query text and a map of generated SQL
     *         to its response metadata (weight fixed at 1.0, plus the few-shots)
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        //1.retriever sqlExamples
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
        List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(),
                optimizationConfig.getText2sqlExampleNum());

        //2.generator linking and sql prompt by sqlExamples,and generate response.
        String promptStr = sqlPromptGenerator.generatorLinkingAndSqlPrompt(llmReq, sqlExamples);
        // NOTE(review): promptStr is already a String; passing it through
        // JsonUtil.toString presumably JSON-escapes it before templating —
        // confirm this double-encoding is intentional.
        Prompt prompt = PromptTemplate.from(JsonUtil.toString(promptStr)).apply(new HashMap<>());
        keyPipelineLog.info("request prompt:{}", prompt.toSystemMessage());
        Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
        String result = response.content().text();
        keyPipelineLog.info("model response:{}", result);
        //3.format response.
        String schemaLinkStr = OutputFormat.getSchemaLinks(response.content().text());
        String sql = OutputFormat.getSql(response.content().text());

        Map<String, LLMSqlResp> sqlRespMap = new HashMap<>();
        // Single-pass generation: exactly one SQL candidate with full weight.
        sqlRespMap.put(sql, LLMSqlResp.builder().sqlWeight(1D).fewShots(sqlExamples).build());
        keyPipelineLog.info("schemaLinkStr:{},sqlRespMap:{}", schemaLinkStr, sqlRespMap);

        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(sqlRespMap);
        return llmResp;
    }

    // Registers this strategy under ONE_PASS_AUTO_COT once dependencies are injected.
    @Override
    public void afterPropertiesSet() {
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.ONE_PASS_AUTO_COT, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((2083, 2155), 'dev.langchain4j.model.input.PromptTemplate.from'), ((2710, 2774), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder'), ((2710, 2766), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder'), ((2710, 2744), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder')] |
package com.ramesh.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
/***
 * Demonstrates plain LangChain4j embeddings (no chain): a document is split,
 * embedded and stored in memory; the segments relevant to the user's question
 * are retrieved and sent to ChatGPT in a single prompt, producing one response.
 */
public class ChatWithDocumentLive {

    // SECURITY FIX: read the OpenAI API key from the environment instead of
    // hard-coding it in source. A key committed to version control must be
    // considered leaked and revoked.
    public static String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");
    // Chat GPT model used both for tokenization (splitting) and completion.
    public static String OPENAI_MODEL = "gpt-3.5-turbo";

    public static void main(String[] args) {
        System.out.println("Loading sample document and splitting into words...");
        // Load the input document and split it into ~100-token segments
        // (no overlap) using the model's own tokenizer.
        Document document = loadDocument(".\\simpsons_adventures.txt");
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(OPENAI_MODEL));
        List<TextSegment> segments = splitter.split(document);

        // Embed every segment locally and keep vectors + segments in memory.
        System.out.println("Generating embeddings for the words in document and storing in memory...");
        EmbeddingModel embeddingModel = new AllMiniLmL6V2QuantizedEmbeddingModel();
        List<Embedding> embeddings = embeddingModel.embedAll(segments)
                .content();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);

        // Embed the question with the same model so vectors are comparable.
        System.out.println("\nGenerating embeddings for prompt \"Who is Simpson?\"");
        String question = "Who is Simpson?";
        Embedding questionEmbedding = embeddingModel.embed(question)
                .content();
        int maxResults = 3;
        double minScore = 0.7;
        // Retrieve up to 3 segments scoring at least 0.7 against the question.
        System.out.println("Getting relavant embeddings or words for prompt \"Who is Simpson?\" from the embeddings stored in memory");
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);

        System.out.println("Sending relevant embeddings and prompt \"Who is Simpson?\" to chat gpt");
        // LangChain PromptTemplate that combines the question with the retrieved context.
        PromptTemplate promptTemplate = PromptTemplate.from("Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}");
        // Join the matched segments into a single context string.
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded()
                        .text())
                .collect(joining("\n\n"));

        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);
        Prompt prompt = promptTemplate.apply(variables);

        // Chat model; the key now comes from the environment (see above).
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();

        // Single (non-streaming) call to ChatGPT with the assembled prompt.
        AiMessage aiMessage = chatModel.generate(prompt.toUserMessage())
                .content();
        System.out.println("response from ChatGPT for prompt \"Who is Simpson?\"\n");
        System.out.println(aiMessage.text());
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((4321, 4439), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4321, 4418), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4321, 4382), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import java.util.List;
public class ElasticsearchEmbeddingStoreExample {

    /**
     * Requires a local Elasticsearch instance. If you don't have one:
     * - docker pull docker.elastic.co/elasticsearch/elasticsearch:8.9.0
     * - docker run -d -p 9200:9200 -p 9300:9300 -e discovery.type=single-node -e xpack.security.enabled=false docker.elastic.co/elasticsearch/elasticsearch:8.9.0
     * - wait until the node is ready to serve (may take a few minutes)
     */
    public static void main(String[] args) throws InterruptedException {
        EmbeddingStore<TextSegment> store = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .build();

        EmbeddingModel model = new AllMiniLmL6V2EmbeddingModel();

        // Index two sample sentences, in this order.
        index(store, model, TextSegment.from("I like football."));
        index(store, model, TextSegment.from("The weather is good today."));

        // Give Elasticsearch a moment to make the new embeddings searchable.
        Thread.sleep(1000);

        // Look up the single segment closest to the query.
        Embedding queryVector = model.embed("What is your favourite sport?").content();
        List<EmbeddingMatch<TextSegment>> matches = store.findRelevant(queryVector, 1);
        EmbeddingMatch<TextSegment> bestMatch = matches.get(0);

        System.out.println(bestMatch.score());           // 0.81442887
        System.out.println(bestMatch.embedded().text()); // I like football.
    }

    // Embeds one segment and stores the vector together with its text.
    private static void index(EmbeddingStore<TextSegment> store, EmbeddingModel model, TextSegment segment) {
        Embedding vector = model.embed(segment).content();
        store.add(vector, segment);
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder"
] | [((1055, 1169), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1055, 1144), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder')] |
package org.acme;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import io.quarkus.logging.Log;
import io.smallrye.mutiny.Multi;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import org.eclipse.microprofile.reactive.messaging.Channel;
import org.eclipse.microprofile.reactive.messaging.Emitter;
@Path("/stream")
public class StreamingResource {

    // Streaming chat model: used by hello() for token-by-token generation.
    @StreamingOpenAi
    StreamingChatLanguageModel streamModel;

    // Blocking chat model: used by the memory() and chain() demos.
    @OpenAi
    ChatLanguageModel model;

    // Reactive-messaging channel "response": everything pushed through
    // 'emitter' below is replayed to SSE clients subscribed via stream().
    @Channel("response")
    Multi<String> response;

    @Channel("response")
    Emitter<String> emitter;

    /** SSE endpoint: relays every message published on the "response" channel. */
    @GET
    @Path("/")
    @Produces(MediaType.SERVER_SENT_EVENTS)
    public Multi<String> stream() {
        return response;
    }

    /**
     * Chat-memory demo: asks two related questions, keeping the first answer
     * in a token-bounded memory so the second question ("the first point")
     * can refer back to it. Output is pushed to the "response" channel.
     */
    @GET
    @Path("/memory")
    @Produces(MediaType.TEXT_PLAIN)
    public void memory() {
        Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO);
        // Memory window is bounded by token count (1000), not message count.
        ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(1000, tokenizer);
        UserMessage userMessage1 = userMessage(
                "How to write a REST endpoint in Java? ");
        chatMemory.add(userMessage1);
        emitter.send("[User]: " + userMessage1.text());
        final Response<AiMessage> response1 = model.generate(chatMemory.messages());
        // Remember the first answer so the follow-up question has context.
        chatMemory.add(response1.content());
        emitter.send("[LLM]: "+ response1.content().text());
        UserMessage userMessage2 = userMessage(
                "Create a test of the first point? " +
                        "Be short, 15 lines of code maximum.");
        chatMemory.add(userMessage2);
        emitter.send("[User]: " + userMessage2.text());
        // NOTE(review): the second answer is deliberately not added to memory —
        // it is the final exchange of this demo.
        final Response<AiMessage> response2 = model.generate(chatMemory.messages());
        emitter.send("[LLM]: " + response2.content().text());
    }

    /**
     * ConversationalChain demo: the chain manages chat memory internally, so
     * the second question can say "for that" and refer to the first answer.
     */
    @GET
    @Path("/chain")
    @Produces(MediaType.TEXT_PLAIN)
    public void chain() {
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(model)
                .build();
        String userMessage1 = "Can you give a brief explanation of Kubernetes, 3 lines max?";
        emitter.send("[User]: " + userMessage1);
        String answer1 = chain.execute(userMessage1);
        emitter.send("[LLM]: " + answer1);
        String userMessage2 = "Can you give me a YAML example to deploy an application for that?";
        emitter.send("[User]: " + userMessage2);
        String answer2 = chain.execute(userMessage2);
        emitter.send("[LLM]: " + answer2);
    }

    /**
     * Streaming demo: generates a completion token by token, forwarding each
     * token to the "response" channel as soon as it arrives.
     */
    @GET
    @Path("/ask")
    @Produces(MediaType.TEXT_PLAIN)
    public void hello() {
        String prompt = "Explain me why earth is flat";
        streamModel.generate(prompt, new StreamingResponseHandler<>() {
            @Override
            public void onNext(String token) {
                emitter.send(token);
            }

            @Override
            public void onError(Throwable error) {
                // Best-effort demo: log and drop the stream on failure.
                Log.error(error.getMessage());
            }
        });
    }
}
| [
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((2851, 2939), 'dev.langchain4j.chain.ConversationalChain.builder'), ((2851, 2918), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package com.redhat.hackathon;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {

    /**
     * Feature flag: ingestion is switched off for the hackathon scope.
     * Replaces the original always-true literal comparison
     * ({@code "disabled".equals("disabled")}) with an explicit named constant,
     * so the short-circuit is obvious and trivially re-enabled.
     */
    private static final boolean INGESTION_DISABLED = true;

    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;

    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    /**
     * Startup hook: loads every document under src/main/resources/catalog,
     * splits it into 500-character chunks (no overlap) and stores the chunk
     * embeddings in Redis. Currently short-circuited by
     * {@link #INGESTION_DISABLED}.
     */
    public void ingest(@Observes StartupEvent event) {
        if (INGESTION_DISABLED) {
            System.out.println("Skipping ingesting document for RAG; seems not really applicable for our hackathon scope.\n");
            return;
        }
        System.out.printf("Ingesting documents...%n");
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
        var ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(500, 0))
                .build();
        ingestor.ingest(documents);
        System.out.printf("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1596, 1793), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1768), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1715), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1667), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemini.workshop;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import java.util.Map;
public class Step6_TextClassification {

    public static void main(String[] args) {
        // Gemini configured for very short answers: the classifier only has
        // to emit a single sentiment label, so 10 output tokens suffice.
        ChatLanguageModel gemini = VertexAiGeminiChatModel.builder()
            .project(System.getenv("PROJECT_ID"))
            .location(System.getenv("LOCATION"))
            .modelName("gemini-1.0-pro")
            .maxOutputTokens(10)
            .maxRetries(3)
            .build();

        // Few-shot prompt: three labelled examples teach the model the
        // expected POSITIVE / NEUTRAL / NEGATIVE output format.
        PromptTemplate fewShotTemplate = PromptTemplate.from("""
            Analyze the sentiment of the text below. Respond only with one word to describe the sentiment.
            INPUT: This is fantastic news!
            OUTPUT: POSITIVE
            INPUT: Pi is roughly equal to 3.14
            OUTPUT: NEUTRAL
            INPUT: I really disliked the pizza. Who would use pineapples as a pizza topping?
            OUTPUT: NEGATIVE
            INPUT: {{text}}
            OUTPUT:
            """);

        // Fill the {{text}} placeholder with the sentence to classify.
        Prompt classificationPrompt = fewShotTemplate.apply(
            Map.of("text", "I love strawberries!"));

        Response<AiMessage> modelResponse = gemini.generate(classificationPrompt.toUserMessage());

        System.out.println(modelResponse.content().text());
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1068, 1322), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1301), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1274), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1241), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1200), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1151), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder')] |
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore;
import java.util.List;
public class PineconeEmbeddingStoreExample {

    public static void main(String[] args) {
        // Pinecone-backed vector store. The index dimensions must match the
        // embedding model (384 for all-MiniLM-L6-v2, 1536 for
        // text-embedding-ada-002, etc.). The project ID is visible in the
        // Pinecone console URL:
        // https://app.pinecone.io/organizations/{organization}/projects/{environment}:{projectId}/indexes
        EmbeddingStore<TextSegment> store = PineconeEmbeddingStore.builder()
                .apiKey(System.getenv("PINECONE_API_KEY"))
                .environment("northamerica-northeast1-gcp")
                .projectId("19a129b")
                .index("test")
                .build();

        EmbeddingModel model = new AllMiniLmL6V2EmbeddingModel();

        // Index two sample sentences, in this order.
        TextSegment footballSegment = TextSegment.from("I like football.");
        store.add(model.embed(footballSegment).content(), footballSegment);

        TextSegment weatherSegment = TextSegment.from("The weather is good today.");
        store.add(model.embed(weatherSegment).content(), weatherSegment);

        // Look up the single segment closest to the query.
        Embedding queryVector = model.embed("What is your favourite sport?").content();
        List<EmbeddingMatch<TextSegment>> matches = store.findRelevant(queryVector, 1);
        EmbeddingMatch<TextSegment> bestMatch = matches.get(0);

        System.out.println(bestMatch.score());           // 0.8144288515898701
        System.out.println(bestMatch.embedded().text()); // I like football.
    }
}
| [
"dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder"
] | [((573, 1192), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 1167), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 941), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 724), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 664), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder')] |
package org.agoncal.fascicle.langchain4j.testingdebugging;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class AuthorService {

  private static final String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");

  // Authors the service can write about; index 0..2.
  private String[] scifiAuthors = {"Isaac Asimov", "Nora Jemisin", "Douglas Adams"};

  String url;

  /**
   * No-arg constructor. BUG FIX: the original body was {@code this.url = url;}
   * with the parameter commented out — a self-assignment of the field to
   * itself (a no-op that left {@code url} null). The field now simply stays
   * null, meaning "use the default OpenAI endpoint".
   */
  public AuthorService() {
  }

  /**
   * Backward-compatible overload for tests that need to point the client at
   * a custom base URL (e.g. a mock server or local proxy).
   *
   * @param url base URL of the OpenAI-compatible endpoint
   */
  public AuthorService(String url) {
    this.url = url;
  }

  /**
   * Generates a short biography of one of the known sci-fi authors.
   *
   * @param index index into the internal author list (0..2)
   * @return the model-generated biography
   * @throws ArrayIndexOutOfBoundsException if {@code index} is out of range
   */
  public String getAuthorBiography(int index) {
    ChatLanguageModel model = OpenAiChatModel.builder()
      .apiKey(OPENAI_API_KEY)
      //.baseUrl(this.url)
      //.proxy("http://localhost:3128")
      .modelName(GPT_3_5_TURBO)
      .temperature(0.3)
      .timeout(ofSeconds(60))
      .logRequests(true)
      .logResponses(true)
      .build();

    String prompt = "Write a short biography about " + scifiAuthors[index];
    String biography = model.generate(prompt);
    return biography;
  }

  public static void main(String[] args) {
    AuthorService authorService = new AuthorService();
    System.out.println(authorService.getAuthorBiography(0));
  }
}
// end::adocSnippet[]
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((758, 1032), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 1017), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 991), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 966), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 936), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 912), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 813), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.localai;
import dev.langchain4j.model.localai.LocalAiChatModel;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useLocalAiChatModel();
  }

  // NOTE(review): the AZURE_* constants and PROMPT below are never used in
  // this class — presumably copied from a sibling Azure example; confirm
  // before deleting.
  private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
  private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
  private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");

  private static final String PROMPT = "When was the first Beatles album released?";

  // #############################
  // ###  LOCALAI CHAT MODEL   ###
  // #############################
  // (Header fixed: it previously said "AZURE OPENAI CHAT MODEL" although the
  // method below builds a LocalAI model.)

  /** Demonstrates a LocalAI chat model: builds it with temperature 0.3 and asks one question. */
  public void useLocalAiChatModel() {
    System.out.println("### useLocalAiChatModel");

    // tag::adocSnippet[]
    LocalAiChatModel model = LocalAiChatModel.builder()
      .temperature(0.3)
      .build();
    // end::adocSnippet[]

    String completion = model.generate("When was the first Rolling Stones album released?");

    System.out.println(completion);
  }
}
| [
"dev.langchain4j.model.localai.LocalAiChatModel.builder"
] | [((1048, 1113), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((1048, 1098), 'dev.langchain4j.model.localai.LocalAiChatModel.builder')] |
package io.quarkiverse.langchain4j.azure.openai;
import static dev.langchain4j.internal.RetryUtils.withRetry;
import static dev.langchain4j.internal.ValidationUtils.ensureNotBlank;
import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import dev.ai4j.openai4j.SyncOrAsync;
import dev.ai4j.openai4j.image.GenerateImagesRequest;
import dev.ai4j.openai4j.image.GenerateImagesResponse;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.output.Response;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient;
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
public class AzureOpenAiImageModel implements ImageModel {

    // Image-generation request parameters, fixed at construction time.
    private final String modelName;
    private final String size;
    private final String quality;
    private final String style;
    private final Optional<String> user;
    private final String responseFormat;
    private final Integer maxRetries;
    // When present, every generated image (URL or base64 payload) is copied
    // into this local directory and the response URL rewritten to the file.
    private final Optional<Path> persistDirectory;

    private final QuarkusOpenAiClient client;

    public AzureOpenAiImageModel(String endpoint, String apiKey, String apiVersion, String modelName, String size,
            String quality, String style, Optional<String> user, String responseFormat, Duration timeout,
            Integer maxRetries, Boolean logRequests, Boolean logResponses,
            Optional<Path> persistDirectory) {
        this.modelName = modelName;
        this.size = size;
        this.quality = quality;
        this.style = style;
        this.user = user;
        this.responseFormat = responseFormat;
        this.maxRetries = maxRetries;
        this.persistDirectory = persistDirectory;
        // Same timeout applied to all four OkHttp timeout phases.
        this.client = QuarkusOpenAiClient.builder()
                .baseUrl(ensureNotBlank(endpoint, "endpoint"))
                .azureApiKey(apiKey)
                .apiVersion(apiVersion)
                .callTimeout(timeout)
                .connectTimeout(timeout)
                .readTimeout(timeout)
                .writeTimeout(timeout)
                .logRequests(logRequests)
                .logResponses(logResponses)
                .build();
    }

    /** Generates a single image for the prompt, retrying up to maxRetries times. */
    @Override
    public Response<Image> generate(String prompt) {
        var request = requestBuilder(prompt).build();
        var response = withRetry(new ImageGenerator(request), maxRetries).execute();

        persistIfNecessary(response);

        return Response.from(fromImageData(response.data().get(0)));
    }

    /** Generates {@code n} images for the prompt in a single request. */
    @Override
    public Response<List<Image>> generate(String prompt, int n) {
        var request = requestBuilder(prompt).n(n).build();
        var response = withRetry(new ImageGenerator(request), maxRetries).execute();

        persistIfNecessary(response);

        return Response.from(
                response.data().stream().map(AzureOpenAiImageModel::fromImageData).collect(Collectors.toList()));
    }

    // Copies each returned image to persistDirectory (if configured) and
    // mutates the response's image URLs to point at the local files.
    private void persistIfNecessary(GenerateImagesResponse response) {
        if (persistDirectory.isEmpty()) {
            return;
        }
        var persistTo = persistDirectory.get();
        try {
            Files.createDirectories(persistTo);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        for (GenerateImagesResponse.ImageData data : response.data()) {
            try {
                // The API returns either a URL or a base64 payload depending
                // on the configured responseFormat; handle both.
                data.url(
                        data.url() != null
                                ? FilePersistor.persistFromUri(data.url(), persistTo).toUri()
                                : FilePersistor.persistFromBase64String(data.b64Json(), persistTo).toUri());
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    }

    // Maps the raw API image payload to the langchain4j Image type.
    private static Image fromImageData(GenerateImagesResponse.ImageData data) {
        return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build();
    }

    // Builds the request with all configured parameters; the model field is
    // only set explicitly for DALL-E 2 (the Azure deployment determines the
    // model otherwise — presumably; confirm against the Azure API docs).
    private GenerateImagesRequest.Builder requestBuilder(String prompt) {
        var builder = GenerateImagesRequest.builder()
                .prompt(prompt)
                .size(size)
                .quality(quality)
                .style(style)
                .responseFormat(responseFormat);

        if (DALL_E_2.equals(modelName)) {
            builder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2);
        }
        if (user.isPresent()) {
            builder.user(user.get());
        }

        return builder;
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Fluent builder for {@link AzureOpenAiImageModel}; all setters return {@code this}. */
    public static class Builder {
        private String endpoint;
        private String apiKey;
        private String apiVersion;
        private String modelName;
        private String size;
        private String quality;
        private String style;
        private Optional<String> user;
        private String responseFormat;
        private Duration timeout;
        private Integer maxRetries;
        private Boolean logRequests;
        private Boolean logResponses;
        private Optional<Path> persistDirectory;

        public Builder endpoint(String endpoint) {
            this.endpoint = endpoint;
            return this;
        }

        public Builder apiKey(String apiKey) {
            this.apiKey = apiKey;
            return this;
        }

        public Builder apiVersion(String apiVersion) {
            this.apiVersion = apiVersion;
            return this;
        }

        public Builder timeout(Duration timeout) {
            this.timeout = timeout;
            return this;
        }

        public Builder maxRetries(Integer maxRetries) {
            this.maxRetries = maxRetries;
            return this;
        }

        public Builder logRequests(Boolean logRequests) {
            this.logRequests = logRequests;
            return this;
        }

        public Builder logResponses(Boolean logResponses) {
            this.logResponses = logResponses;
            return this;
        }

        public Builder modelName(String modelName) {
            this.modelName = modelName;
            return this;
        }

        public Builder size(String size) {
            this.size = size;
            return this;
        }

        public Builder quality(String quality) {
            this.quality = quality;
            return this;
        }

        public Builder style(String style) {
            this.style = style;
            return this;
        }

        public Builder user(Optional<String> user) {
            this.user = user;
            return this;
        }

        public Builder responseFormat(String responseFormat) {
            this.responseFormat = responseFormat;
            return this;
        }

        public Builder persistDirectory(Optional<Path> persistDirectory) {
            this.persistDirectory = persistDirectory;
            return this;
        }

        public AzureOpenAiImageModel build() {
            return new AzureOpenAiImageModel(endpoint, apiKey, apiVersion, modelName, size, quality, style, user,
                    responseFormat, timeout, maxRetries, logRequests, logResponses,
                    persistDirectory);
        }
    }

    // Callable wrapper so a single request can be re-executed by withRetry.
    private class ImageGenerator implements Callable<SyncOrAsync<GenerateImagesResponse>> {
        private final GenerateImagesRequest request;

        private ImageGenerator(GenerateImagesRequest request) {
            this.request = request;
        }

        @Override
        public SyncOrAsync<GenerateImagesResponse> call() {
            return client.imagesGeneration(request);
        }
    }

    /**
     * Copied from {@code dev.ai4j.openai4j.FilePersistor}
     */
    private static class FilePersistor {

        // Downloads the resource at 'uri' into destinationFolder, keeping the
        // original file name, and returns the local path.
        static Path persistFromUri(URI uri, Path destinationFolder) {
            try {
                Path fileName = Paths.get(uri.getPath()).getFileName();
                Path destinationFilePath = destinationFolder.resolve(fileName);
                try (InputStream inputStream = uri.toURL().openStream()) {
                    Files.copy(inputStream, destinationFilePath, StandardCopyOption.REPLACE_EXISTING);
                }
                return destinationFilePath;
            } catch (IOException e) {
                throw new RuntimeException("Error persisting file from URI: " + uri, e);
            }
        }

        // Decodes a base64 payload into a randomly named file in destinationFolder.
        public static Path persistFromBase64String(String base64EncodedString, Path destinationFolder) throws IOException {
            byte[] decodedBytes = Base64.getDecoder().decode(base64EncodedString);
            Path destinationFile = destinationFolder.resolve(randomFileName());
            Files.write(destinationFile, decodedBytes, StandardOpenOption.CREATE);
            return destinationFile;
        }

        // 20-character hex-ish name derived from a UUID with dashes removed.
        private static String randomFileName() {
            return UUID.randomUUID().toString().replaceAll("-", "").substring(0, 20);
        }
    }
}
| [
"dev.langchain4j.data.image.Image.builder"
] | [((2190, 2626), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2601), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2557), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2515), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2476), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2438), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2397), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2359), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2319), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2282), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((4295, 4397), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4389), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4353), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4326), 'dev.langchain4j.data.image.Image.builder'), ((4502, 4705), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4657), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4627), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4593), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4565), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((8309, 8347), 'java.nio.file.Paths.get'), ((8980, 9027), 'java.util.Base64.getDecoder'), ((9309, 9374), 'java.util.UUID.randomUUID'), ((9309, 9357), 'java.util.UUID.randomUUID'), ((9309, 9337), 'java.util.UUID.randomUUID')] |
package org.agoncal.fascicle.langchain4j.accessing.ollama;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaLanguageModel;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useOllamaLanguageModel();
    // The remaining demos are left disabled on purpose — enable one at a time.
    // musicianService.useOllamaLanguageModelPrompt();
    // musicianService.useOllamaLanguageModelBuilder();
    // musicianService.useOllamaChatModel();
    // musicianService.useOllamaChatModelBuilder();
  }

  private static final String PROMPT = "When was the first Beatles album released?";

  // #############################
  // ### OLLAMA LANGUAGE MODEL ###
  // #############################
  // (Header fixed: it previously said "OPENAI". NOTE(review): the println
  // labels below still read "useOpenAi..." although these methods exercise
  // Ollama — they are runtime output, so they were left untouched; confirm
  // before renaming.)

  /** Plain completion: builds an orca-mini model and prints the answer plus token usage. */
  public void useOllamaLanguageModel() {
    System.out.println("### useOpenAiLanguageModel");

    // tag::adocSnippet[]
    OllamaLanguageModel model = OllamaLanguageModel.builder()
      .baseUrl("http://localhost:11434")
      .modelName("orca-mini")
      .build();
    // end::adocSnippet[]

    Response<String> completion = model.generate("When was the first Beatles album released?");
    String content = completion.content();
    TokenUsage tokenUsage = completion.tokenUsage();

    System.out.println(content);
    System.out.println(tokenUsage.inputTokenCount());
    System.out.println(tokenUsage.outputTokenCount());
    System.out.println(tokenUsage.totalTokenCount());
  }

  /** Same completion, but wraps the question in a {@link Prompt} and also prints the finish reason. */
  public void useOllamaLanguageModelPrompt() {
    System.out.println("### useOpenAiLanguageModelPrompt");

    OllamaLanguageModel model = OllamaLanguageModel.builder()
      .baseUrl("http://localhost:11434")
      .modelName("orca-mini")
      .build();

    Prompt prompt = new Prompt("When was the first Beatles album released?");

    Response<String> completion = model.generate(prompt);
    String content = completion.content();
    FinishReason finishReason = completion.finishReason();
    TokenUsage tokenUsage = completion.tokenUsage();

    System.out.println(content);
    System.out.println(finishReason.name());
    System.out.println(tokenUsage.inputTokenCount());
    System.out.println(tokenUsage.outputTokenCount());
    System.out.println(tokenUsage.totalTokenCount());
  }

  /** Builder variant: adds an explicit temperature (0.3) and uses the shared PROMPT constant. */
  public void useOllamaLanguageModelBuilder() {
    System.out.println("### useOpenAiLanguageModelBuilder");

    OllamaLanguageModel model = OllamaLanguageModel.builder()
      .baseUrl("http://localhost:11434")
      .modelName("orca-mini")
      .temperature(0.3)
      .build();

    Response<String> completion = model.generate(PROMPT);

    System.out.println(completion.content());
    System.out.println(completion.finishReason());
    System.out.println(completion.tokenUsage());
  }

  // #########################
  // ### OLLAMA CHAT MODEL ###
  // #########################
  // (Header fixed: it previously said "OPENAI".)

  /** Chat model with all defaults: a one-shot question returning a plain String. */
  public void useOllamaChatModel() {
    System.out.println("### useOpenAiChatModel");

    OllamaChatModel model = OllamaChatModel.builder().build();

    String completion = model.generate("When was the first Rolling Stones album released?");

    System.out.println(completion);
  }

  /** Chat model via builder: raises the temperature to 0.9 for more creative answers. */
  public void useOllamaChatModelBuilder() {
    System.out.println("### useOpenAiChatModelBuilder");

    OllamaChatModel model = OllamaChatModel.builder()
      // .modelName(GPT_3_5_TURBO)
      .temperature(0.9)
      .build();

    String completion = model.generate("When was the first Rolling Stones album released?");

    System.out.println(completion);
  }

  /** Chat with explicit system + user messages, returning the full AiMessage response. */
  public void useOllamaChatModelAiMessage() {
    System.out.println("### useOpenAiChatModelAiMessage");

    OllamaChatModel model = OllamaChatModel.builder().build();

    SystemMessage sysMsg = new SystemMessage("You are a music expert.");
    UserMessage userMsg = new UserMessage("When was the first Rolling Stones album released?");

    Response<AiMessage> completion = model.generate(sysMsg, userMsg);

    System.out.println(completion);
  }
}
| [
"dev.langchain4j.model.ollama.OllamaLanguageModel.builder",
"dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | [((1359, 1474), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((1359, 1459), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((1359, 1429), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2153), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2138), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2108), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2976), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2961), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2937), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2907), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((3398, 3431), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((3699, 3797), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((3699, 3782), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((4069, 4102), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] |
package com.baeldung.langchain;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
public class ServiceWithToolsLiveTest {
private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class);
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
String chat(String userMessage);
}
@Test
public void givenServiceWithTools_whenPrompted_thenValidResponse() {
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY))
.tools(new Calculator())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?";
String answer = assistant.chat(question);
logger.info(answer);
assertThat(answer).contains("13");
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')] |
package io.stargate.test.data;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import dev.langchain4j.model.output.Response;
import io.stargate.sdk.data.DataApiClient;
import io.stargate.sdk.data.CollectionRepository;
import io.stargate.sdk.data.NamespaceClient;
import io.stargate.sdk.data.domain.query.Filter;
import io.stargate.sdk.data.domain.odm.Document;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import java.io.File;
import java.io.FileNotFoundException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
* Philosopher Demo with Vector Client.
*/
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class VectorClientPhilosopherTest {
@Data @AllArgsConstructor @NoArgsConstructor
private static class Quote {
private String philosopher;
private String quote;
private Set<String> tags;
}
// OpenAI Usual Suspects
static OpenAiEmbeddingModel openaiVectorizer = OpenAiEmbeddingModel.builder()
.apiKey(System.getenv("OPENAI_API_KEY"))
.modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002)
.timeout(Duration.ofSeconds(15))
.logRequests(true)
.logResponses(true)
.build();
public static float[] vectorize(String inputText) {
return openaiVectorizer.embed(inputText).content().vector();
}
public static CollectionRepository<Quote> vectorStore;
@Test
@Order(1)
@DisplayName("01. Create a namespace and a collection")
public void init() {
// Initialization
DataApiClient jsonApiClient = new DataApiClient();
NamespaceClient nsClient = jsonApiClient.createNamespace("vector_openai");
nsClient.deleteCollection("philosophers");
nsClient.createCollection("philosophers", 1536);
// Low level client
jsonApiClient.namespace("vector_openai").collection("philosophers");
// Crud Repository on a Collection
jsonApiClient.namespace("vector_openai").collectionRepository("philosophers", Quote.class);
// Vector = crud repository + vector native
vectorStore = jsonApiClient
.namespace("vector_openai")
.collectionRepository("philosophers", Quote.class);
}
@Test
@Order(2)
@DisplayName("02. Loading DataSet")
public void shouldLoadDataset() {
// Ingest CSV
AtomicInteger rowId = new AtomicInteger();
loadQuotesFromCsv("/philosopher-quotes.csv").forEach(quote -> {
System.out.println("Inserting " + rowId.get() + ") " + quote.getQuote());
vectorStore.insert(new Document<Quote>(
String.valueOf(rowId.incrementAndGet()),
quote, vectorize(quote.getQuote())));
});
}
@Test
@Order(3)
@DisplayName("03. Should Similarity Search")
public void shouldSimilaritySearch() {
vectorStore = new DataApiClient()
.namespace("vector_openai")
.collectionRepository("philosophers", Quote.class);
float[] embeddings = vectorize("We struggle all our life for nothing");
vectorStore.findVector(embeddings, null,3)
.stream()
.map(Document::getData)
.map(Quote::getQuote)
.forEach(System.out::println);
}
@Test
@Order(4)
@DisplayName("04. Should filter with meta data")
public void shouldMetaDataFiltering() {
new DataApiClient()
.namespace("vector_openai")
.collectionRepository("philosophers", Quote.class)
.findVector(
vectorize("We struggle all our life for nothing"),
new Filter().where("philosopher").isEqualsTo("plato"),
2)
.forEach(r -> System.out.println(r.getSimilarity() + " - " + r.getData().getQuote()));
}
@Test
@Order(5)
@DisplayName("05. Should filter with meta data tags")
public void shouldMetaDataFilteringWithTags() {
vectorStore = new DataApiClient()
.namespace("vector_openai")
.collectionRepository("philosophers", Quote.class);
vectorStore.count(new Filter().where("tags").isAnArrayContaining("love"));
}
static ChatLanguageModel openaiGenerator = OpenAiChatModel.builder()
.apiKey(System.getenv("OPENAI_API_KEY"))
.modelName(OpenAiModelName.GPT_3_5_TURBO)
.temperature(0.7)
.timeout(Duration.ofSeconds(15))
.maxRetries(3)
.logResponses(true)
.logRequests(true)
.build();
@Test
@Order(6)
@DisplayName("06. Should Generate new quotes")
public void should_generate_new_quotes() {
vectorStore = new DataApiClient()
.namespace("vector_openai")
.collectionRepository("philosophers", Quote.class);
// === Params ==
String topic = "politics and virtue";
String author = "nietzsche";
int records = 4;
// ==== RAG ===
List<String> ragQuotes = vectorStore
.findVector(
vectorize(topic),
new Filter().where("philosopher").isEqualsTo(author),2)
.stream()
.map(r -> r.getData().getQuote())
.collect(Collectors.toList());
// === Completion ===
PromptTemplate promptTemplate = PromptTemplate.from(
"Generate a single short philosophical quote on the given topic,\n" +
"similar in spirit and form to the provided actual example quotes.\n" +
"Do not exceed 20-30 words in your quote.\n" +
"REFERENCE TOPIC: \n{{topic}}" +
"\nACTUAL EXAMPLES:\n{{rag}}");
Map<String, Object> variables = new HashMap<>();
variables.put("topic", topic);
variables.put("information", String.join(", ", ragQuotes));
Prompt prompt = promptTemplate.apply(variables);
Response<AiMessage> aiMessage = openaiGenerator.generate(prompt.toUserMessage());
String answer = aiMessage.content().text();
System.out.println(answer);
}
// --- Utilities (loading CSV) ---
private List<Quote> loadQuotesFromCsv(String filePath) {
List<Quote> quotes = new ArrayList<>();
File csvFile = new File(VectorClientPhilosopherTest.class.getResource(filePath).getFile());
try (Scanner scanner = new Scanner(csvFile)) {
while (scanner.hasNextLine()) {
Quote q = mapCsvLine(scanner.nextLine());
if (q != null) quotes.add(q);
}
} catch (FileNotFoundException fex) {
throw new IllegalArgumentException("file is not in the classpath", fex);
}
return quotes;
}
private Quote mapCsvLine(String line) {
String[] parts = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1);
if (parts.length >= 3) {
String author = parts[0];
String quote = parts[1].replaceAll("\"", "");
Set<String> tags = new HashSet<>(Arrays.asList(parts[2].split("\\;")));
return new Quote(author, quote, tags);
}
return null;
}
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1750, 2031), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 2009), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1976), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1944), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1898), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1834), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5155, 5473), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5452), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5421), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5389), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5362), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5317), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5287), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5233), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package dev.langchain4j.model.vertexai;
import com.google.cloud.vertexai.VertexAI;
import com.google.cloud.vertexai.api.GenerationConfig;
import com.google.cloud.vertexai.generativeai.GenerativeModel;
import dev.langchain4j.agent.tool.JsonSchemaProperty;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.TestStreamingResponseHandler;
import dev.langchain4j.model.output.Response;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static dev.langchain4j.internal.Utils.readBytes;
import static dev.langchain4j.model.output.FinishReason.LENGTH;
import static dev.langchain4j.model.output.FinishReason.STOP;
import static dev.langchain4j.model.vertexai.VertexAiGeminiChatModelIT.CAT_IMAGE_URL;
import static dev.langchain4j.model.vertexai.VertexAiGeminiChatModelIT.DICE_IMAGE_URL;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
class VertexAiGeminiStreamingChatModelIT {
StreamingChatLanguageModel model = VertexAiGeminiStreamingChatModel.builder()
.project(System.getenv("GCP_PROJECT_ID"))
.location(System.getenv("GCP_LOCATION"))
.modelName("gemini-pro")
.build();
StreamingChatLanguageModel visionModel = VertexAiGeminiStreamingChatModel.builder()
.project(System.getenv("GCP_PROJECT_ID"))
.location(System.getenv("GCP_LOCATION"))
.modelName("gemini-pro-vision")
.build();
@Test
void should_stream_answer() throws Exception {
// given
String userMessage = "What is the capital of Germany?";
// when
CompletableFuture<String> futureAnswer = new CompletableFuture<>();
CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();
model.generate(userMessage, new StreamingResponseHandler<AiMessage>() {
private final StringBuilder answerBuilder = new StringBuilder();
@Override
public void onNext(String token) {
System.out.println("onNext: '" + token + "'");
answerBuilder.append(token);
}
@Override
public void onComplete(Response<AiMessage> response) {
System.out.println("onComplete: '" + response.content() + "'");
futureAnswer.complete(answerBuilder.toString());
futureResponse.complete(response);
}
@Override
public void onError(Throwable error) {
futureAnswer.completeExceptionally(error);
futureResponse.completeExceptionally(error);
}
});
String answer = futureAnswer.get(30, SECONDS);
Response<AiMessage> response = futureResponse.get(30, SECONDS);
// then
assertThat(answer).contains("Berlin");
assertThat(response.content().text()).isEqualTo(answer);
assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(7);
assertThat(response.tokenUsage().outputTokenCount()).isGreaterThan(0);
assertThat(response.tokenUsage().totalTokenCount())
.isEqualTo(response.tokenUsage().inputTokenCount() + response.tokenUsage().outputTokenCount());
assertThat(response.finishReason()).isEqualTo(STOP);
}
@Test
void should_deny_system_message() {
// given
SystemMessage systemMessage = SystemMessage.from("Be polite");
UserMessage userMessage = UserMessage.from("Tell me a joke");
// when-then
assertThatThrownBy(() -> model.generate(asList(systemMessage, userMessage), null))
.isExactlyInstanceOf(IllegalArgumentException.class)
.hasMessage("SystemMessage is currently not supported by Gemini");
}
@Test
void should_respect_maxOutputTokens() throws Exception {
// given
StreamingChatLanguageModel model = VertexAiGeminiStreamingChatModel.builder()
.project(System.getenv("GCP_PROJECT_ID"))
.location(System.getenv("GCP_LOCATION"))
.modelName("gemini-pro")
.maxOutputTokens(1)
.build();
String userMessage = "Tell me a joke";
// when
CompletableFuture<String> futureAnswer = new CompletableFuture<>();
CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();
model.generate(userMessage, new StreamingResponseHandler<AiMessage>() {
private final StringBuilder answerBuilder = new StringBuilder();
@Override
public void onNext(String token) {
System.out.println("onNext: '" + token + "'");
answerBuilder.append(token);
}
@Override
public void onComplete(Response<AiMessage> response) {
System.out.println("onComplete: '" + response.content() + "'");
futureAnswer.complete(answerBuilder.toString());
futureResponse.complete(response);
}
@Override
public void onError(Throwable error) {
futureAnswer.completeExceptionally(error);
futureResponse.completeExceptionally(error);
}
});
String answer = futureAnswer.get(30, SECONDS);
Response<AiMessage> response = futureResponse.get(30, SECONDS);
// then
assertThat(answer).isNotBlank();
assertThat(response.content().text()).isEqualTo(answer);
assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(4);
assertThat(response.tokenUsage().outputTokenCount()).isEqualTo(1);
assertThat(response.tokenUsage().totalTokenCount())
.isEqualTo(response.tokenUsage().inputTokenCount() + response.tokenUsage().outputTokenCount());
assertThat(response.finishReason()).isEqualTo(LENGTH);
}
@Test
void should_allow_custom_generativeModel_and_generationConfig() throws Exception {
// given
VertexAI vertexAi = new VertexAI(System.getenv("GCP_PROJECT_ID"), System.getenv("GCP_LOCATION"));
GenerativeModel generativeModel = new GenerativeModel("gemini-pro", vertexAi);
GenerationConfig generationConfig = GenerationConfig.getDefaultInstance();
StreamingChatLanguageModel model = new VertexAiGeminiStreamingChatModel(generativeModel, generationConfig);
String userMessage = "What is the capital of Germany?";
// when
CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();
model.generate(userMessage, new StreamingResponseHandler<AiMessage>() {
@Override
public void onNext(String token) {
}
@Override
public void onComplete(Response<AiMessage> response) {
futureResponse.complete(response);
}
@Override
public void onError(Throwable error) {
futureResponse.completeExceptionally(error);
}
});
Response<AiMessage> response = futureResponse.get(30, SECONDS);
// then
assertThat(response.content().text()).contains("Berlin");
}
@Test
void should_accept_text_and_image_from_public_url() {
// given
UserMessage userMessage = UserMessage.from(
ImageContent.from(CAT_IMAGE_URL),
TextContent.from("What do you see? Reply in one word.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text()).containsIgnoringCase("cat");
}
@Test
void should_accept_text_and_image_from_google_storage_url() {
// given
UserMessage userMessage = UserMessage.from(
ImageContent.from("gs://langchain4j-test/cat.png"),
TextContent.from("What do you see? Reply in one word.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text()).containsIgnoringCase("cat");
}
@Test
void should_accept_text_and_base64_image() {
// given
String base64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL));
UserMessage userMessage = UserMessage.from(
ImageContent.from(base64Data, "image/png"),
TextContent.from("What do you see? Reply in one word.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text()).containsIgnoringCase("cat");
}
@Test
void should_accept_text_and_multiple_images_from_public_urls() {
// given
UserMessage userMessage = UserMessage.from(
ImageContent.from(CAT_IMAGE_URL),
ImageContent.from(DICE_IMAGE_URL),
TextContent.from("What do you see? Reply with one word per image.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text())
.containsIgnoringCase("cat")
.containsIgnoringCase("dice");
}
@Test
void should_accept_text_and_multiple_images_from_google_storage_urls() {
// given
UserMessage userMessage = UserMessage.from(
ImageContent.from("gs://langchain4j-test/cat.png"),
ImageContent.from("gs://langchain4j-test/dice.png"),
TextContent.from("What do you see? Reply with one word per image.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text())
.containsIgnoringCase("cat")
.containsIgnoringCase("dice");
}
@Test
void should_accept_text_and_multiple_base64_images() {
// given
String catBase64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL));
String diceBase64Data = Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL));
UserMessage userMessage = UserMessage.from(
ImageContent.from(catBase64Data, "image/png"),
ImageContent.from(diceBase64Data, "image/png"),
TextContent.from("What do you see? Reply with one word per image.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text())
.containsIgnoringCase("cat")
.containsIgnoringCase("dice");
}
@Test
void should_accept_text_and_multiple_images_from_different_sources() {
// given
UserMessage userMessage = UserMessage.from(
ImageContent.from(CAT_IMAGE_URL),
ImageContent.from("gs://langchain4j-test/dog.jpg"),
ImageContent.from(Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)), "image/png"),
TextContent.from("What do you see? Reply with one word per image.")
);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
visionModel.generate(singletonList(userMessage), handler);
Response<AiMessage> response = handler.get();
// then
assertThat(response.content().text())
.containsIgnoringCase("cat")
.containsIgnoringCase("dog")
.containsIgnoringCase("dice");
}
@Test
void should_accept_function_call() {
// given
VertexAiGeminiStreamingChatModel model = VertexAiGeminiStreamingChatModel.builder()
.project(System.getenv("GCP_PROJECT_ID"))
.location(System.getenv("GCP_LOCATION"))
.modelName("gemini-pro")
.build();
ToolSpecification weatherToolSpec = ToolSpecification.builder()
.name("getWeatherForecast")
.description("Get the weather forecast for a location")
.addParameter("location", JsonSchemaProperty.STRING,
JsonSchemaProperty.description("the location to get the weather forecast for"))
.build();
List<ChatMessage> allMessages = new ArrayList<>();
UserMessage weatherQuestion = UserMessage.from("What is the weather in Paris?");
System.out.println("Question: " + weatherQuestion.text());
allMessages.add(weatherQuestion);
// when
TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>();
model.generate(allMessages, weatherToolSpec, handler);
Response<AiMessage> messageResponse = handler.get();
// then
assertThat(messageResponse.content().hasToolExecutionRequests()).isTrue();
ToolExecutionRequest toolExecutionRequest = messageResponse.content().toolExecutionRequests().get(0);
assertThat(toolExecutionRequest.arguments()).contains("Paris");
assertThat(toolExecutionRequest.name()).isEqualTo("getWeatherForecast");
allMessages.add(messageResponse.content());
// when (feeding the function return value back)
ToolExecutionResultMessage toolExecResMsg = ToolExecutionResultMessage.from(toolExecutionRequest,
"{\"location\":\"Paris\",\"forecast\":\"sunny\", \"temperature\": 20}");
allMessages.add(toolExecResMsg);
handler = new TestStreamingResponseHandler<>();
model.generate(allMessages, handler);
Response<AiMessage> weatherResponse = handler.get();
// then
System.out.println("Answer: " + weatherResponse.content().text());
assertThat(weatherResponse.content().text()).containsIgnoringCase("sunny");
}
} | [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((9104, 9164), 'java.util.Base64.getEncoder'), ((11338, 11398), 'java.util.Base64.getEncoder'), ((11432, 11493), 'java.util.Base64.getEncoder'), ((12472, 12533), 'java.util.Base64.getEncoder'), ((13457, 13774), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13753), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13592), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13524), 'dev.langchain4j.agent.tool.ToolSpecification.builder')] |
package com.exoreaction.quadim;
import static java.time.Duration.ofSeconds;
import com.exoreaction.quadim.resource.util.SkillDefinitionHelper;
import com.exoreaction.quadim.service.ApiKeys;
import com.fasterxml.jackson.core.json.JsonReadFeature;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
public class AiAssistedTranslationTest {
public static final ObjectMapper mapper = new ObjectMapper()
.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.configure(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature(), true)
.enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature())
.findAndRegisterModules();
SkillDefinitionHelper helper;
@Test
public void testAIAssistedTranslationFromEnglishToNorwegian() throws Exception {
// PLan
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.MY_OPENAI_API_KEY)
.modelName(OpenAiModelName.GPT_3_5_TURBO_16K)
.timeout(ofSeconds(900))
.temperature(0.2)
.build();
String initialProjectDescription = "Chief Developer, Technical Architect: Developed several core modules in PasientSky's platform, " +
"including communication module against Norwegian public health authorities, topology module for clinical diagnosis " +
"(ICPC2, CDC10 and SNOWMED), product module (medicines and prescription goods) m.m. Technical architect, Quality assurer. ";
int n = 343;
try {
String res0 = model.generate( "Translate " + initialProjectDescription + " from English to Norwegian");
System.out.println(n + " Translated descriptions:" + n++ + "\n\n" + res0 + "\n\n");
} catch (Exception e) {
System.out.println("Exception handling - Stacktrace:" + Arrays.toString(e.getStackTrace()));
}
}
@Test
public void testAIAssistedTranslationFromEnglishToSpanish() throws Exception {
// PLan
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.MY_OPENAI_API_KEY)
.modelName(OpenAiModelName.GPT_3_5_TURBO)
.timeout(ofSeconds(20))
.temperature(0.2)
.maxTokens(1000)
.build();
String initialProjectDescription = "Chief Developer, Technical Architect: Developed several core modules in PasientSky's platform, " +
"including communication module against Norwegian public health authorities, topology module for clinical diagnosis " +
"(ICPC2, CDC10 and SNOWMED), product module (medicines and prescription goods) m.m. Technical architect, Quality assurer. ";
int n = 343;
try {
String res0 = model.generate( "Translate " + initialProjectDescription + " from English to Spanish");
System.out.println(n + " Translated descriptions:" + n++ + "\n\n" + res0 + "\n\n");
} catch (Exception e) {
System.out.println("Exception handling - Stacktrace:" + Arrays.toString(e.getStackTrace()));
}
}
interface Assistant {
String chat(@MemoryId int memoryId, @UserMessage String userMessage);
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1091, 1152), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature'), ((1180, 1250), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature'), ((1475, 1693), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1672), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1642), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1605), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1547), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2903), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2882), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2853), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2823), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2787), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2733), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package dev.langchain4j.data.message;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.List;
import java.util.stream.Stream;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messageFromJson;
import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messageToJson;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
class ChatMessageSerializerTest {
@ParameterizedTest
@MethodSource
void should_serialize_and_deserialize_chat_message(ChatMessage message, String expectedJson) {
String json = messageToJson(message);
assertThat(json).isEqualToIgnoringWhitespace(expectedJson);
ChatMessage deserializedMessage = messageFromJson(json);
assertThat(deserializedMessage).isEqualTo(message);
}
static Stream<Arguments> should_serialize_and_deserialize_chat_message() {
return Stream.of(
Arguments.of(
SystemMessage.from("hello"),
"{\"text\":\"hello\",\"type\":\"SYSTEM\"}"
),
Arguments.of(
UserMessage.from("hello"),
"{\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}"
),
Arguments.of(
UserMessage.from("Klaus", "hello"),
"{\"name\":\"Klaus\",\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}"
),
Arguments.of(
UserMessage.from(ImageContent.from("http://image.url")),
"{\"contents\":[{\"image\":{\"url\":\"http://image.url\"},\"detailLevel\":\"LOW\",\"type\":\"IMAGE\"}],\"type\":\"USER\"}"
),
Arguments.of(
UserMessage.from(ImageContent.from("aGVsbG8=", "image/png")),
"{\"contents\":[{\"image\":{\"base64Data\":\"aGVsbG8\\u003d\",\"mimeType\":\"image/png\"},\"detailLevel\":\"LOW\",\"type\":\"IMAGE\"}],\"type\":\"USER\"}"
),
Arguments.of(
AiMessage.from("hello"),
"{\"text\":\"hello\",\"type\":\"AI\"}"
),
Arguments.of(
AiMessage.from(ToolExecutionRequest.builder()
.name("weather")
.arguments("{\"city\": \"Munich\"}")
.build()),
"{\"toolExecutionRequests\":[{\"name\":\"weather\",\"arguments\":\"{\\\"city\\\": \\\"Munich\\\"}\"}],\"type\":\"AI\"}"
),
Arguments.of(
ToolExecutionResultMessage.from("12345", "weather", "sunny"),
"{\"id\":\"12345\",\"toolName\":\"weather\",\"text\":\"sunny\",\"type\":\"TOOL_EXECUTION_RESULT\"}"
)
);
}
@Test
void should_deserialize_user_message_in_old_schema() {
String json = "{\"text\":\"hello\",\"type\":\"USER\"}";
ChatMessage deserializedMessage = messageFromJson(json);
assertThat(deserializedMessage).isEqualTo(UserMessage.from("hello"));
}
@Test
void should_serialize_and_deserialize_empty_list() {
List<ChatMessage> messages = emptyList();
String json = ChatMessageSerializer.messagesToJson(messages);
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).isEmpty();
}
@Test
void should_deserialize_null_as_empty_list() {
assertThat(messagesFromJson(null)).isEmpty();
}
@Test
void should_serialize_and_deserialize_list_with_one_message() {
List<ChatMessage> messages = singletonList(userMessage("hello"));
String json = ChatMessageSerializer.messagesToJson(messages);
assertThat(json).isEqualTo("[{\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}]");
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).isEqualTo(messages);
}
@Test
void should_serialize_and_deserialize_list_with_one_message_in_old_schema() {
String json = "[{\"text\":\"hello\",\"type\":\"USER\"}]";
List<ChatMessage> deserializedMessages = messagesFromJson(json);
assertThat(deserializedMessages).containsExactly(UserMessage.from("hello"));
}
} | [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((2765, 2954), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2765, 2913), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2765, 2844), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package com.dtsx.astra.sdk.vector;
import com.dtsx.astra.sdk.AstraDB;
import com.dtsx.astra.sdk.AstraDBAdmin;
import com.dtsx.astra.sdk.AstraDBRepository;
import com.dtsx.astra.sdk.utils.AstraRc;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import io.stargate.sdk.data.domain.odm.Document;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import java.io.File;
import java.io.FileNotFoundException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Slf4j
class VectorClientPhilosopherTest {

    // Live integration test: ingests philosopher quotes from a CSV, embeds them with
    // OpenAI, stores them in Astra DB, then runs a similarity search.
    // Requires ASTRA_DB_APPLICATION_TOKEN and OPENAI_API_KEY in the environment.

    static final String DBNAME_VECTOR_CLIENT = "test_java_astra_db_client";
    static final String VECTOR_STORE_NAME = "demo_philosophers";
    // Classpath resource holding the quotes dataset (author,quote,tag;tag;... per line).
    static final String DATASET = "/philosopher-quotes.csv";

    // Row model stored in the vector collection.
    @Data @AllArgsConstructor @NoArgsConstructor
    static class Quote {
        private String philosopher;
        private String quote;
        private Set<String> tags;
    }

    // Shared across the @Order-ed tests: assigned in test 01, re-opened in test 02.
    static AstraDBRepository<Quote> quoteRepository;

    // Embedding model used to vectorize quotes; ada-002 produces the 1536-dim vectors
    // matching the collection dimension used in shouldIngestCsv().
    static OpenAiEmbeddingModel openaiVectorizer = OpenAiEmbeddingModel.builder()
            .apiKey(System.getenv("OPENAI_API_KEY"))
            .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002)
            .timeout(Duration.ofSeconds(15))
            .logRequests(true)
            .logResponses(true)
            .build();

    /** Embeds the given text and returns the raw embedding vector. */
    static float[] vectorize(String inputText) {
        return openaiVectorizer.embed(inputText).content().vector();
    }

    @BeforeAll
    public static void setup() {
        // Fail fast when the Astra credentials are missing.
        if (System.getenv(AstraRc.ASTRA_DB_APPLICATION_TOKEN) == null) {
            throw new IllegalStateException("Please setup 'ASTRA_DB_APPLICATION_TOKEN' env variable");
        }
        new AstraDBAdmin().createDatabase(DBNAME_VECTOR_CLIENT);
        log.info("db is created and active");
    }

    /** Test 01: (re)creates the collection and inserts one embedded document per CSV quote. */
    @Test
    @Order(1)
    @DisplayName("01. Import Data")
    @EnabledIfEnvironmentVariable(named = "ASTRA_DB_APPLICATION_TOKEN", matches = "Astra.*")
    public void shouldIngestCsv() {
        // Init the Store (drop first so reruns start from a clean collection)
        AstraDB dbClient = new AstraDBAdmin().getDatabase(DBNAME_VECTOR_CLIENT);
        dbClient.deleteCollection(VECTOR_STORE_NAME);
        // 1536 = dimension of the ada-002 embeddings produced by vectorize()
        quoteRepository = dbClient.createCollection(VECTOR_STORE_NAME, 1536, Quote.class);
        log.info("store {} is created ", VECTOR_STORE_NAME);
        assertTrue(dbClient.isCollectionExists(VECTOR_STORE_NAME));
        // Populate the Store; document ids are just the 1-based row counter
        AtomicInteger rowId = new AtomicInteger();
        loadQuotesFromCsv(DATASET).forEach(quote -> {
            log.info("Inserting {}: {}", rowId.get(), quote.getQuote());
            Document<Quote> quoteDoc = new Document<Quote>(
                    String.valueOf(rowId.incrementAndGet()),
                    quote,
                    vectorize(quote.getQuote()));
            quoteRepository.insert(quoteDoc);
        });
    }

    /** Test 02: re-opens the collection and prints the 3 quotes nearest to a query vector. */
    @Test
    @Order(2)
    @DisplayName("02. Should Similarity Search")
    public void shouldSimilaritySearch() {
        quoteRepository = new AstraDBAdmin()
                .getDatabase(DBNAME_VECTOR_CLIENT)
                .getCollection(VECTOR_STORE_NAME, Quote.class);
        float[] embeddings = vectorize("We struggle all our life for nothing");
        quoteRepository.findVector(embeddings,3)
                .stream()
                .map(Document::getData)
                .map(Quote::getQuote)
                .forEach(System.out::println);
    }

    // --- Utilities (loading CSV) ---

    /**
     * Reads the classpath CSV and maps each parseable line to a Quote.
     * NOTE(review): a header row, if the dataset has one, would be parsed as a
     * Quote too — confirm the CSV is header-less.
     */
    private List<Quote> loadQuotesFromCsv(String filePath) {
        List<Quote> quotes = new ArrayList<>();
        File csvFile = new File(VectorClientPhilosopherTest.class.getResource(filePath).getFile());
        try (Scanner scanner = new Scanner(csvFile)) {
            while (scanner.hasNextLine()) {
                Quote q = mapCsvLine(scanner.nextLine());
                if (q != null) quotes.add(q);
            }
        } catch (FileNotFoundException fex) {
            throw new IllegalArgumentException("file is not in the classpath", fex);
        }
        return quotes;
    }

    /**
     * Parses one CSV line into a Quote, or returns null for lines with fewer than
     * three fields. The split regex only breaks on commas outside double quotes;
     * tags (third field) are separated by ';'.
     */
    private Quote mapCsvLine(String line) {
        String[] parts = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1);
        if (parts.length >= 3) {
            String author = parts[0];
            // strip the surrounding double quotes from the quote text
            String quote = parts[1].replaceAll("\"", "");
            Set<String> tags = new HashSet<>(Arrays.asList(parts[2].split("\\;")));
            return new Quote(author, quote, tags);
        }
        return null;
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((1581, 1856), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1835), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1803), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1772), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1727), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1664), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package org.acme.example.openai.aiservices;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import jakarta.annotation.PreDestroy;
import jakarta.enterprise.context.RequestScoped;
import jakarta.inject.Singleton;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import org.jboss.resteasy.reactive.RestQuery;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import io.quarkiverse.langchain4j.RegisterAiService;
@Path("assistant-with-tool")
public class AssistantWithToolsResource {

    private final Assistant assistant;

    public AssistantWithToolsResource(Assistant assistant) {
        this.assistant = assistant;
    }

    /** Forwards the user's message to the AI assistant and returns its reply. */
    @GET
    public String get(@RestQuery String message) {
        return assistant.chat(message);
    }

    /** AI service wired with the Calculator tools and the CDI-managed chat memory provider. */
    @RegisterAiService(tools = Calculator.class, chatMemoryProviderSupplier = RegisterAiService.BeanChatMemoryProviderSupplier.class)
    public interface Assistant {
        String chat(String userMessage);
    }

    /** Deterministic tools the model may invoke while composing an answer. */
    @Singleton
    public static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }

        @Tool("Calculates the square root of a number")
        double sqrt(int x) {
            return Math.sqrt(x);
        }
    }

    /** Hands out one sliding-window chat memory per conversation id, cleared on scope end. */
    @RequestScoped
    public static class ChatMemoryBean implements ChatMemoryProvider {

        private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();

        // Factory for a fresh 20-message sliding-window memory keyed by the conversation id.
        private static ChatMemory newMemory(Object memoryId) {
            return MessageWindowChatMemory.builder()
                    .maxMessages(20)
                    .id(memoryId)
                    .build();
        }

        @Override
        public ChatMemory get(Object memoryId) {
            return memories.computeIfAbsent(memoryId, ChatMemoryBean::newMemory);
        }

        @PreDestroy
        public void close() {
            memories.clear();
        }
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((1867, 2000), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1867, 1971), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1867, 1937), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package dev.langchain4j.model.chat;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.output.Response;
import org.assertj.core.api.WithAssertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
class StreamingChatLanguageModelTest implements WithAssertions {
public static class StreamingUpperCaseEchoModel implements StreamingChatLanguageModel {
@Override
public void generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler) {
ChatMessage lastMessage = messages.get(messages.size() - 1);
Response<AiMessage> response = new Response<>(new AiMessage(lastMessage.text().toUpperCase(Locale.ROOT)));
handler.onComplete(response);
}
}
public static final class CollectorResponseHandler<T> implements StreamingResponseHandler<T> {
private final List<Response<T>> responses = new ArrayList<>();
public List<Response<T>> responses() {
return responses;
}
@Override
public void onNext(String token) {}
@Override
public void onError(Throwable error) {}
@Override
public void onComplete(Response<T> response) {
responses.add(response);
}
}
@Test
public void test_not_supported() {
StreamingUpperCaseEchoModel model = new StreamingUpperCaseEchoModel();
CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>();
List<ChatMessage> messages = new ArrayList<>();
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> model.generate(messages, new ArrayList<>(), handler))
.withMessageContaining("Tools are currently not supported by this model");
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> model.generate(messages, ToolSpecification.builder().name("foo").build(), handler))
.withMessageContaining("Tools are currently not supported by this model");
}
@Test
public void test_generate() {
StreamingChatLanguageModel model = new StreamingUpperCaseEchoModel();
{
List<ChatMessage> messages = new ArrayList<>();
messages.add(new UserMessage("Hello"));
messages.add(new AiMessage("Hi"));
messages.add(new UserMessage("How are you?"));
CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>();
model.generate(messages, handler);
Response<AiMessage> response = handler.responses().get(0);
assertThat(response.content().text()).isEqualTo("HOW ARE YOU?");
assertThat(response.tokenUsage()).isNull();
assertThat(response.finishReason()).isNull();
}
{
CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>();
model.generate("How are you?", handler);
Response<AiMessage> response = handler.responses().get(0);
assertThat(response.content().text()).isEqualTo("HOW ARE YOU?");
assertThat(response.tokenUsage()).isNull();
assertThat(response.finishReason()).isNull();
}
}
} | [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((2183, 2230), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2183, 2222), 'dev.langchain4j.agent.tool.ToolSpecification.builder')] |
package com.baeldung.langchain;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
public class ServiceWithToolsLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class);

    /** Deterministic tools the model may call while answering. */
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    /** Contract implemented dynamically by AiServices. */
    interface Assistant {
        String chat(String userMessage);
    }

    @Test
    public void givenServiceWithTools_whenPrompted_thenValidResponse() {
        // Wire an assistant that can call Calculator and keeps the last 10 messages.
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(new Calculator())
                .build();

        String prompt = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?";
        String reply = assistant.chat(prompt);
        logger.info(reply);

        // "language" has 8 letters, "model" has 5 — the model should answer 13.
        assertThat(reply).contains("13");
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')] |
package com.baeldung.langchain;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
public class ServiceWithToolsLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class);

    // Tools exposed to the AI service; the @Tool descriptions are sent to the model.
    static class Calculator {
        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }
        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    // Contract implemented dynamically by AiServices.builder(...).
    interface Assistant {
        String chat(String userMessage);
    }

    /**
     * Live test (requires a real OpenAI API key in Constants.OPENAI_API_KEY):
     * the assistant should use the Calculator tools to compute 8 + 5 and
     * mention 13 in its answer.
     */
    @Test
    public void givenServiceWithTools_whenPrompted_thenValidResponse() {
        Assistant assistant = AiServices.builder(Assistant.class)
            .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY))
            .tools(new Calculator())
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .build();

        String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?";
        String answer = assistant.chat(question);
        logger.info(answer);
        assertThat(answer).contains("13");
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')] |
package com.moyz.adi.common.interfaces;
import com.moyz.adi.common.exception.BaseException;
import com.moyz.adi.common.util.JsonUtil;
import com.moyz.adi.common.util.LocalCache;
import com.moyz.adi.common.vo.AnswerMeta;
import com.moyz.adi.common.vo.ChatMeta;
import com.moyz.adi.common.vo.PromptMeta;
import com.moyz.adi.common.vo.SseAskParams;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.io.IOException;
import java.net.Proxy;
import java.util.UUID;
import static com.moyz.adi.common.enums.ErrorEnum.B_LLM_SERVICE_DISABLED;
@Slf4j
public abstract class AbstractLLMService<T> {
protected Proxy proxy;
protected String modelName;
protected T setting;
protected StreamingChatLanguageModel streamingChatLanguageModel;
protected ChatLanguageModel chatLanguageModel;
public AbstractLLMService(String modelName, String settingName, Class<T> clazz, Proxy proxy) {
this.modelName = modelName;
this.proxy = proxy;
String st = LocalCache.CONFIGS.get(settingName);
setting = JsonUtil.fromJson(st, clazz);
}
/**
* 检测该service是否可用(不可用的情况通常是没有配置key)
*
* @return
*/
public abstract boolean isEnabled();
public ChatLanguageModel getChatLLM() {
if (null != chatLanguageModel) {
return chatLanguageModel;
}
chatLanguageModel = buildChatLLM();
return chatLanguageModel;
}
public StreamingChatLanguageModel getStreamingChatLLM() {
if (null != streamingChatLanguageModel) {
return streamingChatLanguageModel;
}
streamingChatLanguageModel = buildStreamingChatLLM();
return streamingChatLanguageModel;
}
protected abstract ChatLanguageModel buildChatLLM();
protected abstract StreamingChatLanguageModel buildStreamingChatLLM();
protected abstract String parseError(Object error);
public Response<AiMessage> chat(ChatMessage chatMessage) {
if(!isEnabled()){
log.error("llm service is disabled");
throw new BaseException(B_LLM_SERVICE_DISABLED);
}
return getChatLLM().generate(chatMessage);
}
public void sseChat(SseAskParams params, TriConsumer<String, PromptMeta, AnswerMeta> consumer) {
if(!isEnabled()){
log.error("llm service is disabled");
throw new BaseException(B_LLM_SERVICE_DISABLED);
}
//create chat assistant
AiServices<IChatAssistant> serviceBuilder = AiServices.builder(IChatAssistant.class)
.streamingChatLanguageModel(getStreamingChatLLM());
if (null != params.getChatMemory()) {
serviceBuilder.chatMemory(params.getChatMemory());
}
IChatAssistant chatAssistant = serviceBuilder.build();
TokenStream tokenStream;
if (StringUtils.isNotBlank(params.getSystemMessage())) {
tokenStream = chatAssistant.chat(params.getSystemMessage(), params.getUserMessage());
} else {
tokenStream = chatAssistant.chat(params.getUserMessage());
}
tokenStream.onNext((content) -> {
log.info("get content:{}", content);
//加空格配合前端的fetchEventSource进行解析,见https://github.com/Azure/fetch-event-source/blob/45ac3cfffd30b05b79fbf95c21e67d4ef59aa56a/src/parse.ts#L129-L133
try {
params.getSseEmitter().send(" " + content);
} catch (IOException e) {
log.error("stream onNext error", e);
}
})
.onComplete((response) -> {
log.info("返回数据结束了:{}", response);
String questionUuid = StringUtils.isNotBlank(params.getRegenerateQuestionUuid()) ? params.getRegenerateQuestionUuid() : UUID.randomUUID().toString().replace("-", "");
PromptMeta questionMeta = new PromptMeta(response.tokenUsage().inputTokenCount(), questionUuid);
AnswerMeta answerMeta = new AnswerMeta(response.tokenUsage().outputTokenCount(), UUID.randomUUID().toString().replace("-", ""));
ChatMeta chatMeta = new ChatMeta(questionMeta, answerMeta);
String meta = JsonUtil.toJson(chatMeta).replaceAll("\r\n", "");
log.info("meta:" + meta);
try {
params.getSseEmitter().send(" [META]" + meta);
} catch (IOException e) {
log.error("stream onComplete error", e);
throw new RuntimeException(e);
}
// close eventSourceEmitter after tokens was calculated
params.getSseEmitter().complete();
consumer.accept(response.content().text(), questionMeta, answerMeta);
})
.onError((error) -> {
log.error("stream error", error);
try {
String errorMsg = parseError(error);
if(StringUtils.isBlank(errorMsg)){
errorMsg = error.getMessage();
}
params.getSseEmitter().send(SseEmitter.event().name("error").data(errorMsg));
} catch (IOException e) {
log.error("sse error", e);
}
params.getSseEmitter().complete();
})
.start();
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1439, 1474), 'com.moyz.adi.common.util.LocalCache.CONFIGS.get'), ((2985, 3092), 'dev.langchain4j.service.AiServices.builder'), ((4357, 4402), 'java.util.UUID.randomUUID'), ((4357, 4385), 'java.util.UUID.randomUUID'), ((4622, 4667), 'java.util.UUID.randomUUID'), ((4622, 4650), 'java.util.UUID.randomUUID'), ((4784, 4832), 'com.moyz.adi.common.util.JsonUtil.toJson'), ((5780, 5827), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event'), ((5780, 5812), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event')] |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.stream.*;
import javax.annotation.PostConstruct;
import com.google.cloud.aiplatform.v1.Endpoint;
import com.google.cloud.aiplatform.v1.EndpointName;
import com.google.cloud.aiplatform.v1.EndpointServiceClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import services.actuator.StartupCheck;
// Vision API packages
import com.google.cloud.vision.v1.*;
import com.google.cloud.vision.v1.Feature.Type;
import com.google.cloud.MetadataConfig;
import com.google.cloud.firestore.*;
import com.google.api.core.ApiFuture;
//LangChain4j packages
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiChatModel;
import dev.langchain4j.model.vertexai.VertexAiLanguageModel;
// Vertex AI packages
/**
 * Handles Eventarc notifications for Cloud Storage uploads: runs the Vision API on the
 * new image, prompts Vertex AI chat/text models about any detected text, and persists
 * the resulting metadata to Firestore for safe images.
 */
@RestController
public class EventController {

  private static final Logger logger = LoggerFactory.getLogger(EventController.class);

  private static final String projectID = MetadataConfig.getProjectId();
  private static final String zone = MetadataConfig.getZone();

  // CloudEvents headers every incoming request must carry.
  private static final List<String> requiredFields = Arrays.asList("ce-id", "ce-source", "ce-type", "ce-specversion");

  @Autowired
  private EventService eventService;

  @PostConstruct
  public void init() {
    logger.info("ImageAnalysisApplication: EventController Post Construct Initializer " + new SimpleDateFormat("HH:mm:ss.SSS").format(new java.util.Date(System.currentTimeMillis())));
    logger.info("ImageAnalysisApplication: EventController Post Construct - StartupCheck can be enabled");
    StartupCheck.up();
  }

  /** Liveness probe endpoint. */
  @GetMapping("start")
  String start(){
    logger.info("ImageAnalysisApplication: EventController - Executed start endpoint request " + new SimpleDateFormat("HH:mm:ss.SSS").format(new java.util.Date(System.currentTimeMillis())));
    return "EventController started";
  }

  /**
   * Entry point for Eventarc POSTs. Validates CloudEvents headers and the body,
   * then analyzes the uploaded image (labels, dominant color, safe-search, text,
   * logos), asks Vertex AI about the detected text, and stores the result.
   *
   * @return 400 when required headers/body fields are missing or Vision fails; 200 otherwise
   */
  @RequestMapping(value = "/", method = RequestMethod.POST)
  public ResponseEntity<String> receiveMessage(
    @RequestBody Map<String, Object> body, @RequestHeader Map<String, String> headers) throws IOException, InterruptedException, ExecutionException {

    // Validate the number of available processors
    logger.info("EventController: Active processors: " + Runtime.getRuntime().availableProcessors());

    System.out.println("Header elements");
    for (String field : requiredFields) {
      if (headers.get(field) == null) {
        String msg = String.format("Missing expected header: %s.", field);
        System.out.println(msg);
        return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST);
      } else {
        System.out.println(field + " : " + headers.get(field));
      }
    }

    System.out.println("Body elements");
    for (String bodyField : body.keySet()) {
      System.out.println(bodyField + " : " + body.get(bodyField));
    }

    if (headers.get("ce-subject") == null) {
      String msg = "Missing expected header: ce-subject.";
      System.out.println(msg);
      return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST);
    }

    String ceSubject = headers.get("ce-subject");
    String msg = "Detected change in Cloud Storage bucket: (ce-subject) : " + ceSubject;
    System.out.println(msg);

    String fileName = (String)body.get("name");
    String bucketName = (String)body.get("bucket");

    logger.info("New picture uploaded " + fileName);

    if(fileName == null){
      msg = "Missing expected body element: file name";
      System.out.println(msg);
      return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST);
    }

    try (ImageAnnotatorClient vision = ImageAnnotatorClient.create()) {
      // One request asking Vision for all five feature types at once.
      List<AnnotateImageRequest> requests = new ArrayList<>();
      ImageSource imageSource = ImageSource.newBuilder()
          .setGcsImageUri("gs://" + bucketName + "/" + fileName)
          .build();
      Image image = Image.newBuilder()
          .setSource(imageSource)
          .build();
      Feature featureLabel = Feature.newBuilder()
          .setType(Type.LABEL_DETECTION)
          .build();
      Feature featureImageProps = Feature.newBuilder()
          .setType(Type.IMAGE_PROPERTIES)
          .build();
      Feature featureSafeSearch = Feature.newBuilder()
          .setType(Type.SAFE_SEARCH_DETECTION)
          .build();
      Feature featureTextDetection = Feature.newBuilder()
          .setType(Type.TEXT_DETECTION)
          .build();
      Feature featureLogoDetection = Feature.newBuilder()
          .setType(Type.LOGO_DETECTION)
          .build();

      AnnotateImageRequest request = AnnotateImageRequest.newBuilder()
          .addFeatures(featureLabel)
          .addFeatures(featureImageProps)
          .addFeatures(featureSafeSearch)
          .addFeatures(featureTextDetection)
          .addFeatures(featureLogoDetection)
          .setImage(image)
          .build();
      requests.add(request);

      logger.info("Calling the Vision API...");
      BatchAnnotateImagesResponse result = vision.batchAnnotateImages(requests);
      List<AnnotateImageResponse> responses = result.getResponsesList();

      if (responses.size() == 0) {
        logger.info("No response received from Vision API.");
        return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST);
      }

      AnnotateImageResponse response = responses.get(0);
      if (response.hasError()) {
        logger.info("Error: " + response.getError().getMessage());
        return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST);
      }

      List<String> labels = response.getLabelAnnotationsList().stream()
          .map(annotation -> annotation.getDescription())
          .collect(Collectors.toList());
      logger.info("Annotations found by Vision API:");
      for (String label: labels) {
        logger.info("- " + label);
      }

      // Dominant color, defaulting to white when Vision returns none.
      String mainColor = "#FFFFFF";
      ImageProperties imgProps = response.getImagePropertiesAnnotation();
      if (imgProps.hasDominantColors()) {
        DominantColorsAnnotation colorsAnn = imgProps.getDominantColors();
        ColorInfo colorInfo = colorsAnn.getColors(0);
        mainColor = rgbHex(
            colorInfo.getColor().getRed(),
            colorInfo.getColor().getGreen(),
            colorInfo.getColor().getBlue());
        logger.info("Color: " + mainColor);
      }

      // Image is "safe" only when every safe-search category is below LIKELY.
      boolean isSafe = false;
      if (response.hasSafeSearchAnnotation()) {
        SafeSearchAnnotation safeSearch = response.getSafeSearchAnnotation();
        isSafe = Stream.of(
            safeSearch.getAdult(), safeSearch.getMedical(), safeSearch.getRacy(),
            safeSearch.getSpoof(), safeSearch.getViolence())
          .allMatch( likelihood ->
              likelihood != Likelihood.LIKELY && likelihood != Likelihood.VERY_LIKELY
          );
        logger.info("Is Image Safe? " + isSafe);
      }

      logger.info("Logo Annotations:");
      for (EntityAnnotation annotation : response.getLogoAnnotationsList()) {
        logger.info("Logo: " + annotation.getDescription());
        List<Property> properties = annotation.getPropertiesList();
        logger.info("Logo property list:");
        for (Property property : properties) {
          // Fix: the arguments previously sat outside String.format's closing paren,
          // so "%s" had no values and threw MissingFormatArgumentException at runtime.
          logger.info(String.format("Name: %s, Value: %s", property.getName(), property.getValue()));
        }
      }

      // Build a prompt out of every piece of text Vision detected in the image.
      String prompt = "Explain the text ";
      String textElements = "";
      logger.info("Text Annotations:");
      for (EntityAnnotation annotation : response.getTextAnnotationsList()) {
        textElements = annotation.getDescription();
        prompt += textElements + " ";
        logger.info("Text: " + textElements);

        // if(textElements.matches("^[a-zA-Z0-9]+$"))
        // NOTE(review): each text element is appended twice (once above with a space,
        // once here); the commented-out condition suggests this second append was
        // meant to be conditional — confirm intent before changing.
        prompt += textElements;
      }

      // build alternative prompt using Vertex AI
      // extractTextFromImage(bucketName, fileName);

      Response<AiMessage> modelResponse = null;
      if (prompt.length() > 0) {
        VertexAiChatModel vertexAiChatModel = VertexAiChatModel.builder()
            .endpoint("us-central1-aiplatform.googleapis.com:443")
            .project(projectID)
            .location(zone)
            .publisher("google")
            .modelName("chat-bison@001")
            .temperature(0.1)
            .maxOutputTokens(50)
            .topK(0)
            .topP(0.0)
            .maxRetries(3)
            .build();
        modelResponse = vertexAiChatModel.generate(UserMessage.from(prompt));
        logger.info("Result Chat Model: " + modelResponse.content().text());
      }

      if (prompt.length() > 0) {
        VertexAiLanguageModel vertexAiTextModel = VertexAiLanguageModel.builder()
            .endpoint("us-central1-aiplatform.googleapis.com:443")
            .project(projectID)
            .location(zone)
            .publisher("google")
            .modelName("text-bison@001")
            .temperature(0.1)
            .maxOutputTokens(50)
            .topK(0)
            .topP(0.0)
            .maxRetries(3)
            .build();
        Response<String> textResponse = vertexAiTextModel.generate(prompt);
        logger.info("Result Text Model: " + textResponse.content());
      }

      // Saving result to Firestore (only safe images with a model answer are persisted)
      if (isSafe && modelResponse != null) {
        ApiFuture<WriteResult> writeResult = eventService.storeImage(fileName, labels, mainColor, modelResponse.content().text());
        logger.info("Picture metadata saved in Firestore at " + writeResult.get().getUpdateTime());
      }
    }
    return new ResponseEntity<String>(msg, HttpStatus.OK);
  }

  // private void extractTextFromImage(String bucketName, String fileName) throws IOException {
  //   try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) {
  //     EndpointName name =
  //         EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
  //     Endpoint response = endpointServiceClient.getEndpoint(name);
  //     logger.info("Endpoint description: " +response.getDescription());
  //   }
  // }

  /** Formats an RGB triple as a lowercase hex color string, e.g. "#ff00aa". */
  private static String rgbHex(float red, float green, float blue) {
    return String.format("#%02x%02x%02x", (int)red, (int)green, (int)blue);
  }
}
// [END eventarc_audit_storage_handler]
| [
"dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder",
"dev.langchain4j.model.vertexai.VertexAiChatModel.builder"
] | [((9657, 10150), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10119), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10082), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10049), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10018), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9975), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9935), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9884), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9841), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9803), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9761), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((10409, 10906), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10875), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10838), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10805), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10774), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10731), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10691), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10640), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10597), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10559), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10517), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder')] |
package com.roy.langchainjavachat.controller;
import com.roy.langchainjavachat.annotation.ReWriteBody;
import com.roy.langchainjavachat.model.req.ChatMsgReq;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO;
/**
* 对话管理
*
* @author roy
*/
@Slf4j
@ReWriteBody
@RestController
@Api(tags = "对话管理")
@RequestMapping("/v1/chat/")
public class ChatController {

    // OpenAI API key injected from configuration. All endpoints must read this field
    // rather than System.getenv so key resolution is consistent and testable.
    @Value("${OPENAI_API_KEY}")
    String OPENAI_API_KEY;

    /**
     * Single-turn Q&A against the LLM.
     * NOTE(review): a new model client is built on every request; consider caching
     * the client in a field if this endpoint becomes hot.
     */
    @GetMapping("qa")
    @ApiOperation(value = "与大模型对话(单轮问答)")
    public String llmQA(@ApiParam(value = "问句", required = true) @RequestParam String question) {
        ChatLanguageModel model = OpenAiChatModel.builder()
                .baseUrl("https://dgr.life/v1")
                .apiKey(OPENAI_API_KEY) // Please use your own OpenAI API key
                .modelName(GPT_3_5_TURBO)
                .build();
        return model.generate(question);
    }

    /**
     * Multi-turn chat — not implemented yet.
     * NOTE(review): a GET endpoint with a @RequestBody is unusual; confirm the
     * intended HTTP method before implementing.
     */
    @GetMapping("chat")
    @ApiOperation(value = "与大模型对话(多轮问答)")
    public void llm(@ApiParam(value = "问句", required = true) @RequestBody List<ChatMsgReq> req) {
    }

    /**
     * Knowledge-base Q&A endpoint.
     * Consistency fix: uses the injected OPENAI_API_KEY field (previously read
     * System.getenv directly, bypassing Spring configuration).
     * NOTE(review): currently identical to llmQA — no knowledge-base retrieval is
     * wired in yet; confirm this is still a work in progress.
     */
    @GetMapping("knowledge_base_chat")
    @ApiOperation(value = "与知识库对话")
    public String knowledgeBaseChat(@ApiParam(value = "问句", required = true) @RequestParam String question) {
        ChatLanguageModel model = OpenAiChatModel.builder()
                .baseUrl("https://dgr.life/v1")
                .apiKey(OPENAI_API_KEY) // Please use your own OpenAI API key
                .modelName(GPT_3_5_TURBO)
                .build();
        return model.generate(question);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1077, 1295), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1270), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1190), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1150), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 2011), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1986), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1906), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1849), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package fr.anthonyquere.talkwithme.core.ai.langchain.models;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class MistralAIConfiguration {

    /** API key for the Mistral AI service, bound from application configuration. */
    @Value("${mistral-ia.api-key}")
    private String mistralApiKey;

    /**
     * Exposes a Mistral chat model as a Spring bean.
     *
     * @return a {@link ChatLanguageModel} backed by "mistral-tiny", capped at
     *         100 tokens per response, with request/response logging enabled
     */
    @Bean
    public ChatLanguageModel buildMistralModel() {
        return MistralAiChatModel.builder()
            // Model selection and credentials
            .modelName("mistral-tiny")
            .apiKey(mistralApiKey)
            // Generation settings
            .temperature(1.0d)
            .maxTokens(100)
            // Resilience and diagnostics
            .maxRetries(1)
            .logRequests(true)
            .logResponses(true)
            .build();
    }
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder"
] | [((535, 759), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 744), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 723), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 697), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 672), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 647), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 614), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 592), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
package com.example.block18springai.controller;
import com.example.block18springai.ai_config.ApiKeys;
import com.example.block18springai.ai_config.Assistant;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@RequestMapping("/ai")
@RestController
public class AIController_LangChain4j {

    // Sliding-window memory: keeps the 10 most recent messages as model context.
    ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

    // Assistant wired to the OpenAI chat model plus the memory above.
    Assistant assistant;

    public AIController_LangChain4j() {
        this.assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY))
                .chatMemory(chatMemory)
                .build();
    }

    /**
     * Sends one user message to the assistant and returns its reply.
     *
     * @param message the user's message
     * @return the assistant's reply
     */
    @PostMapping("/message")
    public String chat(@RequestParam String message) {
        return assistant.chat(message);
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((784, 959), 'dev.langchain4j.service.AiServices.builder'), ((784, 938), 'dev.langchain4j.service.AiServices.builder'), ((784, 902), 'dev.langchain4j.service.AiServices.builder')] |
package io.thomasvitale.langchain4j.spring.openai;
import java.util.List;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.Content;
import dev.langchain4j.data.message.ContentType;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import org.springframework.lang.Nullable;
import org.springframework.util.CollectionUtils;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionFinishReason;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionRequest;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionResponse;
import io.thomasvitale.langchain4j.spring.openai.api.chat.Tool;
import io.thomasvitale.langchain4j.spring.openai.api.embedding.EmbeddingResponse;
import io.thomasvitale.langchain4j.spring.openai.api.image.ImageGenerationResponse;
import io.thomasvitale.langchain4j.spring.openai.api.shared.Usage;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
/**
 * Utility class to convert between OpenAI and Langchain4j types.
 * <p>
 * Based on the original LangChain4j implementation.
 */
public final class OpenAiAdapters {

    private OpenAiAdapters() {
        // Static utility class; not meant to be instantiated.
    }

    /**
     * Converts from a list of LangChain4J ChatMessage to a list of OpenAI ChatCompletionMessage.
     */
    public static List<ChatCompletionMessage> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
            .map(OpenAiAdapters::toOpenAiMessage)
            .toList();
    }

    /**
     * Converts from LangChain4J ChatMessage to ChatCompletionMessage.
     *
     * @throws IllegalArgumentException if the concrete message type is unknown
     */
    private static ChatCompletionMessage toOpenAiMessage(ChatMessage message) {
        if (message instanceof SystemMessage systemMessage) {
            return ChatCompletionMessage.builder()
                .role(ChatCompletionMessage.Role.SYSTEM)
                .content(systemMessage.text())
                .build();
        }
        if (message instanceof UserMessage userMessage) {
            if (userMessage.hasSingleText()) {
                return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.USER)
                    .content(userMessage.text())
                    .name(userMessage.name())
                    .build();
            } else {
                // Multi-modal user message (text + images).
                // FIX: the USER role was previously missing on this branch,
                // producing a chat message without a role field.
                return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.USER)
                    .content(userMessage.contents().stream()
                        .map(OpenAiAdapters::toOpenAiContent)
                        .collect(toList()))
                    .name(userMessage.name())
                    .build();
            }
        }
        if (message instanceof AiMessage aiMessage) {
            // An assistant message carries either plain text or tool calls.
            if (!aiMessage.hasToolExecutionRequests()) {
                return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.ASSISTANT)
                    .content(aiMessage.text())
                    .build();
            }
            return ChatCompletionMessage.builder()
                .role(ChatCompletionMessage.Role.ASSISTANT)
                .toolCalls(toOpenAiToolCalls(aiMessage.toolExecutionRequests()))
                .build();
        }
        if (message instanceof ToolExecutionResultMessage toolExecutionResultMessage) {
            return ChatCompletionMessage.builder()
                .role(ChatCompletionMessage.Role.TOOL)
                .content(toolExecutionResultMessage.text())
                .toolCallId(toolExecutionResultMessage.id())
                .build();
        }
        throw new IllegalArgumentException("Unknown message type: " + message.type());
    }

    /**
     * Converts from LangChain4J Content to OpenAI Content.
     *
     * @throws IllegalArgumentException if the content type is neither TEXT nor IMAGE
     */
    public static ChatCompletionMessage.Content toOpenAiContent(Content content) {
        if (ContentType.TEXT.equals(content.type())) {
            var textContent = (TextContent) content;
            return ChatCompletionMessage.Content.builder()
                .type(ChatCompletionMessage.ContentType.TEXT)
                .text(textContent.text())
                .build();
        } else if (ContentType.IMAGE.equals(content.type())) {
            var imageContent = (ImageContent) content;
            return ChatCompletionMessage.Content.builder()
                .type(ChatCompletionMessage.ContentType.IMAGE_URL)
                .imageUrl(new ChatCompletionMessage.ImageUrl(
                        toOpenAiUrl(imageContent.image()),
                        toOpenAiDetail(imageContent.detailLevel())))
                .build();
        } else {
            throw new IllegalArgumentException("Unknown content type: " + content.type());
        }
    }

    /**
     * Converts from LangChain4J Image to OpenAI Image URL.
     * Images without a URL are inlined as a base64 data URI.
     */
    private static String toOpenAiUrl(Image image) {
        if (image.url() != null) {
            return image.url().toString();
        }
        return format("data:%s;base64,%s", image.mimeType(), image.base64Data());
    }

    /**
     * Converts from LangChain4J DetailLevel to OpenAI DetailLevel.
     * Returns null when no detail level was specified.
     */
    private static String toOpenAiDetail(ImageContent.DetailLevel detailLevel) {
        if (detailLevel == null) {
            return null;
        }
        return detailLevel.name();
    }

    /**
     * Converts from a list of LangChain4J ToolExecutionRequest to a list of OpenAI ToolCall.
     */
    private static List<ChatCompletionMessage.ToolCall> toOpenAiToolCalls(List<ToolExecutionRequest> toolExecutionRequests) {
        return toolExecutionRequests.stream()
            .map(OpenAiAdapters::toOpenAiToolCall)
            .toList();
    }

    /**
     * Converts from LangChain4J ToolExecutionRequest to OpenAI ToolCall.
     */
    private static ChatCompletionMessage.ToolCall toOpenAiToolCall(ToolExecutionRequest toolExecutionRequest) {
        var functionCall = new ChatCompletionMessage.ChatCompletionFunction(
                toolExecutionRequest.name(),
                toolExecutionRequest.arguments());
        return new ChatCompletionMessage.ToolCall(toolExecutionRequest.id(), functionCall);
    }

    /**
     * Converts from LangChain4J ToolSpecification to OpenAI ToolChoice.
     */
    public static ChatCompletionRequest.ToolChoice toOpenAiToolChoice(ToolSpecification toolSpecification) {
        return new ChatCompletionRequest.ToolChoice(toolSpecification.name());
    }

    /**
     * Converts from a list of LangChain4J ToolSpecification to a list of OpenAI Tool.
     */
    public static List<Tool> toOpenAiTools(List<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
            .map(OpenAiAdapters::toOpenAiTool)
            .toList();
    }

    /**
     * Converts from LangChain4J ToolSpecification to OpenAI Tool.
     */
    private static Tool toOpenAiTool(ToolSpecification toolSpecification) {
        var function = Tool.Function.builder()
            .description(toolSpecification.description())
            .name(toolSpecification.name())
            .parameters(OpenAiAdapters.toOpenAiParameters(toolSpecification.parameters()))
            .build();
        return new Tool(function);
    }

    /**
     * Converts from LangChain4J ToolParameters to OpenAI Tool.Parameters.
     * A null input yields empty parameters.
     */
    private static Tool.Parameters toOpenAiParameters(@Nullable ToolParameters toolParameters) {
        if (toolParameters == null) {
            return Tool.Parameters.builder().build();
        }
        return Tool.Parameters.builder()
            .properties(toolParameters.properties())
            .required(toolParameters.required())
            .build();
    }

    /**
     * Converts from OpenAI Usage to LangChain4J Usage.
     */
    public static TokenUsage toTokenUsage(Usage usage) {
        return new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens());
    }

    /**
     * Converts from OpenAI ChatCompletionResponse to LangChain4J AiMessage.
     * Only the first choice is considered; tool calls take precedence over text content.
     */
    public static AiMessage toAiMessage(ChatCompletionResponse response) {
        var assistantMessage = response.choices().get(0).message();

        var toolCalls = assistantMessage.toolCalls();
        if (!(CollectionUtils.isEmpty(toolCalls))) {
            List<ToolExecutionRequest> toolExecutionRequests = toolCalls.stream()
                .filter(toolCall -> "function".equals(toolCall.type()))
                .map(OpenAiAdapters::toToolExecutionRequest)
                .toList();
            return AiMessage.from(toolExecutionRequests);
        }

        return AiMessage.from((String) assistantMessage.content());
    }

    /**
     * Converts from OpenAI ToolCall to LangChain4J ToolExecutionRequest.
     */
    private static ToolExecutionRequest toToolExecutionRequest(ChatCompletionMessage.ToolCall toolCall) {
        return ToolExecutionRequest.builder()
            .id(toolCall.id())
            .name(toolCall.function().name())
            .arguments(toolCall.function().arguments())
            .build();
    }

    /**
     * Converts from OpenAI ChatCompletionFinishReason to LangChain4J FinishReason.
     */
    public static FinishReason toFinishReason(ChatCompletionFinishReason finishReason) {
        return switch (finishReason) {
            case STOP -> FinishReason.STOP;
            case LENGTH -> FinishReason.LENGTH;
            case TOOL_CALLS -> FinishReason.TOOL_EXECUTION;
            case CONTENT_FILTER -> FinishReason.CONTENT_FILTER;
        };
    }

    /**
     * Converts from OpenAI EmbeddingData to LangChain4J Embedding
     * (doubles are narrowed to floats).
     */
    public static Embedding toEmbedding(EmbeddingResponse.EmbeddingData embeddingData) {
        var floatVectors = embeddingData.embedding().stream()
            .map(Double::floatValue)
            .toList();
        return Embedding.from(floatVectors);
    }

    /**
     * Converts from OpenAI ImageData to LangChain4J Image.
     */
    public static Image toImage(ImageGenerationResponse.ImageData imageData) {
        return Image.builder()
            .url(imageData.url())
            .base64Data(imageData.b64Json())
            .revisedPrompt(imageData.revisedPrompt())
            .build();
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
"dev.langchain4j.data.message.ContentType.TEXT.equals",
"dev.langchain4j.data.image.Image.builder",
"dev.langchain4j.data.message.ContentType.IMAGE.equals"
] | [((2434, 2606), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2434, 2577), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2434, 2526), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2977), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2944), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2894), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2841), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3324), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3291), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3241), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3668), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3635), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3584), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3913), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3884), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3799), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4281), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4252), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4187), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4123), 
'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4559, 4598), 'dev.langchain4j.data.message.ContentType.TEXT.equals'), ((4674, 4854), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4825), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4779), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4713), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4875, 4915), 'dev.langchain4j.data.message.ContentType.IMAGE.equals'), ((4993, 5334), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5305), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5103), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5032), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((7675, 7928), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7903), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7808), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7760), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7698), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((8217, 8250), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8217, 8242), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8437), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8412), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8359), 
'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8302), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((9635, 9835), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9810), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9750), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9700), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((10831, 11016), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10991), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10933), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10884), 'dev.langchain4j.data.image.Image.builder')] |
package org.jugph;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.UrlDocumentLoader;
import dev.langchain4j.data.document.transformer.HtmlTextExtractor;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.UserMessage;
import static java.time.Duration.ofSeconds;
public class JavaNewsRetrieverToolsExample {

    /** Tool exposed to the LLM for fetching the latest Java news from dev.java. */
    static class JavaNewsRetriever {

        @Tool("Retrieves the latest java news. Limit to the 3 latest news")
        String retrieveJavaNews() {
            // Download the news page and strip it down to the main content container.
            Document newsPage = UrlDocumentLoader.load("https://dev.java/news/");
            Document mainContent = new HtmlTextExtractor(".container", null, true)
                    .transform(newsPage);
            // Collapse newlines so the tool output is a single line of text.
            return mainContent.text().replaceAll("\n", " ");
        }
    }

    /** Conversational assistant that may invoke the news-retrieval tool. */
    interface Assistant {
        String chat(String userMessage);
    }

    /** Assistant that reformats jumbled news text into a numbered summary. */
    interface NewsPrettierAssistant {
        @UserMessage("Given a jumbled java news {{it}}, summarize each, and list down them in numerical format, latest to oldest. " +
                "Include details such as url and date announced.")
        String prettify(String userMessage);
    }

    public static void main(String[] args) {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .timeout(ofSeconds(120))
                .build();

        // Assistant with tool access and a 10-message sliding-window memory.
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(chatModel)
                .tools(new JavaNewsRetriever())
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();

        // Second assistant used purely for reformatting the raw answer.
        NewsPrettierAssistant prettier = AiServices.builder(NewsPrettierAssistant.class)
                .chatLanguageModel(chatModel)
                .build();

        String question = "What are latest java news?";
        String rawAnswer = assistant.chat(question);
        String prettyAnswer = prettier.prettify(rawAnswer);
        System.out.println("\n=================================\n" + prettyAnswer);
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1405, 1553), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1405, 1528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1405, 1487), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1580, 1803), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1778), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1705), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1657), 'dev.langchain4j.service.AiServices.builder'), ((1842, 1956), 'dev.langchain4j.service.AiServices.builder'), ((1842, 1931), 'dev.langchain4j.service.AiServices.builder')] |
package io.quarkiverse.langchain4j.sample.chatbot;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.io.File;
import java.util.List;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkus.logging.Log;
import io.quarkus.runtime.StartupEvent;
@ApplicationScoped
public class IngestorExample {
    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     * NOTE(review): raw {@code EmbeddingStore} type — presumably
     * {@code EmbeddingStore<TextSegment>}; parameterizing it may affect CDI bean
     * resolution, confirm before changing.
     */
    @Inject
    EmbeddingStore store;

    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    /**
     * On application startup, loads every document under
     * src/main/resources/catalog and ingests it into the embedding store,
     * splitting each document into chunks of up to 500 characters (no overlap).
     */
    public void ingest(@Observes StartupEvent event) {
        Log.infof("Ingesting documents...");
        // Parse each file in the catalog directory as plain text.
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(),
                new TextDocumentParser());
        var ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                // recursive(500, 0): chunk size 500, no overlap between chunks.
                .documentSplitter(recursive(500, 0))
                .build();
        ingestor.ingest(documents);
        Log.infof("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1441, 1638), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1613), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1560), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1512), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package ma.enset.projet.service;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore;
import ma.enset.projet.Dao.DocumentImp;
import ma.enset.projet.Dao.VectorBd;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
public class ConversationBuild {

    /** Sliding-window chat memory keeping the 20 most recent messages. */
    private final ChatMemory customChatMemory = MessageWindowChatMemory.withMaxMessages(20);

    // Wrapper around the document currently loaded for retrieval (set by build()).
    private DocumentImp document;

    // Vector database access; replaceable via the setter (e.g. for tests).
    private VectorBd vectorBd = new VectorBd() ;

    public void setVectorBd(VectorBd vectorBd) {
        this.vectorBd = vectorBd;
    }

    public ConversationBuild() {
    }

    public ChatMemory getCustomChatMemory() {
        return customChatMemory;
    }

    public DocumentImp getDocument() {
        return document;
    }

    public void setDocument(DocumentImp document) {
        this.document = document;
    }

    public VectorBd getVectorBd() {
        return vectorBd;
    }

    public int getMaxResults() {
        return maxResults;
    }

    public Double getMinScore() {
        return minScore;
    }

    // Retrieval settings: at most 4 segments, minimum similarity score 0.7.
    private final int maxResults = 4;
    private final Double minScore = 0.7;

    /**
     * Creates a retriever over the Chroma store using a local MiniLM embedding model.
     *
     * @return retriever returning at most {@code maxResults} segments scoring above {@code minScore}
     */
    public EmbeddingStoreRetriever retriever(){
        ChromaEmbeddingStore chromaStore = vectorBd.getConnection();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        return new EmbeddingStoreRetriever(chromaStore, embeddingModel, maxResults, minScore);
    }

    /**
     * Loads the document at {@code path} and converts it to text.
     *
     * @param path path of the source document
     * @throws IOException if the converted text file cannot be read
     */
    public void build(String path) throws IOException {
        document = new DocumentImp(path);
        String txtFilePath = document.setTxtFilePath(path);
        String txtContent = Files.readString(Path.of(txtFilePath));
        // NOTE(review): ingestion into the vector store is currently disabled.
        // The two calls below are kept because they may have side effects inside
        // DocumentImp — confirm before removing the unused locals.
        DocumentSplitter lineSplitter = document.SplitDocument();
        Document doc = document.Document(txtContent);
    }

    /**
     * Builds the conversational retrieval chain used to answer questions.
     *
     * @param API OpenAI API key
     * @return chain combining the chat model, memory, prompt template and retriever
     */
    public ConversationalRetrievalChain chain(String API){
        return ConversationalRetrievalChain.builder()
                .chatLanguageModel(OpenAiChatModel.withApiKey(API))
                .chatMemory(customChatMemory)
                .promptTemplate(document.template())
                .retriever(retriever())
                .build();
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder"
] | [((2701, 2971), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2946), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2906), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2853), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2807), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package com.datawise.bertdocqa.config;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import dev.langchain4j.data.segment.TextSegment;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ElasticsearchConfig {

    // Connection and index settings, bound from application properties.
    @Value("${elasticsearch.server.url}")
    private String serverUrl;

    @Value("${elasticsearch.index.name}")
    private String indexName;

    @Value("${elasticsearch.dimension}")
    private int dimension;

    /**
     * Builds the Elasticsearch-backed embedding store bean using the configured
     * server URL, index name and vector dimension.
     *
     * @return an {@link EmbeddingStore} persisting text-segment embeddings
     */
    @Bean
    public EmbeddingStore<TextSegment> embeddingStore() {
        return ElasticsearchEmbeddingStore.builder()
                .dimension(dimension)
                .indexName(indexName)
                .serverUrl(serverUrl)
                .build();
    }
}
| [
"dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder"
] | [((747, 923), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 898), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 860), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 822), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder')] |
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.transformer.CompressingQueryTransformer;
import dev.langchain4j.rag.query.transformer.QueryTransformer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
import java.util.Scanner;
public class _02_Advanced_RAG_with_Query_Compression {
/**
* Please refer to previous examples for basic context.
* <p>
* Advanced RAG in LangChain4j is described here: https://github.com/langchain4j/langchain4j/pull/538
* <p>
* This example illustrates the implementation of a more sophisticated RAG application
* using a technique known as "query compression".
* Often, a query from a user is a follow-up question that refers back to earlier parts of the conversation
* and lacks all the necessary details for effective retrieval.
* For example, consider this conversation:
* User: What is the legacy of John Doe?
* AI: John Doe was a...
* User: When was he born?
* <p>
* In such scenarios, using a basic RAG approach with a query like "When was he born?"
* would likely fail to find articles about John Doe, as it doesn't contain "John Doe" in the query.
* Query compression involves taking the user's query and the preceding conversation, then asking the LLM
* to "compress" this into a single, self-contained query.
* The LLM should generate a query like "When was John Doe born?".
* This method adds a bit of latency and cost but significantly enhances the quality of the RAG process.
* It's worth noting that the LLM used for compression doesn't have to be the same as the one
* used for conversation. For instance, you might use a smaller local model trained for summarization.
* <p>
* In this example, we will continue using {@link AiServices},
* but the same principles apply to {@link ConversationalRetrievalChain}, or you can develop your custom RAG flow.
*/
public static void main(String[] args) {
Biographer biographer = createBiographer();
// First, ask "What is the legacy of John Doe?"
// Then, ask "When was he born?"
// Now, review the logs:
// The first query was not compressed as there was no preceding context to compress.
// The second query, however, was compressed into something like "When was John Doe born?"
try (Scanner scanner = new Scanner(System.in)) {
while (true) {
System.out.println("==================================================");
System.out.print("User: ");
String userQuery = scanner.nextLine();
System.out.println("==================================================");
if ("exit".equalsIgnoreCase(userQuery)) {
break;
}
String biographerAnswer = biographer.answer(userQuery);
System.out.println("==================================================");
System.out.println("Biographer: " + biographerAnswer);
}
}
}
private static Biographer createBiographer() {
// Check _01_Naive_RAG if you need more details on what is going on here
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.apiKey("demo")
.build();
EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
Path documentPath = toPath("biography-of-john-doe.txt");
EmbeddingStore<TextSegment> embeddingStore = embed(documentPath, embeddingModel);
// We will create a CompressingQueryTransformer, which is responsible for compressing
// the user's query and the preceding conversation into a single, stand-alone query.
// This should significantly improve the quality of the retrieval process.
QueryTransformer queryTransformer = new CompressingQueryTransformer(chatModel);
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(2)
.minScore(0.6)
.build();
// The RetrievalAugmentor serves as the entry point into the RAG flow in LangChain4j.
// It can be configured to customize the RAG behavior according to your requirements.
// In subsequent examples, we will explore more customizations.
RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
.queryTransformer(queryTransformer)
.contentRetriever(contentRetriever)
.build();
return AiServices.builder(Biographer.class)
.chatLanguageModel(chatModel)
.retrievalAugmentor(retrievalAugmentor)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
}
private static EmbeddingStore<TextSegment> embed(Path documentPath, EmbeddingModel embeddingModel) {
DocumentParser documentParser = new TextDocumentParser();
Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
DocumentSplitter splitter = DocumentSplitters.recursive(300, 0);
List<TextSegment> segments = splitter.split(document);
List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
embeddingStore.addAll(embeddings, segments);
return embeddingStore;
}
interface Biographer {
String answer(String query);
}
private static Path toPath(String fileName) {
try {
URL fileUrl = _02_Advanced_RAG_with_Query_Compression.class.getResource(fileName);
return Paths.get(fileUrl.toURI());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((4539, 4621), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4539, 4596), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5259, 5482), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5457), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5426), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5395), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5347), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5793, 5957), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5793, 5932), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5793, 5880), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5975, 6211), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6186), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6113), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6057), 'dev.langchain4j.service.AiServices.builder')] |
package _Engenharia;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
//import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
import static java.time.Duration.ofSeconds;
import assistente.ApiKeys;
/**
 * Question-answering assistant combining a token-window chat memory with
 * retrieval over an in-memory embedding store built from "template.txt".
 */
public class AssistenteMemoryDocument {

    // HuggingFace sentence-embedding model id used to vectorize text segments.
    private static final String vetModel = "sentence-transformers/all-MiniLM-L6-v2";
    // Conversation memory shared across calls, capped at 300 tokens
    // (token counting uses the GPT-3.5-turbo tokenizer).
    private static final ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(300, new OpenAiTokenizer(GPT_3_5_TURBO));

    /**
     * Answers a question using "template.txt" as retrieval context.
     * NOTE(review): the embedding store is rebuilt and the document re-ingested
     * on every call — consider caching if invoked repeatedly.
     *
     * @param pergunta the user's question
     * @return the chain's answer
     * @throws Exception propagated from document loading or model calls
     */
    public String fazerPergunta(String pergunta) throws Exception {
        // Chat memory (held in the static field above).
        //ChatLanguageModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);

        // Build the OpenAI chat language model (deterministic: temperature 0).
        ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
            .apiKey(ApiKeys.OPENAI_API_KEY)
            .temperature(0.0)
            .timeout(ofSeconds(900))
            .build();

        // Build the HuggingFace embedding model; waitForModel blocks until the
        // hosted model is loaded instead of failing immediately.
        EmbeddingModel embeddingModel = HuggingFaceEmbeddingModel.builder()
            .accessToken(ApiKeys.HF_API_KEY)
            .modelId(vetModel)
            .waitForModel(true)
            .timeout(ofSeconds(60))
            .build();

        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        // Ingest the file's text: split into 500-char segments (no overlap),
        // embed each segment, and store the vectors.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
            .documentSplitter(DocumentSplitters.recursive(500, 0))
            .embeddingModel(embeddingModel)
            .embeddingStore(embeddingStore)
            .build();

        Document document = loadDocument(toPath("template.txt"));
        ingestor.ingest(document);

        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
            .chatLanguageModel(chatLanguageModel)
            .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
            .chatMemory(chatMemory) // you can override default chat memory
            // .promptTemplate() // you can override default prompt template
            .build();

        chatMemory.add(userMessage(pergunta));
        return chain.execute(pergunta);
    }

    // Resolves a resource file name to a filesystem path.
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = AssistenteMemoryDocument.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2335, 2508), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2483), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2442), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2408), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2615, 2835), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2810), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2770), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2734), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2699), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((3022, 3246), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3221), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3173), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3125), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3396, 3763), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3617), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3577), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3488), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package io.quarkiverse.langchain4j.sample.chatbot;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
@ApplicationScoped
public class CsvIngestorExample {

    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;

    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    // Path of the CSV file to ingest, from the 'csv.file' config property.
    @ConfigProperty(name = "csv.file")
    File file;

    // Column names expected in the CSV, from the 'csv.headers' config property.
    @ConfigProperty(name = "csv.headers")
    List<String> headers;

    /**
     * At application startup, reads the configured CSV file, builds one
     * Document per row ("header: value" lines, headers also copied into the
     * document metadata), and ingests everything into the Redis embedding store.
     *
     * @param event Quarkus startup event (observer trigger only)
     * @throws IOException if the CSV file cannot be read
     */
    public void ingest(@Observes StartupEvent event) throws IOException {
        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
                .setHeader(headers.toArray(new String[0]))
                .setSkipHeaderRecord(true)
                .build();
        List<Document> documents = new ArrayList<>();
        try (Reader reader = new FileReader(file)) {
            // Generate one document per row; each document uses the following syntax:
            // key1: value1
            // key2: value2
            Iterable<CSVRecord> records = csvFormat.parse(reader);
            int i = 1;
            for (CSVRecord record : records) {
                Map<String, String> metadata = new HashMap<>();
                metadata.put("source", file.getAbsolutePath());
                metadata.put("row", String.valueOf(i++));

                StringBuilder content = new StringBuilder();
                for (String header : headers) {
                    metadata.put(header, record.get(header)); // Include all headers in the metadata.
                    content.append(header).append(": ").append(record.get(header)).append("\n");
                }
                documents.add(new Document(content.toString(), Metadata.from(metadata)));
            }
            // Split into 300-char segments (no overlap), embed, and store.
            var ingestor = EmbeddingStoreIngestor.builder()
                    .embeddingStore(store)
                    .embeddingModel(embeddingModel)
                    .documentSplitter(recursive(300, 0))
                    .build();
            ingestor.ingest(documents);
            System.out.printf("Ingested %d documents.%n", documents.size());
        }
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1575, 1729), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1704), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1661), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1602), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((2766, 2979), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2950), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2893), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2841), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package org.goafabric.dbagent.ai;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import static java.time.Duration.ofSeconds;
/**
 * Spring configuration wiring an OpenAI chat model and the database agent,
 * active only under the "openai" profile.
 */
@Configuration
@Profile("openai")
public class OpenAiConfiguration {

    /**
     * OpenAI-backed chat model (gpt-3.5-turbo, deterministic, 30s timeout).
     * NOTE(review): the injected databaseTool parameter is unused here —
     * confirm whether it can be removed. The "demo" API key is hard-coded;
     * verify this is intentional and not a placeholder.
     */
    @Bean
    ChatLanguageModel chatModelOpenAi(DatabaseTool databaseTool) {
        return OpenAiChatModel.builder().apiKey("demo")
                .modelName("gpt-3.5-turbo")
                .timeout(ofSeconds(30)).temperature(0.0)
                .build();
    }

    /**
     * DatabaseAgent AI service backed by the chat model above, with a
     * 20-message sliding-window memory and the database tool registered.
     */
    @Bean
    DatabaseAgent databaseAgent(ChatLanguageModel chatLanguageModel, DatabaseTool databaseTool) {
        return AiServices.builder(DatabaseAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(databaseTool)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((620, 786), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 761), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 744), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 704), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 660), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((918, 1146), 'dev.langchain4j.service.AiServices.builder'), ((918, 1121), 'dev.langchain4j.service.AiServices.builder'), ((918, 1084), 'dev.langchain4j.service.AiServices.builder'), ((918, 1011), 'dev.langchain4j.service.AiServices.builder')] |
package com.elec5620.studyhelper.api.llm;
import io.github.cdimascio.dotenv.Dotenv;
import java.net.URL;
import java.util.Scanner;
import com.elec5620.studyhelper.core.HelperSystem;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
/**
 * Retrieval-augmented chat helper: embeds a document with a local
 * all-MiniLM-L6-v2 model, stores the vectors in memory, and answers
 * questions via an OpenAI chat model.
 */
public class ChatBot {

    /**
     * Answers {@code request} using the document at {@code filePath} as
     * retrieval context.
     *
     * @param filePath path of the document to ingest
     * @param request  the user's question
     * @return the model's answer with literal "\n" sequences expanded, or the
     *         exception message on failure (best-effort contract preserved)
     */
    public static String response(String filePath, String request) {
        Dotenv dotenv = Dotenv.load();
        String token = dotenv.get("OPENAI_API_KEY");
        System.out.println("file path: " + filePath);
        try {
            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            OpenAiChatModel model = OpenAiChatModel.builder().apiKey(token).timeout(Duration.ofMinutes(1)).build();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

            // Split into 500-char segments (no overlap), embed, and store.
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(500, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            Document document = loadDocument(toPath(filePath));
            ingestor.ingest(document);

            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(model)
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    // .chatMemory() // you can override default chat memory
                    // .promptTemplate() // you can override default prompt template
                    .build();

            String answer = chain.execute(request);
            System.out.println(answer); // answer based on given information
            // Expand literal "\n" escape sequences into real line separators.
            answer = answer.replace("\\n", System.lineSeparator());
            return answer;
        } catch (Exception e) {
            // Best-effort: surface the failure message to the caller instead of throwing.
            return e.getMessage();
        }
    }

    /**
     * Interactive demo: reads one question from stdin and answers it using
     * "example.txt" as retrieval context.
     */
    public static void main(String[] args) {
        Dotenv dotenv = Dotenv.load();
        String token = dotenv.get("OPENAI_API_KEY");
        try (Scanner myObj = new Scanner(System.in)) {
            System.out.println("Enter question to ask:");
            String question = myObj.nextLine();

            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(500, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            Document document = loadDocument(toPath("example.txt"));
            ingestor.ingest(document);

            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.withApiKey(token))
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    .build();

            String answer = chain.execute(question);
            System.out.println(answer); // answer based on given information
        }
    }

    /**
     * Converts a file name to an absolute {@link Path} via its URL/URI form.
     *
     * @throws RuntimeException if the name cannot be converted (cause preserved)
     */
    private static Path toPath(String fileName) {
        try {
            Path filePath = Paths.get(fileName);
            URL fileURL = filePath.toUri().toURL();
            return Paths.get(fileURL.toURI());
        } catch (URISyntaxException | MalformedURLException e) {
            // Fix: the original printed the MalformedURLException's stack trace
            // and returned null, which later surfaced as an uninformative NPE
            // in loadDocument. Rethrow with the cause instead.
            throw new RuntimeException("Invalid file path: " + fileName, e);
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1554, 1632), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1554, 1624), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1554, 1593), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1788, 2060), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 2023), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 1963), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 1903), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2248, 2656), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2248, 2441), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2248, 2340), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3503, 3759), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3726), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3670), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3614), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3940, 4356), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3940, 4153), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3940, 4056), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package embedding.model;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiEmbeddingModel;
/**
 * Demonstrates computing a text embedding with Google Vertex AI
 * (textembedding-gecko) and printing the raw response.
 */
public class VertexAiEmbeddingModelExample {

    public static void main(String[] args) {
        // Configure the Vertex AI embedding model: gecko model published by
        // Google in the langchain4j project, served from us-central1.
        EmbeddingModel model = VertexAiEmbeddingModel.builder()
                .modelName("textembedding-gecko@001")
                .publisher("google")
                .project("langchain4j")
                .location("us-central1")
                .endpoint("us-central1-aiplatform.googleapis.com:443")
                .build();

        // Embed a sample sentence; the Response carries the vector plus metadata.
        String sample = "Hello, how are you?";
        Response<Embedding> embeddingResponse = model.embed(sample);
        System.out.println(embeddingResponse);
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder"
] | [((371, 671), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 646), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 592), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 555), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 514), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 474), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder')] |
package io.quarkiverse.langchain4j.deployment;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.ARRAY;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.BOOLEAN;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.NUMBER;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.OBJECT;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.STRING;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.description;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.enums;
import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toList;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.MethodInfo;
import org.jboss.jandex.MethodParameterInfo;
import org.jboss.jandex.Type;
import org.jboss.logging.Logger;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Opcodes;
import dev.langchain4j.agent.tool.JsonSchemaProperty;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolMemoryId;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import io.quarkiverse.langchain4j.runtime.ToolsRecorder;
import io.quarkiverse.langchain4j.runtime.prompt.Mappable;
import io.quarkiverse.langchain4j.runtime.tool.ToolInvoker;
import io.quarkiverse.langchain4j.runtime.tool.ToolMethodCreateInfo;
import io.quarkiverse.langchain4j.runtime.tool.ToolParametersObjectSubstitution;
import io.quarkiverse.langchain4j.runtime.tool.ToolSpanWrapper;
import io.quarkiverse.langchain4j.runtime.tool.ToolSpecificationObjectSubstitution;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.ValidationPhaseBuildItem;
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.GeneratedClassGizmoAdaptor;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem;
import io.quarkus.deployment.builditem.CombinedIndexBuildItem;
import io.quarkus.deployment.builditem.GeneratedClassBuildItem;
import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem;
import io.quarkus.deployment.recording.RecorderContext;
import io.quarkus.gizmo.ClassCreator;
import io.quarkus.gizmo.ClassOutput;
import io.quarkus.gizmo.ClassTransformer;
import io.quarkus.gizmo.FieldDescriptor;
import io.quarkus.gizmo.MethodCreator;
import io.quarkus.gizmo.MethodDescriptor;
import io.quarkus.gizmo.ResultHandle;
/**
 * Build-time processor for {@code @Tool}-annotated methods.
 * Validates declared tools, generates bytecode (via Gizmo) for a per-method
 * invoker and argument mapper, and records the resulting
 * {@link ToolSpecification} metadata for runtime use.
 */
public class ToolProcessor {

    // NOTE(review): logger is keyed to AiServicesProcessor, not ToolProcessor —
    // confirm this is intentional.
    private static final Logger log = Logger.getLogger(AiServicesProcessor.class);

    private static final DotName TOOL = DotName.createSimple(Tool.class);
    private static final DotName TOOL_MEMORY_ID = DotName.createSimple(ToolMemoryId.class);
    private static final DotName P = DotName.createSimple(dev.langchain4j.agent.tool.P.class);
    // Descriptors reused by the Gizmo bytecode generation below.
    private static final MethodDescriptor METHOD_METADATA_CTOR = MethodDescriptor
            .ofConstructor(ToolInvoker.MethodMetadata.class, boolean.class, Map.class, Integer.class);
    private static final MethodDescriptor HASHMAP_CTOR = MethodDescriptor.ofConstructor(HashMap.class);
    public static final MethodDescriptor MAP_PUT = MethodDescriptor.ofMethod(Map.class, "put", Object.class, Object.class,
            Object.class);

    /**
     * Registers {@link ToolSpanWrapper} as a bean when the OpenTelemetry
     * tracer capability is present, so tool invocations are traced.
     */
    @BuildStep
    public void telemetry(Capabilities capabilities, BuildProducer<AdditionalBeanBuildItem> additionalBeanProducer) {
        var addOpenTelemetrySpan = capabilities.isPresent(Capability.OPENTELEMETRY_TRACER);
        if (addOpenTelemetrySpan) {
            additionalBeanProducer.produce(AdditionalBeanBuildItem.builder().addBeanClass(ToolSpanWrapper.class).build());
        }
    }

    /**
     * Discovers all {@code @Tool} methods, validates them (concrete declaring
     * classes only, globally unique method names), strips {@code private} from
     * private tool methods via bytecode transformation, generates an invoker
     * and an argument-mapper class per tool, registers those generated classes
     * for reflection, and records the collected metadata.
     */
    @BuildStep
    @Record(ExecutionTime.STATIC_INIT)
    public void handleTools(CombinedIndexBuildItem indexBuildItem,
            ToolsRecorder recorder,
            RecorderContext recorderContext,
            BuildProducer<BytecodeTransformerBuildItem> transformerProducer,
            BuildProducer<GeneratedClassBuildItem> generatedClassProducer,
            BuildProducer<ReflectiveClassBuildItem> reflectiveClassProducer,
            BuildProducer<ValidationPhaseBuildItem.ValidationErrorBuildItem> validation,
            BuildProducer<ToolsMetadataBuildItem> toolsMetadataProducer) {

        // Register substitutions so these langchain4j objects can be captured
        // by the build-time recorder.
        recorderContext.registerSubstitution(ToolSpecification.class, ToolSpecificationObjectSubstitution.Serialized.class,
                ToolSpecificationObjectSubstitution.class);
        recorderContext.registerSubstitution(ToolParameters.class, ToolParametersObjectSubstitution.Serialized.class,
                ToolParametersObjectSubstitution.class);

        IndexView index = indexBuildItem.getIndex();

        Collection<AnnotationInstance> instances = index.getAnnotations(TOOL);

        Map<String, List<ToolMethodCreateInfo>> metadata = new HashMap<>();

        List<String> generatedInvokerClasses = new ArrayList<>();
        List<String> generatedArgumentMapperClasses = new ArrayList<>();
        if (!instances.isEmpty()) {
            ClassOutput classOutput = new GeneratedClassGizmoAdaptor(generatedClassProducer, true);

            // Group @Tool methods by their declaring class.
            Map<DotName, List<MethodInfo>> methodsPerClass = new HashMap<>();

            for (AnnotationInstance instance : instances) {
                if (instance.target().kind() != AnnotationTarget.Kind.METHOD) {
                    continue;
                }
                MethodInfo methodInfo = instance.target().asMethod();
                ClassInfo classInfo = methodInfo.declaringClass();
                // Tools on interfaces/abstract classes are rejected with a build-time error.
                if (classInfo.isInterface() || Modifier.isAbstract(classInfo.flags())) {
                    validation.produce(
                            new ValidationPhaseBuildItem.ValidationErrorBuildItem(new IllegalStateException(
                                    "@Tool is only supported on non-abstract classes, all other usages are ignored. Offending method is '"
                                            + methodInfo.declaringClass().name().toString() + "#" + methodInfo.name() + "'")));
                    continue;
                }

                DotName declaringClassName = classInfo.name();
                methodsPerClass.computeIfAbsent(declaringClassName, (n -> new ArrayList<>())).add(methodInfo);
            }

            boolean validationErrorFound = false;
            // Tracks tool method names seen so far, to enforce global uniqueness.
            Map<String, ClassInfo> discoveredTools = new HashMap<>();
            for (var entry : methodsPerClass.entrySet()) {
                DotName className = entry.getKey();

                List<MethodInfo> toolMethods = entry.getValue();
                List<MethodInfo> privateMethods = new ArrayList<>();
                for (MethodInfo toolMethod : toolMethods) {
                    // Validation
                    // - Must not have another tool with the same method name
                    // - Must have at least one parameter
                    if (discoveredTools.containsKey(toolMethod.name())) {
                        validation.produce(
                                new ValidationPhaseBuildItem.ValidationErrorBuildItem(new IllegalStateException(
                                        "A tool with the name '" + toolMethod.name() + "' from class '"
                                                + className + "' is already declared in class '"
                                                + discoveredTools.get(toolMethod.name())
                                                + "'. Tools method name must be unique.")));
                        validationErrorFound = true;
                        continue;
                    }
                    discoveredTools.put(toolMethod.name(), toolMethod.declaringClass());
                    if (Modifier.isPrivate(toolMethod.flags())) {
                        privateMethods.add(toolMethod);
                    }
                }
                // Private tool methods must become non-private so the generated
                // invoker can call them.
                if (!privateMethods.isEmpty()) {
                    transformerProducer.produce(new BytecodeTransformerBuildItem(className.toString(),
                            new RemovePrivateFromMethodsVisitor(privateMethods)));
                }

                // NOTE(review): this aborts mid-iteration once any duplicate was
                // found, skipping class generation for remaining classes —
                // confirm this early return is intentional.
                if (validationErrorFound) {
                    return;
                }

                for (MethodInfo toolMethod : toolMethods) {
                    AnnotationInstance instance = toolMethod.annotation(TOOL);

                    AnnotationValue nameValue = instance.value("name");
                    AnnotationValue descriptionValue = instance.value();

                    String toolName = getToolName(nameValue, toolMethod);
                    String toolDescription = getToolDescription(descriptionValue);

                    ToolSpecification.Builder builder = ToolSpecification.builder()
                            .name(toolName)
                            .description(toolDescription);

                    // @ToolMemoryId parameters are excluded from the JSON schema;
                    // their position is passed to the generated invoker instead.
                    MethodParameterInfo memoryIdParameter = null;
                    for (MethodParameterInfo parameter : toolMethod.parameters()) {
                        if (parameter.hasAnnotation(TOOL_MEMORY_ID)) {
                            memoryIdParameter = parameter;
                            continue;
                        }

                        builder.addParameter(parameter.name(), toJsonSchemaProperties(parameter, index))
                    }

                    Map<String, Integer> nameToParamPosition = toolMethod.parameters().stream().collect(
                            Collectors.toMap(MethodParameterInfo::name, i -> Integer.valueOf(i.position())));

                    String methodSignature = createUniqueSignature(toolMethod);

                    String invokerClassName = generateInvoker(toolMethod, classOutput, nameToParamPosition,
                            memoryIdParameter != null ? memoryIdParameter.position() : null, methodSignature);
                    generatedInvokerClasses.add(invokerClassName);

                    String argumentMapperClassName = generateArgumentMapper(toolMethod, classOutput,
                            methodSignature);
                    generatedArgumentMapperClasses.add(argumentMapperClassName);

                    ToolSpecification toolSpecification = builder.build();
                    ToolMethodCreateInfo methodCreateInfo = new ToolMethodCreateInfo(
                            toolMethod.name(), invokerClassName,
                            toolSpecification, argumentMapperClassName);
                    metadata.computeIfAbsent(className.toString(), (c) -> new ArrayList<>()).add(methodCreateInfo);

                }
            }
        }

        // Generated classes are instantiated reflectively at runtime.
        if (!generatedInvokerClasses.isEmpty()) {
            reflectiveClassProducer.produce(ReflectiveClassBuildItem
                    .builder(generatedInvokerClasses.toArray(String[]::new))
                    .constructors(true)
                    .build());
        }
        if (!generatedArgumentMapperClasses.isEmpty()) {
            reflectiveClassProducer.produce(ReflectiveClassBuildItem
                    .builder(generatedArgumentMapperClasses.toArray(String[]::new))
                    .fields(true)
                    .constructors(true)
                    .build());
        }

        toolsMetadataProducer.produce(new ToolsMetadataBuildItem(metadata));
        recorder.setMetadata(metadata);
    }

    // Builds a signature string (name + return type + parameter types) used to
    // derive a stable hash for generated class names.
    private static String createUniqueSignature(MethodInfo toolMethod) {
        StringBuilder sigBuilder = new StringBuilder();
        sigBuilder.append(toolMethod.name())
                .append(toolMethod.returnType().name().toString());
        for (MethodParameterInfo t : toolMethod.parameters()) {
            sigBuilder.append(t.type().name().toString());
        }
        return sigBuilder.toString();
    }

    // Tool name from the @Tool 'name' attribute, falling back to the method name.
    private static String getToolName(AnnotationValue nameValue, MethodInfo methodInfo) {
        if (nameValue == null) {
            return methodInfo.name();
        }
        String annotationValue = nameValue.asString();
        if (annotationValue.isEmpty()) {
            return methodInfo.name();
        }
        return annotationValue;
    }

    // Joins the @Tool description array into a single newline-separated string.
    private String getToolDescription(AnnotationValue descriptionValue) {
        if (descriptionValue == null) {
            return "";
        }
        return String.join("\n", descriptionValue.asStringArray());
    }

    /**
     * Generates a {@link ToolInvoker} implementation for the given tool method.
     * The invoke() method unpacks the argument array and calls the tool
     * virtually; void tools return the literal string "Success".
     * methodMetadata() exposes the name-to-position map and the optional
     * memory-id parameter position.
     *
     * @return the fully-qualified name of the generated class
     */
    private static String generateInvoker(MethodInfo methodInfo, ClassOutput classOutput,
            Map<String, Integer> nameToParamPosition, Short memoryIdParamPosition, String methodSignature) {
        String implClassName = methodInfo.declaringClass().name() + "$$QuarkusInvoker$" + methodInfo.name() + "_"
                + HashUtil.sha1(methodSignature);
        try (ClassCreator classCreator = ClassCreator.builder()
                .classOutput(classOutput)
                .className(implClassName)
                .interfaces(ToolInvoker.class)
                .build()) {

            MethodCreator invokeMc = classCreator.getMethodCreator(
                    MethodDescriptor.ofMethod(implClassName, "invoke", Object.class, Object.class, Object[].class));

            ResultHandle result;
            if (methodInfo.parametersCount() > 0) {
                List<ResultHandle> argumentHandles = new ArrayList<>(methodInfo.parametersCount());
                for (int i = 0; i < methodInfo.parametersCount(); i++) {
                    argumentHandles.add(invokeMc.readArrayValue(invokeMc.getMethodParam(1), i));
                }

                ResultHandle[] targetMethodHandles = argumentHandles.toArray(new ResultHandle[0]);
                result = invokeMc.invokeVirtualMethod(MethodDescriptor.of(methodInfo), invokeMc.getMethodParam(0),
                        targetMethodHandles);
            } else {
                result = invokeMc.invokeVirtualMethod(MethodDescriptor.of(methodInfo), invokeMc.getMethodParam(0));
            }

            boolean toolReturnsVoid = methodInfo.returnType().kind() == Type.Kind.VOID;
            if (toolReturnsVoid) {
                // Void tools still need a result string for the model.
                invokeMc.returnValue(invokeMc.load("Success"));
            } else {
                invokeMc.returnValue(result);
            }

            MethodCreator methodMetadataMc = classCreator
                    .getMethodCreator(MethodDescriptor.ofMethod(implClassName, "methodMetadata",
                            ToolInvoker.MethodMetadata.class));
            ResultHandle nameToParamPositionHandle = methodMetadataMc.newInstance(HASHMAP_CTOR);
            for (var entry : nameToParamPosition.entrySet()) {
                methodMetadataMc.invokeInterfaceMethod(MAP_PUT, nameToParamPositionHandle,
                        methodMetadataMc.load(entry.getKey()),
                        methodMetadataMc.load(entry.getValue()));
            }

            ResultHandle resultHandle = methodMetadataMc.newInstance(METHOD_METADATA_CTOR,
                    methodMetadataMc.load(toolReturnsVoid),
                    nameToParamPositionHandle,
                    memoryIdParamPosition != null ? methodMetadataMc.load(Integer.valueOf(memoryIdParamPosition))
                            : methodMetadataMc.loadNull());
            methodMetadataMc.returnValue(resultHandle);
        }
        return implClassName;
    }

    /**
     * Generates a {@link Mappable} implementation with one public field per
     * tool-method parameter; obtainFieldValuesMap() returns a field-name to
     * field-value map.
     *
     * @return the fully-qualified name of the generated class
     */
    private String generateArgumentMapper(MethodInfo methodInfo, ClassOutput classOutput,
            String methodSignature) {

        String implClassName = methodInfo.declaringClass().name() + "$$QuarkusToolArgumentMapper$" + methodInfo.name() + "_"
                + HashUtil.sha1(methodSignature);
        try (ClassCreator classCreator = ClassCreator.builder()
                .classOutput(classOutput)
                .className(implClassName)
                .interfaces(Mappable.class)
                .build()) {

            List<FieldDescriptor> fieldDescriptors = new ArrayList<>();
            for (MethodParameterInfo parameter : methodInfo.parameters()) {
                FieldDescriptor fieldDescriptor = FieldDescriptor.of(implClassName, parameter.name(),
                        parameter.type().name().toString());
                fieldDescriptors.add(fieldDescriptor);
                classCreator.getFieldCreator(fieldDescriptor).setModifiers(Modifier.PUBLIC);
            }

            MethodCreator mc = classCreator
                    .getMethodCreator(MethodDescriptor.ofMethod(implClassName, "obtainFieldValuesMap", Map.class));
            ResultHandle mapHandle = mc.newInstance(MethodDescriptor.ofConstructor(HashMap.class));
            for (FieldDescriptor field : fieldDescriptors) {
                ResultHandle fieldValue = mc.readInstanceField(field, mc.getThis());
                mc.invokeInterfaceMethod(MAP_PUT, mapHandle, mc.load(field.getName()), fieldValue);
            }
            mc.returnValue(mapHandle);
        }
        return implClassName;
    }

    // Maps a Java parameter type to the JSON schema properties advertised to
    // the model; @P supplies an optional description.
    private Iterable<JsonSchemaProperty> toJsonSchemaProperties(MethodParameterInfo parameter, IndexView index) {
        Type type = parameter.type();
        DotName typeName = parameter.type().name();

        AnnotationInstance pInstance = parameter.annotation(P);

        JsonSchemaProperty description = pInstance == null ? null : description(pInstance.value().asString());

        if (DotNames.STRING.equals(typeName) || DotNames.CHARACTER.equals(typeName)
                || DotNames.PRIMITIVE_CHAR.equals(typeName)) {
            return removeNulls(STRING, description);
        }

        if (DotNames.BOOLEAN.equals(typeName) || DotNames.PRIMITIVE_BOOLEAN.equals(typeName)) {
            return removeNulls(BOOLEAN, description);
        }

        if (DotNames.BYTE.equals(typeName) || DotNames.PRIMITIVE_BYTE.equals(typeName)
                || DotNames.SHORT.equals(typeName) || DotNames.PRIMITIVE_SHORT.equals(typeName)
                || DotNames.INTEGER.equals(typeName) || DotNames.PRIMITIVE_INT.equals(typeName)
                || DotNames.LONG.equals(typeName) || DotNames.PRIMITIVE_LONG.equals(typeName)
                || DotNames.BIG_INTEGER.equals(typeName)) {
            return removeNulls(INTEGER, description);
        }

        // TODO put constraints on min and max?
        if (DotNames.FLOAT.equals(typeName) || DotNames.PRIMITIVE_FLOAT.equals(typeName)
                || DotNames.DOUBLE.equals(typeName) || DotNames.PRIMITIVE_DOUBLE.equals(typeName)
                || DotNames.BIG_DECIMAL.equals(typeName)) {
            return removeNulls(NUMBER, description);
        }

        if ((type.kind() == Type.Kind.ARRAY)
                || DotNames.LIST.equals(typeName)
                || DotNames.SET.equals(typeName)) { // TODO something else?
            return removeNulls(ARRAY, description); // TODO provide type of array?
        }

        if (isEnum(type, index)) {
            // Enums are exposed as strings constrained to the constant names.
            return removeNulls(STRING, enums(enumConstants(type)), description);
        }

        return removeNulls(OBJECT, description); // TODO provide internals
    }

    // Filters out null entries (e.g. a missing description).
    private Iterable<JsonSchemaProperty> removeNulls(JsonSchemaProperty... properties) {
        return stream(properties)
                .filter(Objects::nonNull)
                .collect(toList());
    }

    // True when the type resolves to an enum in the Jandex index.
    private boolean isEnum(Type returnType, IndexView index) {
        if (returnType.kind() != Type.Kind.CLASS) {
            return false;
        }
        ClassInfo maybeEnum = index.getClassByName(returnType.name());
        return maybeEnum != null && maybeEnum.isEnum();
    }

    // Loads the enum class and returns its constants (used for schema enums).
    private static Object[] enumConstants(Type type) {
        return JandexUtil.load(type, Thread.currentThread().getContextClassLoader()).getEnumConstants();
    }

    /**
     * Simply removes the {@code private} modifier from tool methods
     */
    private static class RemovePrivateFromMethodsVisitor implements
            BiFunction<String, ClassVisitor, ClassVisitor> {

        private final List<MethodInfo> privateMethods;

        private RemovePrivateFromMethodsVisitor(List<MethodInfo> privateMethods) {
            this.privateMethods = privateMethods;
        }

        @Override
        public ClassVisitor apply(String className, ClassVisitor classVisitor) {
            ClassTransformer transformer = new ClassTransformer(className);
            for (MethodInfo method : privateMethods) {
                transformer.modifyMethod(MethodDescriptor.of(method)).removeModifiers(Opcodes.ACC_PRIVATE);
            }
            return transformer.applyTo(classVisitor);
        }
    }
}
| [
"dev.langchain4j.agent.tool.ToolSpecification.builder"
] | [((4315, 4392), 'io.quarkus.arc.deployment.AdditionalBeanBuildItem.builder'), ((4315, 4384), 'io.quarkus.arc.deployment.AdditionalBeanBuildItem.builder'), ((9418, 9547), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((9418, 9489), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13423, 13601), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13576), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13529), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13487), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16454), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16429), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16385), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16343), 'io.quarkus.gizmo.ClassCreator.builder')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
@Testcontainers
class OllamaChatModelTest {

    /**
     * Note: the very first run downloads a Docker image bundling Ollama together with a model,
     * so it can take a few minutes.
     * <p>
     * These are modified Ollama Docker images that already contain the model; all images with
     * pre-packaged models are available here: https://hub.docker.com/repositories/langchain4j
     * <p>
     * You are not restricted to these images — any model from https://ollama.ai/library works:
     * 1. Run "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
     * 2. Run "docker exec -it ollama ollama run mistral" <- specify the desired model here
     */
    static String MODEL_NAME = "orca-mini"; // try "mistral", "llama2", "codellama", "phi" or "tinyllama"

    @Container
    static GenericContainer<?> ollama =
            new GenericContainer<>("langchain4j/ollama-" + MODEL_NAME + ":latest")
                    .withExposedPorts(11434);

    /** Builds the base URL from the host port Testcontainers mapped to the container's 11434. */
    static String baseUrl() {
        return String.format("http://%s:%d", ollama.getHost(), ollama.getFirstMappedPort());
    }

    @Test
    void simple_example() {
        ChatLanguageModel chatModel = OllamaChatModel.builder()
                .baseUrl(baseUrl())
                .modelName(MODEL_NAME)
                .build();

        String answer = chatModel.generate("Provide 3 short bullet points explaining why Java is awesome");

        System.out.println(answer);
    }

    @Test
    void json_output_example() {
        // Asking Ollama for structured output via the "json" format flag.
        ChatLanguageModel chatModel = OllamaChatModel.builder()
                .baseUrl(baseUrl())
                .modelName(MODEL_NAME)
                .format("json")
                .build();

        String json = chatModel.generate("Give me a JSON with 2 fields: name and age of a John Doe, 42");

        System.out.println(json);
    }
}
| [
"dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | [((1404, 1529), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1404, 1504), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1404, 1465), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1915), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1890), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1858), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1819), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] |
package com.honvay.flychat.langchain.llama.embedding;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
@Service
public class OpenAiEmbeddingService implements EmbeddingService {

    /** Timeout applied to every embedding request against the OpenAI API. */
    private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(15);

    /** OpenAI API key, injected from the {@code openai.apiKey} property (may be blank). */
    private final String apiKey;

    public OpenAiEmbeddingService(@Value("${openai.apiKey:}") String apiKey) {
        this.apiKey = apiKey;
    }

    /**
     * Embeds each input text with OpenAI's text-embedding-ada-002 model.
     *
     * @param texts texts to embed
     * @return one embedding vector per input text, in the same order
     */
    @Override
    public List<float[]> embed(List<String> texts) {
        List<TextSegment> segments = texts.stream()
                .map(TextSegment::from)
                .collect(Collectors.toList());

        List<Embedding> embeddings = createModel().embedAll(segments);

        return embeddings
                .stream()
                .map(Embedding::vector)
                .collect(Collectors.toList());
    }

    /**
     * Embeds a single text with OpenAI's text-embedding-ada-002 model.
     *
     * @param text text to embed
     * @return the embedding vector for the text
     */
    @Override
    public float[] embed(String text) {
        Embedding embedding = createModel().embed(text);
        return embedding.vector();
    }

    /**
     * Builds the embedding model. Extracted to remove the builder duplication that
     * previously existed in both {@code embed} overloads; a fresh instance is still
     * created per call, mirroring the original behavior.
     */
    private EmbeddingModel createModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(apiKey) // https://platform.openai.com/account/api-keys
                .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002)
                .timeout(REQUEST_TIMEOUT)
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((981, 1228), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1204), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1156), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1042), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1790), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1765), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1716), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1601), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package com.wxm158.promptgeneration.service;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel;
import com.wxm158.promptgeneration.mapper.QuestionMapper;
import com.wxm158.promptgeneration.model.dto.ChatRequest;
import com.wxm158.promptgeneration.model.dto.QuestionGeneration;
import com.wxm158.promptgeneration.model.dto.TopicResponse;
import com.wxm158.promptgeneration.model.entity.Question;
import com.wxm158.promptgeneration.repository.QuestionRepository;
import com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringEscapeUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Type;
import java.net.Proxy;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Slf4j
@Service
@RequiredArgsConstructor
public class EmbeddingService {

    @Value("${OPENAI_API_KEY}")
    private String API_KEY;

    @Value("${WEAVIATE_API_KEY}")
    private String WEAVIATE_API_KEY;

    private static final String DEFAULT_NAMESPACE = "default"; // do not change, will break backward compatibility!
    private static final String DEFAULT_METADATA_TEXT_KEY = "text_segment"; // do not change, will break backward compatibility!

    String baseUrl = "https://api.openai.com/v1";
    String modelName = "text-embedding-ada-002"; // You can change this if needed
    Duration timeout = Duration.ofSeconds(120); // You can change this if needed
    Integer maxRetries = 3; // You can change this if needed
    Proxy proxy = null; // You can provide a proxy if needed
    Boolean logRequests = true; // Set to true if you want to log requests
    Boolean logResponses = true; // Set to true if you want to log responses

    private final QuestionRepository questionRepository;
    private final QuestionMapper questionMapper;

    /** Builds the OpenAI embedding model used to embed queries before retrieval. */
    private EmbeddingModel createEmbeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .baseUrl(baseUrl)
                .apiKey(API_KEY)
                .modelName(modelName)
                .timeout(timeout)
                .maxRetries(maxRetries)
                .proxy(proxy)
                .logRequests(logRequests)
                .logResponses(logResponses)
                .build();
    }

    /** Builds the Weaviate-backed embedding store holding the course material. */
    private EmbeddingStore<TextSegment> createEmbeddingStore() {
        return WeaviateEmbeddingStore.builder()
                .apiKey(WEAVIATE_API_KEY)
                .scheme("https")
                .host("question-gen-wwxbinax.weaviate.network")
                .avoidDups(true)
                .consistencyLevel("ALL")
                .build();
    }

    /** Builds the fine-tuned chat model that generates the questions. */
    private OpenAiChatModel createChatModel() {
        return OpenAiChatModel.builder()
                .apiKey(API_KEY)
                .modelName("ft:gpt-3.5-turbo-1106:personal::8VzKieWR")
                .timeout(timeout)
                .temperature(0.3)
                .build();
    }

    /** Joins the retrieved segments into a single context block, each wrapped in "...". */
    private String format(List<TextSegment> relevantSegments) {
        return relevantSegments.stream()
                .map(TextSegment::text)
                .map(segment -> "..." + segment + "...")
                .collect(joining("\n\n"));
    }

    /**
     * Generates questions for the requested topic using retrieval-augmented prompting:
     * relevant text is pulled from the embedding store and fed, together with the
     * request parameters, to a fine-tuned chat model that answers with a JSON list.
     *
     * @param chatRequest topic, question type, and number of questions to generate
     * @return generated questions with sequential ids and the requested question type
     */
    public List<QuestionGeneration> createQuestions(ChatRequest chatRequest) {
        String message = chatRequest.getTopic();
        String questionType = chatRequest.getQuestionType();
        String questionAmount = chatRequest.getQuestionAmount();

        // Initialise fine tuned chat model, embedding model, embedding store.
        OpenAiChatModel chatModel = createChatModel();
        EmbeddingModel embeddingModel = createEmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = createEmbeddingStore();

        // Retrieve relevant text from embedding store (at most one text segment).
        Retriever<TextSegment> retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, 1);
        String information = format(retriever.findRelevant(message)).replace("\n", " ");

        // Create the prompt in the format used when training the fine-tuned model.
        ChatMessage[] messagesArray = {
                new SystemMessage("You are an A-level Computer Science teacher. You aim to generate various questions for your students."),
                new UserMessage("SCOPE: " + message + ", QUESTION_TYPE: " + questionType + ", QUESTION_AMOUNT: " + questionAmount +
                        ", TEXT: " + information)
        };
        List<ChatMessage> messages = new ArrayList<>(List.of(messagesArray));
        // Use the injected @Slf4j logger instead of System.out.
        log.debug("Prompt messages: {}", messages);

        // Get response from model (json list of questions and answers).
        Response<AiMessage> response = chatModel.generate(messages);
        String stringResponse = response.content().text();
        log.debug("Model response: {}", stringResponse);

        // The model may wrap the JSON array in prose; keep only the [...] span.
        int startIndex = stringResponse.indexOf("[");
        int endIndex = stringResponse.lastIndexOf("]");
        Gson gson = new Gson();
        Type type = new TypeToken<List<QuestionGeneration>>(){}.getType();
        List<QuestionGeneration> questions = gson.fromJson(stringResponse.substring(startIndex, endIndex + 1), type);

        // Indexed loop instead of indexOf-in-a-loop (the latter was O(n^2)).
        for (int i = 0; i < questions.size(); i++) {
            QuestionGeneration question = questions.get(i);
            question.setId((long) i);
            question.setQuestionType(questionType);
        }
        return questions;
    }

    /**
     * Persists the generated questions for the given user, skipping any
     * (question, questionType) pair that already exists.
     *
     * @return only the questions that were actually saved
     */
    public List<Question> saveQuestions(List<QuestionGeneration> questions, String userId) {
        List<Question> questionList = questionMapper.mapQuestionGenerationsToQuestions(questions, userId);
        List<Question> savedQuestions = new ArrayList<>();
        for (Question question : questionList) {
            if (!questionRepository.existsByQuestionAndQuestionType(question.getQuestion(), question.getQuestionType())) {
                savedQuestions.add(questionRepository.save(question));
            }
        }
        return savedQuestions;
    }

    /** Returns all questions previously saved by the given user. */
    public List<Question> getAllQuestions(String userId) {
        return questionRepository.findAllByUserId(Long.valueOf(userId));
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((3990, 4340), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4315), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4271), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4229), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4199), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4159), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4125), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4087), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4054), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((4457, 4727), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4702), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4661), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4628), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4564), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4531), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4822, 5080), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 5055), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 5021), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 4987), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 4880), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.gonnect.helpme.config;
import com.gonnect.helpme.agent.ReservationSupportAgent;
import com.gonnect.helpme.service.ReservationToolService;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
@Configuration
public class ReservationHelpMeApplicationConfigurer {

    /**
     * Wires up the customer-support agent. Run ReservationSupportApplicationTest
     * to see a simulated conversation with it.
     */
    @Bean
    ReservationSupportAgent reservationSupportAgent(ChatLanguageModel chatLanguageModel,
                                                   ReservationToolService reservationToolService,
                                                   Retriever<TextSegment> retriever) {
        MessageWindowChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(20);
        return AiServices.builder(ReservationSupportAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(chatMemory)
                .tools(reservationToolService)
                .retriever(retriever)
                .build();
    }

    @Bean
    Retriever<TextSegment> fetch(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        // These two parameters need tuning for the optimal setting, which depends on:
        // - The nature of your data
        // - The embedding model you are using
        int maxResultsRetrieved = 1;
        double minScore = 0.6;
        return EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, maxResultsRetrieved, minScore);
    }

    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Builds an in-memory embedding store and populates it on startup.
     * <p>
     * The store is filled dynamically (rather than pre-filled with application data)
     * so the demo is self-sufficient: the support-bot guidelines document is loaded,
     * split into 100-token segments, embedded, and stored for similarity lookups.
     */
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
        EmbeddingStore<TextSegment> store = new InMemoryEmbeddingStore<>();

        // Load "Gonnect Support Bot" training guidelines as sample data.
        Resource resource = resourceLoader.getResource("classpath:gonnect-miles-terms-and-condition.txt");
        Document document = loadDocument(resource.getFile().toPath(), new TextDocumentParser());

        // EmbeddingStoreIngestor automates: split into 100-token segments,
        // embed each segment, and save the embeddings into the store.
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(splitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(store)
                .build();
        ingestor.ingest(document);

        return store;
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1951, 2237), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2212), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2174), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2127), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2054), 'dev.langchain4j.service.AiServices.builder'), ((4356, 4561), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4536), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4488), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4440), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.anthonyquere.companionapi.completion.langchain;
import com.anthonyquere.companionapi.completion.langchain.services.Summary;
import com.anthonyquere.companionapi.completion.langchain.services.TalkWithCompanion;
import com.anthonyquere.companionapi.crud.companions.Companion;
import com.anthonyquere.companionapi.crud.message.MessageRepository;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class CompanionAiService {

    /**
     * AI service for chatting with a companion; chat memory is backed by the
     * message repository, scoped per companion.
     */
    @Bean
    public TalkWithCompanion buildAiCompanionService(ChatLanguageModel model,
                                                    MessageRepository messageRepository) {
        return AiServices.builder(TalkWithCompanion.class)
                .chatLanguageModel(model)
                .chatMemoryProvider(memoryId -> new CompanionChatMemory((Companion) memoryId, messageRepository))
                .build();
    }

    /** Stateless AI service that produces summaries. */
    @Bean
    public Summary buildAiSummaryService(ChatLanguageModel model) {
        return AiServices.builder(Summary.class)
                .chatLanguageModel(model)
                .build();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((789, 1015), 'dev.langchain4j.service.AiServices.builder'), ((789, 990), 'dev.langchain4j.service.AiServices.builder'), ((789, 874), 'dev.langchain4j.service.AiServices.builder'), ((1135, 1235), 'dev.langchain4j.service.AiServices.builder'), ((1135, 1210), 'dev.langchain4j.service.AiServices.builder')] |
package my.samples;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
/**
 * Interactive demo: ingests a Ford F-150 warranty-guide PDF into an in-memory
 * embedding store, then answers user queries from the console by retrieving the
 * most relevant segments and forwarding them (plus the query) to a REST backend.
 */
public class InMemoryEmbeddingManualExample {

    public static final String ANSI_GREEN = "\u001B[32m";
    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_YELLOW = "\u001B[33m";

    public static void main(String[] args) {
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        ingestWarrantyGuide(embeddingModel, embeddingStore);

        // FIX: Scanner is now held in try-with-resources; the original only closed it
        // on the normal exit path and leaked it if a query threw.
        try (Scanner scanner = new Scanner(System.in)) {
            while (true) {
                System.out.println("Enter your query (or type 'exit' to quit):");
                String query = scanner.nextLine();
                if ("exit".equalsIgnoreCase(query)) {
                    System.out.println("Exiting program.");
                    break;
                }
                answerQuery(query, embeddingModel, embeddingStore);
            }
        }

        // In-memory embedding store can be serialized and deserialized to/from JSON.
        String serializedStore = ((InMemoryEmbeddingStore<TextSegment>) embeddingStore).serializeToJson();
        System.out.println(serializedStore);
    }

    /** Loads the warranty-guide PDF, tags it with metadata, and ingests it into the store. */
    private static void ingestWarrantyGuide(EmbeddingModel embeddingModel,
                                            EmbeddingStore<TextSegment> embeddingStore) {
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();

        Path filePath = toPath("example-files/2025_US_F150_Warranty_Guide_ENG_V1.pdf");
        Document document = FileSystemDocumentLoader.loadDocument(filePath, new ApachePdfBoxDocumentParser());
        document.metadata().add("fileName", filePath.getFileName().toString());
        document.metadata().add("filePath", filePath.toString());
        document.metadata().add("company", "FORD");
        document.metadata().add("product", "F150");
        document.metadata().add("language", "ENG");
        document.metadata().add("version", "V1");
        document.metadata().add("year", "2025");
        document.metadata().add("type", "Warranty Guide");
        document.metadata().add("country", "US");
        document.metadata().add("category", "Automotive");
        ingestor.ingest(document);
    }

    /**
     * Retrieves up to 5 segments relevant to the query, prints them, and — if any
     * were found — asks the REST backend for an answer grounded in those segments.
     * Example queries: "Who Pays For Warranty Repairs?", "What is the warranty period?",
     * "What is the warranty period for the powertrain?"
     */
    private static void answerQuery(String query,
                                    EmbeddingModel embeddingModel,
                                    EmbeddingStore<TextSegment> embeddingStore) {
        Embedding queryEmbedding = embeddingModel.embed(query).content();
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 5);

        System.out.println("Start --------- Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");
        List<String> answers = new ArrayList<>();
        for (EmbeddingMatch<TextSegment> match : relevant) {
            System.out.println(match.score());
            answers.add(match.embedded().text());
            System.out.println(ANSI_GREEN + match.embedded().text() + ANSI_RESET);
            System.out.println("");
        }
        System.out.println("End --------- Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");

        if (!answers.isEmpty()) {
            try {
                System.out.println(ANSI_YELLOW + RestClient.getAnswer(query, answers) + ANSI_RESET);
            } catch (IOException e) {
                // Best-effort demo: report the failure and keep accepting queries.
                e.printStackTrace();
            }
        }
    }

    /**
     * Resolves a classpath resource (e.g. under src/main/resources/example-files)
     * to a filesystem {@link Path}.
     *
     * @throws RuntimeException if the resource is missing or its URI is invalid
     */
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = InMemoryEmbeddingManualExample.class.getClassLoader().getResource(fileName);
            if (fileUrl == null) {
                throw new RuntimeException("Resource not found: " + fileName);
            }
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1444, 1668), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1643), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1595), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1547), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package dev.langchain4j.model.qianfan;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.internal.Utils;
import dev.langchain4j.model.qianfan.client.embedding.EmbeddingResponse;
import dev.langchain4j.model.qianfan.client.chat.Parameters;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse;
import dev.langchain4j.model.qianfan.client.chat.Message;
import dev.langchain4j.model.qianfan.client.chat.Role;
import dev.langchain4j.model.qianfan.client.chat.FunctionCall;
import dev.langchain4j.model.qianfan.client.chat.Function;
import dev.langchain4j.model.qianfan.client.completion.CompletionResponse;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static dev.langchain4j.internal.Exceptions.illegalArgument;
import static dev.langchain4j.model.output.FinishReason.*;
import static java.util.stream.Collectors.toList;
/**
 * Internal conversion helpers between langchain4j's message/tool model and the
 * Qianfan client's request/response types.
 */
public class InternalQianfanHelper {
    /** Converts langchain4j tool specifications to Qianfan {@link Function} definitions. */
    public static List<Function> toFunctions(Collection<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(InternalQianfanHelper::toFunction)
                .collect(toList());
    }
    // Maps a single tool specification (name, description, parameters) to a Qianfan Function.
    private static Function toFunction(ToolSpecification toolSpecification) {
        return Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toOpenAiParameters(toolSpecification.parameters()))
                .build();
    }
    // Converts tool parameters to the Qianfan Parameters schema; null yields an empty schema.
    private static Parameters toOpenAiParameters(ToolParameters toolParameters) {
        if (toolParameters == null) {
            return Parameters.builder().build();
        }
        return Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }
    /**
     * Converts a langchain4j {@link ChatMessage} to a Qianfan {@link Message}.
     * Supported: UserMessage, AiMessage (with or without a function call), and
     * ToolExecutionResultMessage. Anything else raises an IllegalArgumentException.
     */
    public static Message toQianfanMessage(ChatMessage message) {
        if (message instanceof UserMessage) {
            UserMessage userMessage = (UserMessage) message;
            return Message.builder()
                    .role(Role.USER)
                    .content(userMessage.text())
                    .name(userMessage.name())
                    .build();
        }
        if (message instanceof AiMessage) {
            AiMessage aiMessage = (AiMessage) message;
            if (!aiMessage.hasToolExecutionRequests()) {
                return Message.builder()
                        .content(message.text())
                        .role(Role.ASSISTANT)
                        .build();
            }
            // Only the first tool execution request is forwarded to Qianfan.
            ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
            if (toolExecutionRequest.id() == null) {
                FunctionCall functionCall = FunctionCall.builder()
                        .name(toolExecutionRequest.name())
                        .arguments(toolExecutionRequest.arguments())
                        .build();
                return Message.builder()
                        .content(message.text())
                        .role(Role.ASSISTANT)
                        .functionCall(functionCall)
                        .build();
            }
            // NOTE(review): an AiMessage whose tool request has a non-null id falls through
            // to the "Unknown message type" error below — looks unintended; confirm whether
            // such messages should also be converted.
        }
        if (message instanceof ToolExecutionResultMessage) {
            ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) message;
            // Re-package the tool result as a FUNCTION-role message named after the tool.
            FunctionCall functionCall = FunctionCall.builder()
                    .name(toolExecutionResultMessage.toolName())
                    .arguments(toolExecutionResultMessage.text())
                    .build();
            return Message.builder()
                    .content(message.text())
                    .role(Role.FUNCTION)
                    .name(functionCall.name())
                    .build();
        }
        throw illegalArgument("Unknown message type: " + message.type());
    }
    // Extracts token usage (prompt/completion/total) from a chat completion response; null-safe.
    static TokenUsage tokenUsageFrom(ChatCompletionResponse response) {
        return Optional.of(response)
                .map(ChatCompletionResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
    // Same extraction for plain completion responses.
    static TokenUsage tokenUsageFrom(CompletionResponse response) {
        return Optional.of(response)
                .map(CompletionResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
    // Same extraction for embedding responses.
    static TokenUsage tokenUsageFrom(EmbeddingResponse response) {
        return Optional.of(response)
                .map(EmbeddingResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
    /**
     * Maps Qianfan finish-reason strings to langchain4j {@link FinishReason} values;
     * returns null for blank or unrecognized values.
     */
    public static FinishReason finishReasonFrom(String finishReason) {
        if(Utils.isNullOrBlank(finishReason)){
            return null;
        }
        switch (finishReason) {
            // Both "normal" and "stop" indicate a natural completion.
            case "normal":
                return STOP;
            case "stop":
                return STOP;
            case "length":
                return LENGTH;
            case "content_filter":
                return CONTENT_FILTER;
            case "function_call":
                return TOOL_EXECUTION;
            default:
                return null;
        }
    }
    /**
     * Builds an {@link AiMessage} from a chat completion response: a tool-execution
     * request when the model returned a function call, otherwise a plain text message.
     */
    public static AiMessage aiMessageFrom(ChatCompletionResponse response) {
        FunctionCall functionCall = response.getFunctionCall();
        if (functionCall != null) {
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(functionCall.name())
                    .arguments(functionCall.arguments())
                    .build();
            return aiMessage(toolExecutionRequest);
        }
        return aiMessage(response.getResult());
    }
    /**
     * Returns the text of the single system message, or null when there is none.
     * Throws when more than one system message is present.
     */
    static String getSystemMessage(List<ChatMessage> messages) {
        List<ChatMessage> systemMessages = messages.stream().filter(message -> message instanceof SystemMessage).collect(toList());
        if (systemMessages.size() > 1) {
            throw new RuntimeException("Multiple system messages are not supported");
        }
        if(Utils.isNullOrEmpty(systemMessages)){
            return null;
        }
        return ((SystemMessage) systemMessages.get(0)).text();
    }
    /** Converts all non-system messages to Qianfan messages (system text is sent separately). */
    public static List<Message> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .filter(chatMessage -> !(chatMessage instanceof SystemMessage))
                .map(InternalQianfanHelper::toQianfanMessage)
                .collect(toList());
    }
}
| [
"dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder",
"dev.langchain4j.model.qianfan.client.chat.Message.builder",
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
"dev.langchain4j.model.qianfan.client.chat.Parameters.builder",
"dev.langchain4j.model.qianfan.client.chat.Function.builder",
"dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests"
] | [((1632, 1865), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1840), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1760), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1698), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((2013, 2041), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2223), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2198), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2145), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2424, 2602), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2573), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2527), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2478), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2942), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2909), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2863), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3015, 3055), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3154, 3337), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3154, 3304), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3154, 3235), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3363, 3560), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3527), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3475), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3429), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3799, 3993), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3799, 3960), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3799, 3890), 
'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((4019, 4214), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4181), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4130), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4085), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4394, 4615), 'java.util.Optional.of'), ((4394, 4585), 'java.util.Optional.of'), ((4394, 4470), 'java.util.Optional.of'), ((4707, 4924), 'java.util.Optional.of'), ((4707, 4894), 'java.util.Optional.of'), ((4707, 4779), 'java.util.Optional.of'), ((5016, 5232), 'java.util.Optional.of'), ((5016, 5202), 'java.util.Optional.of'), ((5016, 5087), 'java.util.Optional.of'), ((6045, 6208), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6045, 6179), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6045, 6122), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package io.quarkiverse.langchain4j.runtime.tool;
import java.util.List;
import java.util.Map;
import dev.langchain4j.agent.tool.ToolParameters;
import io.quarkus.runtime.ObjectSubstitution;
import io.quarkus.runtime.annotations.RecordableConstructor;
/**
 * Quarkus build-time recording substitution for {@link ToolParameters}.
 * {@code ToolParameters} is not directly recordable, so it is serialized into
 * the nested {@link Serialized} holder during the build and rebuilt from it
 * at runtime via its builder.
 */
public class ToolParametersObjectSubstitution
        implements ObjectSubstitution<ToolParameters, ToolParametersObjectSubstitution.Serialized> {
    @Override
    public Serialized serialize(ToolParameters obj) {
        // Capture the three components Quarkus needs to persist.
        return new Serialized(obj.type(), obj.properties(), obj.required());
    }
    @Override
    public ToolParameters deserialize(Serialized obj) {
        // Rebuild an equivalent ToolParameters from the recorded state.
        return ToolParameters.builder()
                .type(obj.type)
                .required(obj.required)
                .properties(obj.properties).build();
    }
    /**
     * Recordable snapshot of a {@link ToolParameters} instance.
     * NOTE: constructor parameter names must keep matching the getter names —
     * {@code @RecordableConstructor} pairs them by name at build time.
     */
    public static class Serialized {
        // JSON-schema "type" of the parameters object (typically "object").
        private final String type;
        // Per-parameter JSON-schema property maps, keyed by parameter name.
        private final Map<String, Map<String, Object>> properties;
        // Names of the required parameters.
        private final List<String> required;
        @RecordableConstructor
        public Serialized(String type, Map<String, Map<String, Object>> properties, List<String> required) {
            this.type = type;
            this.properties = properties;
            this.required = required;
        }
        public String getType() {
            return type;
        }
        public Map<String, Map<String, Object>> getProperties() {
            return properties;
        }
        public List<String> getRequired() {
            return required;
        }
    }
}
| [
"dev.langchain4j.agent.tool.ToolParameters.builder"
] | [((639, 787), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 779), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 735), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 695), 'dev.langchain4j.agent.tool.ToolParameters.builder')] |
package me.nzuguem.something.story.configurations.langchain;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.function.Supplier;
/**
 * CDI-provided {@link Supplier} of the {@link RetrievalAugmentor} used to
 * enrich prompts with relevant story content.
 */
@ApplicationScoped
public class StoryRetrievalAugmentor implements Supplier<RetrievalAugmentor> {

    /** Retriever that looks up story content relevant to the user query. */
    private final StoryContentRetriever contentRetriever;

    public StoryRetrievalAugmentor(StoryContentRetriever contentRetriever) {
        this.contentRetriever = contentRetriever;
    }

    /**
     * Builds a default retrieval augmentor backed by the story content retriever.
     */
    @Override
    public RetrievalAugmentor get() {
        return DefaultRetrievalAugmentor.builder()
                .contentRetriever(contentRetriever)
                .build();
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder"
] | [((585, 695), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((585, 670), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
package org.example;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
/**
 * Demonstrates per-conversation chat memory with AiServices: each memory id
 * gets its own message window, so different "users" keep separate histories.
 */
public class _09_AIServices_05_ChatMemory {

    public static void main(String[] args) {
        // Single chat model shared by every conversation.
        OpenAiChatModel chatModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);

        // The memory provider creates an independent 10-message sliding window
        // per memory id, so conversations 1 and 2 below do not mix.
        ChatAssistant chatAssistant = AiServices.builder(ChatAssistant.class)
                .chatLanguageModel(chatModel)
                .chatMemoryProvider(id -> MessageWindowChatMemory.withMaxMessages(10))
                .build();

        System.out.println(chatAssistant.chat(1, "Hello, my name is Michael"));
        System.out.println(chatAssistant.chat(2, "Hello, my name is Karl"));
        System.out.println(chatAssistant.chat(2, "What is my name?"));
        System.out.println(chatAssistant.chat(1, "What is my name?"));
    }
}
// Assistant whose conversation history is keyed by memory id: each id keeps
// its own chat memory, so different ids never see each other's messages.
interface ChatAssistant {
    // memoryId selects the per-conversation memory; message is the user's turn.
    String chat(@MemoryId int memoryId, @UserMessage String message);
}
"dev.langchain4j.service.AiServices.builder"
] | [((529, 728), 'dev.langchain4j.service.AiServices.builder'), ((529, 703), 'dev.langchain4j.service.AiServices.builder'), ((529, 610), 'dev.langchain4j.service.AiServices.builder')] |