code
stringlengths 419
47.9k
| apis
sequencelengths 1
7
| extract_api
stringlengths 67
6.13k
|
---|---|---|
package dev.langchain4j.service;
import dev.langchain4j.agent.tool.DefaultToolExecutor;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.moderation.Moderation;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.spi.services.AiServicesFactory;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static dev.langchain4j.agent.tool.ToolSpecifications.toolSpecificationFrom;
import static dev.langchain4j.exception.IllegalConfigurationException.illegalConfiguration;
import static dev.langchain4j.internal.ValidationUtils.ensureNotNull;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
import static java.util.stream.Collectors.toList;
/**
 * AI Services provide a simpler and more flexible alternative to chains.
 * You can define your own API (a Java interface with one or more methods),
 * and AiServices will provide an implementation for it (we call this "AI Service").
 * <p>
 * Currently, AI Services support:
 * <pre>
 * - Prompt templates for user and system messages using {@link UserMessage} and {@link SystemMessage}
 * - Structured prompts as method arguments (see {@link StructuredPrompt})
 * - Shared or per-user (see {@link MemoryId}) chat memory
 * - RAG (see {@link RetrievalAugmentor})
 * - Tools (see {@link Tool})
 * - Various return types (output parsers), see below
 * - Streaming (use {@link TokenStream} as a return type)
 * - Auto-moderation using {@link Moderate}
 * </pre>
 * <p>
 * Here is the simplest example of an AI Service:
 *
 * <pre>
 * interface Assistant {
 *
 *     String chat(String userMessage);
 * }
 *
 * Assistant assistant = AiServices.create(Assistant.class, model);
 *
 * String answer = assistant.chat("hello");
 * System.out.println(answer); // Hello, how can I help you today?
 * </pre>
 *
 * <pre>
 * The return type of methods in your AI Service can be any of the following:
 * - a {@link String}, an {@link AiMessage} or a {@code Response<AiMessage>}, if you want to get the answer from the LLM as-is
 * - a {@code List<String>} or {@code Set<String>}, if you want to receive the answer as a collection of items or bullet points
 * - any {@link Enum} or a {@code boolean}, if you want to use the LLM for classification
 * - a primitive or boxed Java type: {@code int}, {@code Double}, etc., if you want to use the LLM for data extraction
 * - many default Java types: {@code Date}, {@code LocalDateTime}, {@code BigDecimal}, etc., if you want to use the LLM for data extraction
 * - any custom POJO, if you want to use the LLM for data extraction.
 * For POJOs, it is advisable to use the "json mode" feature if the LLM provider supports it. For OpenAI, this can be enabled by calling {@code responseFormat("json_object")} during model construction.
 *
 * </pre>
 * <p>
 * Let's see how we can classify the sentiment of a text:
 * <pre>
 * enum Sentiment {
 *     POSITIVE, NEUTRAL, NEGATIVE
 * }
 *
 * interface SentimentAnalyzer {
 *
 *     {@code @UserMessage}("Analyze sentiment of {{it}}")
 *     Sentiment analyzeSentimentOf(String text);
 * }
 *
 * SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model);
 *
 * Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("I love you");
 * System.out.println(sentiment); // POSITIVE
 * </pre>
 * <p>
 * As demonstrated, you can put {@link UserMessage} and {@link SystemMessage} annotations above a method to define
 * templates for user and system messages, respectively.
 * In this example, the special {@code {{it}}} prompt template variable is used because there's only one method parameter.
 * However, you can use more parameters as demonstrated in the following example:
 * <pre>
 * interface Translator {
 *
 *     {@code @SystemMessage}("You are a professional translator into {{language}}")
 *     {@code @UserMessage}("Translate the following text: {{text}}")
 *     String translate(@V("text") String text, @V("language") String language);
 * }
 * </pre>
 * <p>
 * See more examples <a href="https://github.com/langchain4j/langchain4j-examples/tree/main/other-examples/src/main/java">here</a>.
 *
 * @param <T> The interface for which AiServices will provide an implementation.
 */
public abstract class AiServices<T> {

    protected static final String DEFAULT = "default";

    protected final AiServiceContext context;

    // Guards ensuring that at most one of [retriever, contentRetriever, retrievalAugmentor]
    // is configured for a single builder; all three end up populating context.retrievalAugmentor.
    private boolean retrieverSet = false;
    private boolean contentRetrieverSet = false;
    private boolean retrievalAugmentorSet = false;

    protected AiServices(AiServiceContext context) {
        this.context = context;
    }

    /**
     * Creates an AI Service (an implementation of the provided interface), that is backed by the provided chat model.
     * This convenience method can be used to create simple AI Services.
     * For more complex cases, please use {@link #builder}.
     *
     * @param aiService         The class of the interface to be implemented.
     * @param chatLanguageModel The chat model to be used under the hood.
     * @return An instance of the provided interface, implementing all its defined methods.
     */
    public static <T> T create(Class<T> aiService, ChatLanguageModel chatLanguageModel) {
        return builder(aiService)
                .chatLanguageModel(chatLanguageModel)
                .build();
    }

    /**
     * Creates an AI Service (an implementation of the provided interface), that is backed by the provided streaming chat model.
     * This convenience method can be used to create simple AI Services.
     * For more complex cases, please use {@link #builder}.
     *
     * @param aiService                  The class of the interface to be implemented.
     * @param streamingChatLanguageModel The streaming chat model to be used under the hood.
     *                                   The return type of all methods should be {@link TokenStream}.
     * @return An instance of the provided interface, implementing all its defined methods.
     */
    public static <T> T create(Class<T> aiService, StreamingChatLanguageModel streamingChatLanguageModel) {
        return builder(aiService)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .build();
    }

    /**
     * Begins the construction of an AI Service.
     *
     * @param aiService The class of the interface to be implemented.
     * @return builder
     */
    public static <T> AiServices<T> builder(Class<T> aiService) {
        AiServiceContext context = new AiServiceContext(aiService);
        // If an AiServicesFactory is provided via SPI, the first one found wins;
        // otherwise fall back to the default implementation.
        for (AiServicesFactory factory : loadFactories(AiServicesFactory.class)) {
            return factory.create(context);
        }
        return new DefaultAiServices<>(context);
    }

    /**
     * Configures chat model that will be used under the hood of the AI Service.
     * <p>
     * Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
     * but not both at the same time.
     *
     * @param chatLanguageModel Chat model that will be used under the hood of the AI Service.
     * @return builder
     */
    public AiServices<T> chatLanguageModel(ChatLanguageModel chatLanguageModel) {
        context.chatModel = chatLanguageModel;
        return this;
    }

    /**
     * Configures streaming chat model that will be used under the hood of the AI Service.
     * The methods of the AI Service must return a {@link TokenStream} type.
     * <p>
     * Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
     * but not both at the same time.
     *
     * @param streamingChatLanguageModel Streaming chat model that will be used under the hood of the AI Service.
     * @return builder
     */
    public AiServices<T> streamingChatLanguageModel(StreamingChatLanguageModel streamingChatLanguageModel) {
        context.streamingChatModel = streamingChatLanguageModel;
        return this;
    }

    /**
     * Configures the chat memory that will be used to preserve conversation history between method calls.
     * <p>
     * Unless a {@link ChatMemory} or {@link ChatMemoryProvider} is configured, all method calls will be independent of each other.
     * In other words, the LLM will not remember the conversation from the previous method calls.
     * <p>
     * The same {@link ChatMemory} instance will be used for every method call.
     * <p>
     * If you want to have a separate {@link ChatMemory} for each user/conversation, configure {@link #chatMemoryProvider} instead.
     * <p>
     * Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
     *
     * @param chatMemory An instance of chat memory to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> chatMemory(ChatMemory chatMemory) {
        context.chatMemories = new ConcurrentHashMap<>();
        // The shared memory is stored under a reserved key, since there is no per-user memory ID.
        context.chatMemories.put(DEFAULT, chatMemory);
        return this;
    }

    /**
     * Configures the chat memory provider, which provides a dedicated instance of {@link ChatMemory} for each user/conversation.
     * To distinguish between users/conversations, one of the method's arguments should be a memory ID (of any data type)
     * annotated with {@link MemoryId}.
     * For each new (previously unseen) memoryId, an instance of {@link ChatMemory} will be automatically obtained
     * by invoking {@link ChatMemoryProvider#get(Object id)}.
     * Example:
     * <pre>
     * interface Assistant {
     *
     *     String chat(@MemoryId int memoryId, @UserMessage String message);
     * }
     * </pre>
     * If you prefer to use the same (shared) {@link ChatMemory} for all users/conversations, configure a {@link #chatMemory} instead.
     * <p>
     * Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
     *
     * @param chatMemoryProvider The provider of a {@link ChatMemory} for each new user/conversation.
     * @return builder
     */
    public AiServices<T> chatMemoryProvider(ChatMemoryProvider chatMemoryProvider) {
        context.chatMemories = new ConcurrentHashMap<>();
        context.chatMemoryProvider = chatMemoryProvider;
        return this;
    }

    /**
     * Configures a moderation model to be used for automatic content moderation.
     * If a method in the AI Service is annotated with {@link Moderate}, the moderation model will be invoked
     * to check the user content for any inappropriate or harmful material.
     *
     * @param moderationModel The moderation model to be used for content moderation.
     * @return builder
     * @see Moderate
     */
    public AiServices<T> moderationModel(ModerationModel moderationModel) {
        context.moderationModel = moderationModel;
        return this;
    }

    /**
     * Configures the tools that the LLM can use.
     * A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
     *
     * @param objectsWithTools One or more objects whose methods are annotated with {@link Tool}.
     *                         All these tools (methods annotated with {@link Tool}) will be accessible to the LLM.
     *                         Note that inherited methods are ignored.
     * @return builder
     * @see Tool
     */
    public AiServices<T> tools(Object... objectsWithTools) {
        return tools(Arrays.asList(objectsWithTools));
    }

    /**
     * Configures the tools that the LLM can use.
     * A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
     *
     * @param objectsWithTools A list of objects whose methods are annotated with {@link Tool}.
     *                         All these tools (methods annotated with {@link Tool}) are accessible to the LLM.
     *                         Note that inherited methods are ignored.
     * @return builder
     * @see Tool
     */
    public AiServices<T> tools(List<Object> objectsWithTools) {
        context.toolSpecifications = new ArrayList<>();
        context.toolExecutors = new HashMap<>();
        for (Object objectWithTool : objectsWithTools) {
            // getDeclaredMethods() deliberately ignores inherited methods (see javadoc above).
            for (Method method : objectWithTool.getClass().getDeclaredMethods()) {
                if (method.isAnnotationPresent(Tool.class)) {
                    ToolSpecification toolSpecification = toolSpecificationFrom(method);
                    context.toolSpecifications.add(toolSpecification);
                    context.toolExecutors.put(toolSpecification.name(), new DefaultToolExecutor(objectWithTool, method));
                }
            }
        }
        return this;
    }

    /**
     * Deprecated. Use {@link #contentRetriever(ContentRetriever)}
     * (e.g. {@link EmbeddingStoreContentRetriever}) instead.
     * <br>
     * Configures a retriever that will be invoked on every method call to fetch relevant information
     * related to the current user message from an underlying source (e.g., embedding store).
     * This relevant information is automatically injected into the message sent to the LLM.
     *
     * @param retriever The retriever to be used by the AI Service.
     * @return builder
     */
    @Deprecated
    public AiServices<T> retriever(Retriever<TextSegment> retriever) {
        if (contentRetrieverSet || retrievalAugmentorSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        if (retriever != null) {
            // Delegate to the non-deprecated path; the adapter preserves the legacy behavior.
            AiServices<T> withContentRetriever = contentRetriever(retriever.toContentRetriever());
            retrieverSet = true;
            return withContentRetriever;
        }
        return this;
    }

    /**
     * Configures a content retriever to be invoked on every method call for retrieving relevant content
     * related to the user's message from an underlying data source
     * (e.g., an embedding store in the case of an {@link EmbeddingStoreContentRetriever}).
     * The retrieved relevant content is then automatically incorporated into the message sent to the LLM.
     * <br>
     * This method provides a straightforward approach for those who do not require
     * a customized {@link RetrievalAugmentor}.
     * It configures a {@link DefaultRetrievalAugmentor} with the provided {@link ContentRetriever}.
     *
     * @param contentRetriever The content retriever to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> contentRetriever(ContentRetriever contentRetriever) {
        if (retrieverSet || retrievalAugmentorSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        contentRetrieverSet = true;
        context.retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .contentRetriever(ensureNotNull(contentRetriever, "contentRetriever"))
                .build();
        return this;
    }

    /**
     * Configures a retrieval augmentor to be invoked on every method call.
     *
     * @param retrievalAugmentor The retrieval augmentor to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> retrievalAugmentor(RetrievalAugmentor retrievalAugmentor) {
        if (retrieverSet || contentRetrieverSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        retrievalAugmentorSet = true;
        context.retrievalAugmentor = ensureNotNull(retrievalAugmentor, "retrievalAugmentor");
        return this;
    }

    /**
     * Constructs and returns the AI Service.
     *
     * @return An instance of the AI Service implementing the specified interface.
     */
    public abstract T build();

    /**
     * Validates the minimal configuration common to all implementations:
     * exactly one model family is set, and tools are only used together with chat memory.
     *
     * @throws dev.langchain4j.exception.IllegalConfigurationException if the configuration is invalid
     */
    protected void performBasicValidation() {
        if (context.chatModel == null && context.streamingChatModel == null) {
            throw illegalConfiguration("Please specify either chatLanguageModel or streamingChatLanguageModel");
        }
        if (context.toolSpecifications != null && !context.hasChatMemory()) {
            throw illegalConfiguration(
                    "Please set up chatMemory or chatMemoryProvider in order to use tools. "
                            + "A ChatMemory that can hold at least 3 messages is required for the tools to work properly. "
                            + "While the LLM can technically execute a tool without chat memory, if it only receives the " +
                            "result of the tool's execution without the initial message from the user, it won't interpret " +
                            "the result properly."
            );
        }
    }

    /**
     * Removes tool-related messages (tool execution requests and results) from the given history.
     *
     * @param messages the full message history
     * @return a new list without tool execution requests/results
     */
    public static List<ChatMessage> removeToolMessages(List<ChatMessage> messages) {
        return messages.stream()
                .filter(it -> !(it instanceof ToolExecutionResultMessage))
                .filter(it -> !(it instanceof AiMessage && ((AiMessage) it).hasToolExecutionRequests()))
                .collect(toList());
    }

    /**
     * Blocks on the given moderation future (if any) and throws if the text was flagged.
     *
     * @param moderationFuture the pending moderation check, or {@code null} if moderation is disabled
     * @throws ModerationException if the moderated text violates the content policy
     * @throws RuntimeException    if the moderation call fails or the wait is interrupted
     */
    public static void verifyModerationIfNeeded(Future<Moderation> moderationFuture) {
        if (moderationFuture != null) {
            try {
                Moderation moderation = moderationFuture.get();
                if (moderation.flagged()) {
                    throw new ModerationException(String.format("Text \"%s\" violates content policy", moderation.flaggedText()));
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status so code further up the stack can observe it.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder"
] | [((15779, 15926), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((15779, 15901), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
package org.mfusco;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import static java.time.Duration.ofSeconds;
public class MortgageChat {

    /** Chat model shared by the extractor and the assistant. */
    private final ChatLanguageModel model;

    /** AI service that pulls structured person data out of free-form text. */
    private final PersonExtractor extractor;

    /** Rule engine holding registered people and mortgage calculation rules, exposed to the LLM as tools. */
    private final DroolsMortgageCalculator droolsMortgageCalculator = new DroolsMortgageCalculator();

    /** Conversational assistant with a sliding-window memory and access to the Drools tools. */
    private final Assistant assistant;

    public MortgageChat(String openAiApiKey) {
        this.model = OpenAiChatModel.builder()
                .apiKey(openAiApiKey)
                .timeout(ofSeconds(60))
                .build();
        this.extractor = AiServices.create(PersonExtractor.class, model);
        this.assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(droolsMortgageCalculator)
                .build();
    }

    /**
     * Routes the user's input: text ending with '?' is treated as a question for the assistant,
     * anything else is treated as a statement describing a person to register.
     */
    public String chat(String text) {
        if (text.endsWith("?")) {
            return assistant.chat(text);
        }
        return extractPerson(text);
    }

    /** Extracts a person from the text, registers it with the rule engine, and echoes it back. */
    private String extractPerson(String text) {
        Person extracted = extractor.extractPersonFrom(text);
        droolsMortgageCalculator.register(extracted);
        return extracted.toString();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((601, 729), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 704), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 664), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((822, 1046), 'dev.langchain4j.service.AiServices.builder'), ((822, 1021), 'dev.langchain4j.service.AiServices.builder'), ((822, 972), 'dev.langchain4j.service.AiServices.builder'), ((822, 899), 'dev.langchain4j.service.AiServices.builder')] |
package com.moyz.adi.common.service;
import com.moyz.adi.common.helper.LLMContext;
import com.moyz.adi.common.interfaces.TriConsumer;
import com.moyz.adi.common.util.AdiPgVectorEmbeddingStore;
import com.moyz.adi.common.vo.AnswerMeta;
import com.moyz.adi.common.vo.PromptMeta;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Slf4j
@Service
public class RAGService {

    @Value("${spring.datasource.url}")
    private String dataBaseUrl;

    @Value("${spring.datasource.username}")
    private String dataBaseUserName;

    @Value("${spring.datasource.password}")
    private String dataBasePassword;

    // Matches "jdbc:postgresql://<host>:<port>/<database>..." — groups: 1=host, 2=port, 3=database.
    // Compiled once instead of on every initEmbeddingStore() call.
    private static final Pattern JDBC_URL_PATTERN = Pattern.compile("jdbc:postgresql://([^:/]+):(\\d+)/(\\w+).+");

    // Prompt (in Chinese): "Answer the question below as accurately as possible, based on the
    // knowledge-base content that follows." The template text itself must stay unchanged.
    private static final PromptTemplate promptTemplate = PromptTemplate.from("尽可能准确地回答下面的问题: {{question}}\n\n根据以下知识库的内容:\n{{information}}");

    private EmbeddingModel embeddingModel;
    private EmbeddingStore<TextSegment> embeddingStore;

    /**
     * Initializes the embedding model and the pgvector-backed embedding store.
     * Must be invoked before any ingest/retrieve call.
     */
    public void init() {
        log.info("initEmbeddingModel");
        embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        embeddingStore = initEmbeddingStore();
    }

    /**
     * Builds the pgvector embedding store, deriving host/port/database from the JDBC datasource URL.
     *
     * @throws RuntimeException if the datasource URL does not match the expected JDBC format
     */
    private EmbeddingStore<TextSegment> initEmbeddingStore() {
        Matcher matcher = JDBC_URL_PATTERN.matcher(dataBaseUrl);
        if (!matcher.matches()) {
            throw new RuntimeException("parse url error");
        }
        String host = matcher.group(1);
        String port = matcher.group(2);
        String databaseName = matcher.group(3);
        log.info("Embedding store target - host: {}, port: {}, database: {}", host, port, databaseName);
        AdiPgVectorEmbeddingStore embeddingStore = AdiPgVectorEmbeddingStore.builder()
                .host(host)
                .port(Integer.parseInt(port))
                .database(databaseName)
                .user(dataBaseUserName)
                .password(dataBasePassword)
                .dimension(384) // AllMiniLmL6V2 produces 384-dimensional embeddings
                .createTable(true)
                .dropTableFirst(false)
                .table("adi_knowledge_base_embedding")
                .build();
        return embeddingStore;
    }

    /** Builds an ingestor that splits documents into ~1000-token chunks and embeds them into the store. */
    private EmbeddingStoreIngestor getEmbeddingStoreIngestor() {
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(1000, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor embeddingStoreIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        return embeddingStoreIngestor;
    }

    /**
     * Splits the document into chunks and stores their embeddings.
     *
     * @param document knowledge-base document to ingest
     */
    public void ingest(Document document) {
        getEmbeddingStoreIngestor().ingest(document);
    }

    /**
     * Retrieves the most relevant knowledge-base segments for the question and renders them
     * into a prompt, or returns {@code null} when nothing relevant was found.
     *
     * @param kbUuid   knowledge-base identifier to search within
     * @param question the user's question
     * @return the rendered prompt, or {@code null} if no relevant content was retrieved
     */
    public Prompt retrieveAndCreatePrompt(String kbUuid, String question) {
        // Embed the question
        Embedding questionEmbedding = embeddingModel.embed(question).content();
        // Find relevant embeddings in embedding store by semantic similarity.
        // You can play with the parameters below to find a sweet spot for your specific use case.
        int maxResults = 3;
        double minScore = 0.6;
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings =
                ((AdiPgVectorEmbeddingStore) embeddingStore).findRelevantByKbUuid(kbUuid, questionEmbedding, maxResults, minScore);
        // Concatenate the matched segments into the "information" template variable.
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().text())
                .collect(joining("\n\n"));
        if (StringUtils.isBlank(information)) {
            return null;
        }
        // quoteReplacement guards against '$'/'\' being interpreted during template substitution.
        return promptTemplate.apply(Map.of("question", question, "information", Matcher.quoteReplacement(information)));
    }

    /**
     * Retrieves relevant knowledge-base content for the question and asks the LLM.
     *
     * @param kbUuid    knowledge-base uuid
     * @param question  the user's question
     * @param modelName LLM model name
     * @return a pair of (rendered prompt text, model response), or {@code null} if no relevant content was found
     */
    public Pair<String, Response<AiMessage>> retrieveAndAsk(String kbUuid, String question, String modelName) {
        Prompt prompt = retrieveAndCreatePrompt(kbUuid, question);
        if (null == prompt) {
            return null;
        }
        Response<AiMessage> response = new LLMContext(modelName).getLLMService().chat(prompt.toUserMessage());
        return new ImmutablePair<>(prompt.text(), response);
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3615), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3590), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3535), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3496), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3461), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3429), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3385), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3345), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3305), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3259), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3894, 4099), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4074), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4026), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 3978), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package dev.zbendhiba.demo.telegram.openapi;
import java.util.List;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import jakarta.enterprise.context.ApplicationScoped;
import static java.time.Duration.ofSeconds;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.telegram.model.IncomingMessage;
import org.eclipse.microprofile.config.inject.ConfigProperty;
@ApplicationScoped
public class Routes extends RouteBuilder {

    // OpenAI API key, injected from application configuration.
    @ConfigProperty(name="open-api-key")
    String openApiKey;

    // In-process embedding model; paragraphs are embedded locally rather than via the OpenAI API.
    private EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

    // Volatile in-memory vector store — embeddings are lost on restart.
    private EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

    /**
     * Defines all Camel routes:
     * two REST ingestion endpoints (Camel-based vs. langchain4j-based splitting),
     * the embedding pipeline, and the Telegram chat route backed by a
     * ConversationalRetrievalChain over the in-memory embedding store.
     */
    @Override
    public void configure() throws Exception {
        // REST endpoint to add a bio
        rest("data")
                .post("/camel-split-ingest/")
                .to("direct:camel-split-ingest")
                .post("/langchain4j-split-ingest/")
                .to("direct:langchain4j-split-ingest");
        // Ingest Data
        // wireTap processes the bio asynchronously; the caller immediately gets "Thanks".
        from("direct:camel-split-ingest")
                .wireTap("direct:processBio")
                .transform().simple("Thanks");
        from("direct:processBio")
                // split into paragraphs and use OpenApiTokenizer
                .split(body().tokenize("\\s*\\n\\s*\\n"))
                .setHeader("paragraphNumber", simple("${exchangeProperty.CamelSplitIndex}"))
                // Process each paragraph using the OpenAiTokenizerProcessor
                .process(new CamelSplitterProcessor())
                .to("direct:processTokenizedPart")
                .end();
        // Embed paragraphs into Vector Database
        // Body is expected to be a List<TextSegment> produced by one of the splitter processors.
        from("direct:processTokenizedPart")
                .process(exchange -> {
                    embed(exchange.getIn().getBody(List.class));
                });
        from("direct:process-langchain4j-split-ingest")
                .process(new LangchainSplitterProcessor())
                .to("direct:processTokenizedPart");
        from("direct:langchain4j-split-ingest")
                .wireTap("direct:process-langchain4j-split-ingest")
                .transform().simple("Thanks");
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(openApiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.3)
                .timeout(ofSeconds(3000))
                .build();
        // RAG chain: retrieves relevant segments from the embedding store and feeds them
        // into the prompt template together with the user's question.
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(model)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate
                        .from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();
        from("telegram:bots?timeout=30000")
                .log("Text received in Telegram : ${body}")
                // this is just a Hello World, we suppose that we receive only text messages from user
                .filter(simple("${body} != '/start'"))
                .process(e->{
                    IncomingMessage incomingMessage = e.getMessage().getBody(IncomingMessage.class);
                    var openapiMessage = chain.execute(incomingMessage.getText());
                    e.getMessage().setBody(openapiMessage);
                })
                .log("Text to send to user based on response from ChatGPT : ${body}")
                .to("telegram:bots")
                .end();
    }

    /**
     * Embeds the given text segments and stores them (with their source segments)
     * in the in-memory embedding store.
     */
    public void embed(List<TextSegment> textSegments ) {
        List<Embedding> embeddings = embeddingModel.embedAll(textSegments).content();
        embeddingStore.addAll(embeddings, textSegments);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2918, 3122), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3055), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3021), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 2979), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3171, 3658), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3633), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3413), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3340), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3251), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package eu.luminis.faqlangchain.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.time.Duration;
import java.util.Arrays;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.JsonNode;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.stereotype.Service;
import org.springframework.util.ResourceUtils;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
@Service
public class IngestService {

    private static final Logger LOGGER = LoggerFactory.getLogger(IngestService.class);

    // Client for the Unstructured.io document-parsing API (base URL + API key set in the constructor).
    private final WebClient webClient;
    private final EmbeddingStore<TextSegment> embeddingStore;
    private final EmbeddingModel embeddingModel;

    public IngestService(@Value("${unstructured.apiKey}") String unstructuredApiKey,
                         @Qualifier("openaiModel") EmbeddingModel embeddingModel,
                         @Qualifier("inMemoryEmbeddingStore") EmbeddingStore<TextSegment> embeddingStore) {
        this.embeddingModel = embeddingModel;
        this.embeddingStore = embeddingStore;
        this.webClient = WebClient.builder()
                .baseUrl("https://api.unstructured.io/general/v0/")
                .defaultHeader("unstructured-api-key", unstructuredApiKey)
                .build();
    }

    /**
     * Reads the bundled FAQ PDF, extracts its text via the Unstructured API (OCR),
     * splits it into segments, and ingests the embeddings into the store.
     *
     * @return {@code true} if the PDF was extracted and ingested successfully, {@code false} otherwise
     * @throws FileNotFoundException if the classpath resource {@code data/faq.pdf} is missing
     */
    public boolean ingestPDF() throws FileNotFoundException {
        LOGGER.info("Ingesting PDF");
        File file = ResourceUtils.getFile("classpath:data/faq.pdf");
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", new FileSystemResource(file));
        builder.part("strategy", "ocr_only");
        builder.part("ocr_languages", "eng");
        // On HTTP 200 the body is UnstructuredResponse[]; on any other status we decode the
        // error body as JsonNode so the failure branch below can log it.
        Mono<Object> mono = webClient.post()
                .uri("general")
                .contentType(MediaType.MULTIPART_FORM_DATA)
                .body(BodyInserters.fromMultipartData(builder.build()))
                .exchangeToMono(response -> {
                    if (response.statusCode().equals(HttpStatus.OK)) {
                        return response.bodyToMono(UnstructuredResponse[].class);
                    } else {
                        LOGGER.error("Something went wrong when uploading file to Unstructured API. Received status code {}", response.statusCode());
                        return response.bodyToMono(JsonNode.class);
                    }
                });
        // Block with a timeout: OCR extraction can be slow.
        Object response = mono.block(Duration.ofMinutes(1));
        if (response instanceof JsonNode jsonNode) {
            LOGGER.error("Response: {}", jsonNode);
            return false;
        }
        if (response instanceof UnstructuredResponse[] unstructuredResponses) {
            // Concatenate the text of every extracted element into a single document.
            String text = Arrays.stream(unstructuredResponses).map(UnstructuredResponse::getText).collect(Collectors.joining(" "));
            Document document = Document.from(text);
            // Split into segments of up to 300 tokens before embedding.
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(300);
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(documentSplitter)
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            ingestor.ingest(document);
            LOGGER.info("Ingestion of PDF finished");
            return true;
        }
        // Timed out (block returned null) or an unexpected payload type.
        return false;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1939, 2126), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2101), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2026), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((3531, 3635), 'java.util.Arrays.stream'), ((3531, 3602), 'java.util.Arrays.stream'), ((3819, 4040), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 4011), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3959), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3907), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.vertexai;
import dev.langchain4j.model.vertexai.VertexAiChatModel;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

    public static void main(String[] args) {
        MusicianService musicianService = new MusicianService();
        musicianService.useVertexAiLanguageModelBuilder();
    }

    // NOTE(review): these are Azure OpenAI environment variables, yet the endpoint
    // is fed into a Vertex AI builder below - confirm which provider this sample
    // is meant to demonstrate. AZURE_OPENAI_KEY and AZURE_OPENAI_DEPLOYMENT_NAME
    // are currently unused.
    private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
    private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
    private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");

    private static final String PROMPT = "When was the first Beatles album released?";

    // ##################################
    // ### VERTEX AI LANGUAGE MODEL ###
    // ##################################

    /**
     * Builds a Vertex AI chat model and prints the completion for {@link #PROMPT}.
     */
    public void useVertexAiLanguageModelBuilder() {
        System.out.println("### useVertexAiLanguageModelBuilder");
        // tag::adocSnippet[]
        VertexAiChatModel model = VertexAiChatModel.builder()
            .endpoint(AZURE_OPENAI_ENDPOINT)
            .temperature(0.3)
            .build();
        // end::adocSnippet[]
        String completion = model.generate(PROMPT);
        // Fix: the completion was previously computed but never used.
        System.out.println(completion);
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiChatModel.builder"
] | [((1100, 1205), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1190), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1166), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder')] |
package com.example.application;
import com.example.application.services.BookingTools;
import com.example.application.services.CustomerSupportAgent;
import com.vaadin.flow.component.page.AppShellConfigurator;
import com.vaadin.flow.theme.Theme;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_4;
@SpringBootApplication
@Theme(value = "customer-service-chatbot")
public class Application implements AppShellConfigurator {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    // Local in-process embedding model - embeddings are computed without remote API calls.
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    // Non-persistent store: embeddings are rebuilt on every application start.
    @Bean
    EmbeddingStore<TextSegment> embeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }

    @Bean
    Tokenizer tokenizer() {
        return new OpenAiTokenizer(GPT_3_5_TURBO);
    }

    // In the real world, ingesting documents would often happen separately, on a CI server or similar
    // Loads classpath:terms-of-service.txt at startup, splits it into chunks of at
    // most 200 tokens (no overlap) and embeds them into the store.
    @Bean
    CommandLineRunner docsToEmbeddings(
            EmbeddingModel embeddingModel,
            EmbeddingStore<TextSegment> embeddingStore,
            Tokenizer tokenizer,
            ResourceLoader resourceLoader
    ) throws IOException {
        return args -> {
            Resource resource =
                    resourceLoader.getResource("classpath:terms-of-service.txt");
            // NOTE(review): Resource.getFile() fails when running from a packaged
            // jar - consider reading via getInputStream() instead.
            var termsOfUse = loadDocument(resource.getFile().toPath(), new TextDocumentParser());
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(200, 0,
                    tokenizer);
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(documentSplitter)
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            ingestor.ingest(termsOfUse);
        };
    }

    // Streaming chat model so the UI can render responses token by token.
    @Bean
    StreamingChatLanguageModel chatLanguageModel() {
        return OpenAiStreamingChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .build();
    }

    // Returns at most 2 segments per query, discarding matches scoring below 0.6.
    @Bean
    ContentRetriever retriever(
            EmbeddingStore<TextSegment> embeddingStore,
            EmbeddingModel embeddingModel
    ) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2)
                .minScore(0.6)
                .build();
    }

    // Assembles the AI service: streaming model + a per-chat 1000-token sliding
    // window memory + RAG content retriever + booking tools.
    @Bean
    CustomerSupportAgent customerSupportAgent(
            StreamingChatLanguageModel chatLanguageModel,
            Tokenizer tokenizer,
            ContentRetriever retriever,
            BookingTools tools
    ) {
        return AiServices.builder(CustomerSupportAgent.class)
                .streamingChatLanguageModel(chatLanguageModel)
                .chatMemoryProvider(chatId -> TokenWindowChatMemory.builder()
                        .id(chatId)
                        .maxTokens(1000, tokenizer)
                        .build())
                .contentRetriever(retriever)
                .tools(tools)
                .build();
    }
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.memory.chat.TokenWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3417), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3388), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3336), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3284), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3556, 3705), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3680), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3638), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3878, 4101), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4076), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4045), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4014), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 3966), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4354, 4763), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4738), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4708), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4663), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4463), 'dev.langchain4j.service.AiServices.builder'), ((4510, 4662), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4629), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4577), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
@Slf4j
public class TwoPassSqlGeneration implements SqlGeneration, InitializingBean {

    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");

    @Autowired
    private ChatLanguageModel chatLanguageModel;

    @Autowired
    private SqlExamplarLoader sqlExamplarLoader;

    @Autowired
    private OptimizationConfig optimizationConfig;

    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;

    /**
     * Two-pass text-to-SQL generation: pass one asks the model for schema links
     * for the question, pass two asks it for the final SQL using those links.
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);

        List<Map<String, String>> exemplars =
                sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
                        optimizationConfig.getText2sqlExampleNum());

        // Pass one: derive the schema linking from the question plus examples.
        String linkingPromptText = sqlPromptGenerator.generateLinkingPrompt(llmReq, exemplars);
        Prompt linkingPrompt = toPrompt(linkingPromptText);
        keyPipelineLog.info("step one request prompt:{}", linkingPrompt.toSystemMessage());
        Response<AiMessage> linkingResponse = chatLanguageModel.generate(linkingPrompt.toSystemMessage());
        keyPipelineLog.info("step one model response:{}", linkingResponse.content().text());
        String schemaLinkStr = OutputFormat.getSchemaLink(linkingResponse.content().text());

        // Pass two: generate the SQL, grounded on the schema links from pass one.
        String sqlPromptText = sqlPromptGenerator.generateSqlPrompt(llmReq, schemaLinkStr, exemplars);
        Prompt sqlPrompt = toPrompt(sqlPromptText);
        keyPipelineLog.info("step two request prompt:{}", sqlPrompt.toSystemMessage());
        Response<AiMessage> sqlResponse = chatLanguageModel.generate(sqlPrompt.toSystemMessage());
        String result = sqlResponse.content().text();
        keyPipelineLog.info("step two model response:{}", result);

        Map<String, Double> sqlMap = new HashMap<>();
        sqlMap.put(result, 1D);
        keyPipelineLog.info("schemaLinkStr:{},sqlMap:{}", schemaLinkStr, sqlMap);

        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(exemplars, sqlMap));
        return llmResp;
    }

    // Wraps raw prompt text into a Prompt: JsonUtil.toString escapes the text
    // before it reaches the template engine, then an empty variable map is applied.
    private Prompt toPrompt(String rawPrompt) {
        return PromptTemplate.from(JsonUtil.toString(rawPrompt)).apply(new HashMap<>());
    }

    @Override
    public void afterPropertiesSet() {
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((1891, 1970), 'dev.langchain4j.model.input.PromptTemplate.from'), ((2459, 2539), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package com.sg.chatbot.service;
import org.springframework.http.codec.ServerSentEvent;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@Service
public class ChatService {

    // NOTE(review): a live-looking OpenAI API key is committed to source control
    // here - rotate it immediately and load it from configuration/environment
    // instead of hardcoding it.
    private String openaiApiKey = "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";

    private Assistant assistant;
    private StreamingAssistant streamingAssistant;

    // Blocking chat contract; implementation is generated by langchain4j AiServices.
    interface Assistant {
        String chat(String message);
    }

    // Token-streaming variant of the same contract.
    interface StreamingAssistant {
        TokenStream chat(String message);
    }

    public ChatService(){
        // NOTE(review): this check can never trigger while the key is hardcoded above.
        if (openaiApiKey == null) {
            System.err
                    .println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
        }
        // Both assistants share one 2000-token sliding-window memory, so blocking
        // and streaming conversations see the same history.
        var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
        assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(openaiApiKey))
                .chatMemory(memory)
                .build();
        streamingAssistant = AiServices.builder(StreamingAssistant.class)
                .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(openaiApiKey))
                .chatMemory(memory)
                .build();
    }

    /**
     * Sends one user message and blocks until the full reply is available.
     */
    public String chat(String message) {
        System.out.println(message);
        return assistant.chat(message);
    }

    /**
     * Streams the reply as server-sent events named "chat", one event per token.
     * Completion and errors from the token stream are propagated into the sink.
     */
    public Flux<ServerSentEvent<String>> chatStream(String message) {
        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
        streamingAssistant.chat(message)
                .onNext(sink::tryEmitNext)
                .onComplete(c -> sink.tryEmitComplete())
                .onError(sink::tryEmitError)
                .start();
        return sink.asFlux().map(mes -> ServerSentEvent.<String>builder()
                .event("chat")
                .data(mes)
                .build());
    }
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1177, 1326), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1309), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1281), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1530), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1513), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1485), 'dev.langchain4j.service.AiServices.builder'), ((1748, 1793), 'reactor.core.publisher.Sinks.many'), ((1748, 1770), 'reactor.core.publisher.Sinks.many'), ((2009, 2107), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2090), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2065), 'org.springframework.http.codec.ServerSentEvent.<String>builder')] |
package dev.langchain4j.model.azure;
import com.azure.ai.openai.models.*;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import java.util.List;
import static dev.langchain4j.model.azure.InternalAzureOpenAiHelper.finishReasonFrom;
import static java.util.Collections.singletonList;
/**
* This class needs to be thread safe because it is called when a streaming result comes back
* and there is no guarantee that this thread will be the same as the one that initiated the request,
* in fact it almost certainly won't be.
*/
class AzureOpenAiStreamingResponseBuilder {

    // StringBuffer (not StringBuilder) is deliberate: appends may arrive from a
    // different thread than the one that initiated the request (see class comment).
    private final StringBuffer contentBuilder = new StringBuffer();
    private final StringBuffer toolNameBuilder = new StringBuffer();
    private final StringBuffer toolArgumentsBuilder = new StringBuffer();

    // volatile so the finish reason written by the streaming thread is visible
    // to whichever thread eventually calls build().
    private volatile CompletionsFinishReason finishReason;

    private final Integer inputTokenCount;

    public AzureOpenAiStreamingResponseBuilder(Integer inputTokenCount) {
        this.inputTokenCount = inputTokenCount;
    }

    /**
     * Accumulates one chat-completions streaming chunk. Only the first choice is
     * inspected; the finish reason is recorded when present, then either content
     * text or a tool-call name/arguments delta is appended. Null/empty chunks are
     * ignored.
     */
    public void append(ChatCompletions completions) {
        if (completions == null) {
            return;
        }
        List<ChatChoice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        ChatChoice chatCompletionChoice = choices.get(0);
        if (chatCompletionChoice == null) {
            return;
        }
        CompletionsFinishReason finishReason = chatCompletionChoice.getFinishReason();
        if (finishReason != null) {
            this.finishReason = finishReason;
        }
        com.azure.ai.openai.models.ChatResponseMessage delta = chatCompletionChoice.getDelta();
        if (delta == null) {
            return;
        }
        String content = delta.getContent();
        if (content != null) {
            // A chunk carries either content or a function call, never both here.
            contentBuilder.append(content);
            return;
        }
        FunctionCall functionCall = delta.getFunctionCall();
        if (functionCall != null) {
            // Name and arguments can arrive in separate chunks; accumulate both.
            if (functionCall.getName() != null) {
                toolNameBuilder.append(functionCall.getName());
            }
            if (functionCall.getArguments() != null) {
                toolArgumentsBuilder.append(functionCall.getArguments());
            }
        }
    }

    /**
     * Accumulates one (legacy) text-completions streaming chunk: records the
     * finish reason when present and appends the text of the first choice.
     */
    public void append(Completions completions) {
        if (completions == null) {
            return;
        }
        List<Choice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        Choice completionChoice = choices.get(0);
        if (completionChoice == null) {
            return;
        }
        CompletionsFinishReason completionsFinishReason = completionChoice.getFinishReason();
        if (completionsFinishReason != null) {
            this.finishReason = completionsFinishReason;
        }
        String token = completionChoice.getText();
        if (token != null) {
            contentBuilder.append(token);
        }
    }

    /**
     * Assembles the final response from everything appended so far: a text
     * response when any content was streamed, otherwise a tool execution request
     * when a tool name was streamed, otherwise null.
     */
    public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) {
        String content = contentBuilder.toString();
        if (!content.isEmpty()) {
            return Response.from(
                    AiMessage.from(content),
                    tokenUsage(content, tokenizer),
                    finishReasonFrom(finishReason)
            );
        }
        String toolName = toolNameBuilder.toString();
        if (!toolName.isEmpty()) {
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(toolName)
                    .arguments(toolArgumentsBuilder.toString())
                    .build();
            return Response.from(
                    AiMessage.from(toolExecutionRequest),
                    tokenUsage(toolExecutionRequest, tokenizer, forcefulToolExecution),
                    finishReasonFrom(finishReason)
            );
        }
        return null;
    }

    // Token usage for a plain text response; null when no tokenizer is configured.
    private TokenUsage tokenUsage(String content, Tokenizer tokenizer) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = tokenizer.estimateTokenCountInText(content);
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }

    // Token usage for a tool execution request; null when no tokenizer is configured.
    private TokenUsage tokenUsage(ToolExecutionRequest toolExecutionRequest, Tokenizer tokenizer, boolean forcefulToolExecution) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = 0;
        if (forcefulToolExecution) {
            // OpenAI calculates output tokens differently when tool is executed forcefully
            outputTokenCount += tokenizer.estimateTokenCountInForcefulToolExecutionRequest(toolExecutionRequest);
        } else {
            outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(singletonList(toolExecutionRequest));
        }
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((3735, 3894), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3865), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3801), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package dev.nano.sbot.configuration;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.nano.sbot.retriever.EmbeddingStoreLoggingRetriever;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import java.util.List;
import static dev.nano.sbot.constant.Constants.PROMPT_TEMPLATE_2;
@Configuration
@RequiredArgsConstructor
@Slf4j
public class LangChainConfiguration {

    @Value("${langchain.api.key}")
    private String apiKey;

    @Value("${langchain.timeout}")
    private Long timeout;

    private final List<Document> documents;

    /**
     * Wires the Spring Boot Q&A chain: the injected documents are split,
     * embedded with a local MiniLM model into an in-memory store at startup,
     * then served through a retrieval-augmented OpenAI chat model.
     */
    @Bean
    public ConversationalRetrievalChain chain() {
        EmbeddingModel miniLmModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> inMemoryStore = new InMemoryEmbeddingStore<>();

        EmbeddingStoreIngestor documentIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(miniLmModel)
                .embeddingStore(inMemoryStore)
                .build();
        log.info("Ingesting Spring Boot Resources ...");
        documentIngestor.ingest(documents);
        log.info("Ingested {} documents", documents.size());

        // Decorates the plain retriever with logging of retrieved segments.
        EmbeddingStoreLoggingRetriever loggingRetriever =
                new EmbeddingStoreLoggingRetriever(EmbeddingStoreRetriever.from(inMemoryStore, miniLmModel));

        log.info("Building ConversationalRetrievalChain ...");
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(OpenAiChatModel.builder()
                        .apiKey(apiKey)
                        .timeout(Duration.ofSeconds(timeout))
                        .build()
                )
                .promptTemplate(PromptTemplate.from(PROMPT_TEMPLATE_2))
                .retriever(loggingRetriever)
                .build();
        log.info("Spring Boot knowledge base is ready!");
        return chain;
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1682, 1906), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1881), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1833), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1785), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2530, 2966), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2941), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2854), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2782), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2604, 2764), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2731), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2669), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.nexus.backend.service;
import com.nexus.backend.dto.UserTender;
import com.nexus.backend.entity.Act;
import com.nexus.backend.entity.Tender;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
@Service
public class AiService {

    // NOTE(review): API keys are hardcoded placeholders ("KEY"/"API_KEY") - move
    // them to external configuration and never commit real keys to source control.

    /**
     * Smoke test: renders a joke prompt and prints the model's reply.
     */
    public void testGpt() {
        PromptTemplate promptTemplate = PromptTemplate
                .from("Tell me a {{adjective}} joke about {{content}}..");
        Map<String, Object> variables = new HashMap<>();
        variables.put("adjective", "funny");
        variables.put("content", "computers");
        generate(promptTemplate.apply(variables), "KEY");
    }

    /**
     * Asks the model whether the given tender/plan is broadly compliant with the act.
     *
     * @param act        government act providing the compliance requirements
     * @param userTender the user's tender/plan to check
     * @return the model's free-text compliance assessment
     */
    public String checkIfCompliant(Act act, UserTender userTender) {
        PromptTemplate promptTemplate = PromptTemplate
                .from("This is a government act with a set of compliances {{act}}, With keeping this above act in mind, tell me if my tender/plan seems broadly compliant or not. " +
                        "Consider this tender/plan: {{tender}}" +
                        "Let me know if there are any shortcomings and where the tender/plan is not compliant. Also tell me about penalties.");
        Map<String, Object> variables = new HashMap<>();
        variables.put("act", act);
        variables.put("tender", userTender);
        return generate(promptTemplate.apply(variables), "API_KEY");
    }

    // TODO: not implemented yet - signature kept so existing callers keep compiling.
    public void Summarise(){
    }

    /**
     * Asks the model whether the user's tender is broadly compliant with a
     * reference government tender.
     *
     * @return the model's free-text compliance assessment
     */
    public String checkIfTenderIsCompliant(Tender tender, String userTender) {
        PromptTemplate promptTemplate = PromptTemplate
                .from("This is a government Tender with a set of compliances {{tender}}. With keeping this above act in mind, tell me if my tender seems broadly compliant or not. " +
                        "Consider this tender/plan: {{userTender}}" +
                        "Let me know if there are any shortcomings and where the tender is not compliant. Also tell me about penalties.");
        Map<String, Object> variables = new HashMap<>();
        variables.put("tender", tender.toString());
        // Fix: dropped the redundant .toString() on a String argument.
        variables.put("userTender", userTender);
        return generate(promptTemplate.apply(variables), "KEY");
    }

    // Shared helper deduplicating the three methods above: builds a GPT-3.5 model
    // with the given key, sends the prompt, prints and returns the reply.
    private String generate(Prompt prompt, String apiKey) {
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(apiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.3)
                .build();
        String response = model.generate(prompt.text());
        System.out.println(response);
        return response;
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((957, 1097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1072), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1038), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1013), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2109), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2084), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2050), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2008), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3197), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3163), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3121), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package eu.luminis.faqlangchain.config;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.inprocess.InProcessEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.inprocess.InProcessEmbeddingModelType.*;
import static dev.langchain4j.model.openai.OpenAiModelName.*;
import static java.time.Duration.*;
@Configuration
public class QuestionAnsweringConfig {

    @Value("${openai.apiKey}")
    private String openaiApiKey;

    /** Remote OpenAI embedding model (text-embedding-ada-002). */
    @Qualifier("openaiModel")
    @Bean
    public EmbeddingModel openaiEmbeddingModel() {
        EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder()
                .apiKey(openaiApiKey)
                .modelName(TEXT_EMBEDDING_ADA_002)
                .build();
        return embeddingModel;
    }

    /** Local model that computes embeddings in-process (no network calls). */
    @Qualifier("inMemoryModel")
    @Bean
    public EmbeddingModel inMemoryEmbeddingModel() {
        return new InProcessEmbeddingModel(ALL_MINILM_L6_V2);
    }

    /** GPT-3.5 chat model with request/response logging enabled. */
    @Qualifier("openaiChatModel")
    @Bean
    public ChatLanguageModel openaiChatModel() {
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(openaiApiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.7)
                .timeout(ofSeconds(15))
                .maxRetries(3)
                .logResponses(true)
                .logRequests(true)
                .build();
        return chatModel;
    }

    /** Volatile store, mainly useful for local development and tests. */
    @Qualifier("inMemoryEmbeddingStore")
    @Bean
    public EmbeddingStore<TextSegment> inMemoryEmbeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }

    /** Weaviate cluster reached over HTTPS at the configured host. */
    @Qualifier("weaviateEmbeddingStore")
    @Bean
    public EmbeddingStore<TextSegment> weaviateEmbeddingStore(@Value("${weaviate.apiKey}") String apiKey,
                                                              @Value("${weaviate.host}") String host) {
        EmbeddingStore<TextSegment> weaviateStore = WeaviateEmbeddingStore.builder()
                .apiKey(apiKey)
                .scheme("https")
                .host(host)
                .build();
        return weaviateStore;
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder"
] | [((1210, 1354), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1329), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1278), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1635, 1941), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1916), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1881), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1845), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1814), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1774), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1740), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1698), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2397, 2547), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2522), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2494), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2461), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder')] |
package com.example.demo;
import java.time.Duration;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.List;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.V;
public class AiServicesExamples {
static Duration duration = Duration.ofSeconds(60);
static ChatLanguageModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).timeout(duration).build();
////////////////// SIMPLE EXAMPLE //////////////////////
    // Minimal AiServices usage: declare a one-method interface and let
    // langchain4j generate the implementation backed by the shared model.
    static class Simple_AI_Service_Example {

        interface Assistant {
            String chat(String message);
        }

        public static void main(String[] args) {
            Assistant assistant = AiServices.create(Assistant.class, model);
            // French finance term; the assistant is asked to translate it.
            String userMessage = "Translate 'Plus-Values des cessions de valeurs mobilières, de droits sociaux et gains assimilés'";
            String answer = assistant.chat(userMessage);
            System.out.println(answer);
        }
    }
////////////////// WITH MESSAGE AND VARIABLES //////////////////////
static class AI_Service_with_System_and_User_Messages_Example {
interface TextUtils {
@SystemMessage("You are a professional translator into {{language}}")
@UserMessage("Translate the following text: {{text}}")
String translate(@V("text") String text, @V("language") String language);
@SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
List<String> summarize(@UserMessage String text, @V("n") int n);
}
public static void main(String[] args) {
TextUtils utils = AiServices.create(TextUtils.class, model);
String translation = utils.translate("Hello, how are you?", "italian");
System.out.println(translation); // Ciao, come stai?
String text = "AI, or artificial intelligence, is a branch of computer science that aims to create "
+ "machines that mimic human intelligence. This can range from simple tasks such as recognizing "
+ "patterns or speech to more complex tasks like making decisions or predictions.";
List<String> bulletPoints = utils.summarize(text, 3);
System.out.println(bulletPoints);
}
}
////////////////////EXTRACTING DIFFERENT DATA TYPES ////////////////////
static class Sentiment_Extracting_AI_Service_Example {
enum Sentiment {
POSITIVE, NEUTRAL, NEGATIVE;
}
interface SentimentAnalyzer {
@UserMessage("Analyze sentiment of {{it}}")
Sentiment analyzeSentimentOf(String text);
@UserMessage("Does {{it}} have a positive sentiment?")
boolean isPositive(String text);
}
public static void main(String[] args) {
SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model);
Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is amazing!");
System.out.println(sentiment); // POSITIVE
boolean positive = sentimentAnalyzer.isPositive("It is bad!");
System.out.println(positive); // false
}
}
static class POJO_Extracting_AI_Service_Example {
static class Person {
private String firstName;
private String lastName;
private LocalDate birthDate;
@Override
public String toString() {
return "Person {" + " firstName = \"" + firstName + "\"" + ", lastName = \"" + lastName + "\""
+ ", birthDate = " + birthDate + " }";
}
}
interface PersonExtractor {
@UserMessage("Extract information about a person from {{it}}")
Person extractPersonFrom(String text);
}
public static void main(String[] args) {
PersonExtractor extractor = AiServices.create(PersonExtractor.class, model);
String text = "In 1968, amidst the fading echoes of Independence Day, "
+ "a child named John arrived under the calm evening sky. "
+ "This newborn, bearing the surname Doe, marked the start of a new journey.";
Person person = extractor.extractPersonFrom(text);
System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 }
}
}
////////////////////// DESCRIPTIONS ////////////////////////
static class POJO_With_Descriptions_Extracting_AI_Service_Example {
static class Recipe {
@Description("short title, 3 words maximum")
private String title;
@Description("short description, 2 sentences maximum")
private String description;
@Description("each step should be described in 6 to 8 words, steps should rhyme with each other")
private List<String> steps;
private Integer preparationTimeMinutes;
@Override
public String toString() {
return "Recipe {" +
" title = \"" + title + "\"" +
", description = \"" + description + "\"" +
", steps = " + steps +
", preparationTimeMinutes = " + preparationTimeMinutes +
" }";
}
}
@StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
static class CreateRecipePrompt {
private String dish;
private List<String> ingredients;
}
interface Chef {
Recipe createRecipeFrom(String... ingredients);
Recipe createRecipe(CreateRecipePrompt prompt);
}
public static void main(String[] args) {
Chef chef = AiServices.create(Chef.class, model);
Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives", "lemon");
System.out.println(recipe);
CreateRecipePrompt prompt = new CreateRecipePrompt();
prompt.dish = "oven dish";
prompt.ingredients = Arrays.asList("cucumber", "tomato", "feta", "onion", "olives", "potatoes");
Recipe anotherRecipe = chef.createRecipe(prompt);
System.out.println(anotherRecipe);
}
}
////////////////////////// WITH MEMORY /////////////////////////
static class ServiceWithMemoryExample {
interface Assistant {
String chat(String message);
}
public static void main(String[] args) {
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(model)
.chatMemory(chatMemory)
.build();
String answer = assistant.chat("Hello! My name is Klaus.");
System.out.println(answer); // Hello Klaus! How can I assist you today?
String answerWithName = assistant.chat("What is my name?");
System.out.println(answerWithName); // Your name is Klaus.
}
}
static class ServiceWithMemoryForEachUserExample {
interface Assistant {
String chat(@MemoryId int memoryId, @UserMessage String userMessage);
}
public static void main(String[] args) {
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(model)
.chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
.build();
System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
// Hi Klaus! How can I assist you today?
System.out.println(assistant.chat(2, "Hello, my name is Francine"));
// Hello Francine! How can I assist you today?
System.out.println(assistant.chat(1, "What is my name?"));
// Your name is Klaus.
System.out.println(assistant.chat(2, "What is my name?"));
// Your name is Francine.
}
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((792, 874), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 866), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 848), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((6740, 6894), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6865), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6821), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7685), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7656), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7559), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
/**
 * Tool-calling demo: the assistant is given a Calculator whose @Tool-annotated
 * methods the model can invoke while answering the question.
 */
public class _04_Agents {

    /** Tools exposed to the model; discovered via the @Tool annotations. */
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    /** Contract implemented at runtime by AiServices. */
    interface Assistant {
        Response<AiMessage> chat(String userMessage);
    }

    public static void main(String[] args) {
        String apiKey = System.getenv("OPENAI_API_KEY");

        Assistant agent = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(apiKey))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(new Calculator())
                .build();

        String userQuestion = "What is the sum of the numbers of letters in the words 'language' and 'model'";
        Response<AiMessage> answer = agent.chat(userQuestion);

        System.out.println(answer.content().text());
        System.out.println("\n\n########### TOKEN USAGE ############\n");
        System.out.println(answer.tokenUsage());
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package me.nzuguem.bot.configurations.llm;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.annotation.PreDestroy;
import jakarta.enterprise.context.RequestScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Request-scoped provider of per-conversation chat memories, created lazily
 * and cached by memory id for the lifetime of the bean.
 */
@RequestScoped
public class ChatMemoryBean implements ChatMemoryProvider {

    private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();

    @Override
    public ChatMemory get(Object memoryId) {
        return memories.computeIfAbsent(memoryId, this::createMemory);
    }

    /** Builds a fresh sliding-window memory (last 20 messages) for the given id. */
    private ChatMemory createMemory(Object id) {
        return MessageWindowChatMemory.builder()
                .id(id)
                .maxMessages(20)
                .build();
    }

    /** Drops all cached memories when the request scope ends. */
    @PreDestroy
    public void close() {
        memories.clear();
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((631, 752), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 727), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 697), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package net.savantly.mainbot.config;
import java.time.Duration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import lombok.RequiredArgsConstructor;
import net.savantly.mainbot.service.replicate.ReplicateClient;
/**
 * Spring configuration exposing the primary {@link ChatLanguageModel} bean,
 * backed by OpenAI and driven by {@link OpenAIConfig} properties.
 */
@Configuration
@RequiredArgsConstructor
public class ChatModelConfig {

    private final OpenAIConfig openAIConfig;

    /** Primary chat model bean; only registered when openai.enabled=true. */
    @Bean
    @Primary
    @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "true")
    public ChatLanguageModel getChatModel(ReplicateClient replicateClient) {
        // Delegates to the OpenAI-backed model; the Replicate client is kept in the
        // signature as an alternative backend.
        return getOpenAiChatModel();
        // return new ReplicateChatLanguageModel(replicateClient);
    }

    /** Builds an OpenAI chat model entirely from the injected configuration. */
    public ChatLanguageModel getOpenAiChatModel() {
        return OpenAiChatModel.builder()
                .apiKey(openAIConfig.getApiKey()) // https://platform.openai.com/account/api-keys
                .modelName(openAIConfig.getChatModelId())
                .temperature(0.1)
                .logResponses(false)
                .logRequests(false)
                .timeout(Duration.ofSeconds(openAIConfig.getTimeoutSeconds()))
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1056, 1430), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1405), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1326), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1290), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1253), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1219), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1113), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Application-scoped registry of chat memories: one 3-message sliding window
 * per session id, created on first use.
 */
@ApplicationScoped
public class ChatMemoryBean implements ChatMemoryProvider {

    private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();

    @Override
    public ChatMemory get(Object memoryId) {
        return memories.computeIfAbsent(
                memoryId,
                key -> MessageWindowChatMemory.builder().id(key).maxMessages(3).build());
    }

    /** Forgets the memory associated with the given session. */
    public void clear(Object session) {
        memories.remove(session);
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((608, 728), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 703), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 673), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {
    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;
    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    /**
     * Runs at application startup: loads every catalog document, splits it into
     * 500-character chunks, embeds them, and stores the vectors in Redis.
     */
    public void ingest(@Observes StartupEvent event) {
        System.out.printf("Ingesting documents...%n");
        File catalogDir = new File("src/main/resources/catalog");
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(catalogDir.toPath(), new TextDocumentParser());
        EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(500, 0))
                .build()
                .ingest(documents);
        System.out.printf("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.example.demo;
import java.time.Duration;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.model.openai.OpenAiChatModel;
/**
 * ConversationalChain demo: the chain keeps default chat memory, so the second
 * question ("What are good tools for that?") can refer back to the first answer.
 */
public class _07_ConversationalChain {

    public static void main(String[] args) {
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .timeout(Duration.ofSeconds(60))
                .build();

        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(chatModel)
                // .chatMemory(...) // you can override default chat memory
                .build();

        ask(chain, "Can you give a brief explanation of the Agile methodology, 3 lines max?");
        ask(chain, "What are good tools for that? 3 lines max.");
    }

    /** Echoes the user message, runs it through the chain, prints the answer. */
    private static void ask(ConversationalChain chain, String userMessage) {
        System.out.println("[User]: " + userMessage);
        String answer = chain.execute(userMessage);
        System.out.println("[LLM]: " + answer);
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((313, 395), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 387), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 369), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((428, 559), 'dev.langchain4j.chain.ConversationalChain.builder'), ((428, 482), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package org.mf.langchain.service;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.localai.LocalAiChatModel;
import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
import org.jetbrains.annotations.Nullable;
import org.mf.langchain.util.LanguageModel;
import org.mf.langchain.StreamLanguageModel;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.function.Consumer;
/**
 * Spring service wrapping a local LocalAI "phi-2" deployment: one blocking
 * model and one streaming model, both pointed at the same base URL.
 */
@Service
public class LangChainService {

    private final LanguageModel lm;
    private final StreamLanguageModel slm;

    LangChainService() {
        // Blocking client.
        LocalAiChatModel blockingModel = LocalAiChatModel.builder()
                .modelName("phi-2")
                .baseUrl("http://localhost:8080")
                .build();
        lm = new LanguageModel(blockingModel);

        // Streaming client; very generous timeout for long generations.
        LocalAiStreamingChatModel streamingModel = LocalAiStreamingChatModel.builder()
                .modelName("phi-2")
                .baseUrl("http://localhost:8080")
                .timeout(Duration.ofDays(1))
                .temperature(0.8)
                .build();
        slm = new StreamLanguageModel(streamingModel);
    }

    /** Runs a prompt synchronously and returns the full completion. */
    public String Generate(String prompt) {
        return lm.RunBlocking(prompt);
    }

    /** Streams a completion, forwarding tokens, errors, and the final message to the callbacks. */
    public void GenerateStream(String prompt, Consumer<String> onNext, Consumer<Throwable> onError, @Nullable Consumer<AiMessage> onComplete) {
        slm.generate(prompt, onNext, onError, onComplete);
    }
}
| [
"dev.langchain4j.model.localai.LocalAiChatModel.builder",
"dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder"
] | [((623, 760), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 735), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 685), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((802, 1027), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 1002), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 968), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 923), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 873), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
/**
 * Tool-calling demo: the assistant is built with a Calculator whose
 * @Tool-annotated methods the model can invoke to answer the question.
 */
public class _04_Agents {
    // Plain methods exposed to the LLM as callable tools via @Tool.
    static class Calculator {
        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }
        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }
    // Implementation is generated at runtime by AiServices.builder(...) below.
    interface Assistant {
        Response<AiMessage> chat(String userMessage);
    }
    public static void main(String[] args) {
        // Key comes from the environment; a null value here will only fail at call time.
        String openAiKey = System.getenv("OPENAI_API_KEY");
        var assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(openAiKey))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(new Calculator())
                .build();
        // A question the model can answer by using the Calculator tools.
        var question = "What is the sum of the numbers of letters in the words 'language' and 'model'";
        var response = assistant.chat(question);
        System.out.println(response.content().text());
        System.out.println("\n\n########### TOKEN USAGE ############\n");
        System.out.println(response.tokenUsage());
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
/**
 * Ingests the product catalog into the Redis embedding store once, at
 * application startup (triggered by observing {@link StartupEvent}).
 */
@ApplicationScoped
public class DocumentIngestor {
    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;
    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;
    // Runs synchronously at startup; the relative path assumes the app is
    // launched from the project root — TODO confirm for packaged deployments.
    public void ingest(@Observes StartupEvent event) {
        System.out.printf("Ingesting documents...%n");
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
        // Split into 500-character chunks with no overlap, embed, and store.
        var ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(500, 0))
                .build();
        ingestor.ingest(documents);
        System.out.printf("Ingested %d documents.%n", documents.size());
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package io.quarkiverse.langchain4j.samples;
import java.util.function.Supplier;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;
/**
 * Supplies a {@link ChatMemoryProvider} that builds a 20-message sliding-window
 * memory per memory id, all persisted in a single in-memory store.
 */
public class CustomProvider implements Supplier<ChatMemoryProvider> {

    private final InMemoryChatMemoryStore store = new InMemoryChatMemoryStore();

    @Override
    public ChatMemoryProvider get() {
        // ChatMemoryProvider has a single abstract method, so a lambda suffices.
        return memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(20)
                .chatMemoryStore(store)
                .build();
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((652, 845), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 812), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 764), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 726), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package dev.onurb.travelassistant;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import java.io.IOException;
import java.time.Duration;
import java.util.Scanner;
/**
 * Interactive travel-assistant REPL: reads prompts from stdin and forwards them
 * to an AiServices-backed {@code TravelAssistant} until the user types "bye".
 */
public class TravelAgency {

    // FIX: the original created a new Scanner(System.in) on every prompt; each
    // Scanner may read ahead and buffer input, so later Scanners can miss lines.
    // One shared Scanner avoids that. It is deliberately never closed, because
    // closing it would also close System.in.
    private static final Scanner STDIN = new Scanner(System.in);

    public static void main(String[] args) throws IOException {
        String apiKey = System.getenv("OPENAPI_KEY");
        TravelAssistant assistant = AiServices.builder(TravelAssistant.class)
                .chatLanguageModel(OpenAiChatModel.builder().apiKey(apiKey).timeout(Duration.ofMinutes(3)).build())
                .tools(new TripServices())
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();
        String input = readInput();
        while (!"bye".equalsIgnoreCase(input)) {
            String answer = assistant.chat(input);
            // Answer in yellow, then reset to white.
            System.out.println("\u001B[33m" + answer + "\u001B[37m");
            input = readInput();
        }
    }

    /** Prints the prompt marker and reads one line from the shared stdin scanner. */
    private static String readInput() {
        System.out.print("> ");
        return STDIN.nextLine();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((460, 758), 'dev.langchain4j.service.AiServices.builder'), ((460, 733), 'dev.langchain4j.service.AiServices.builder'), ((460, 660), 'dev.langchain4j.service.AiServices.builder'), ((460, 617), 'dev.langchain4j.service.AiServices.builder'), ((537, 616), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 608), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 577), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gemini.workshop;
import dev.langchain4j.agent.tool.P;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.service.AiServices;
/**
 * Function-calling workshop step: Gemini answers weather questions by invoking
 * the {@code getForecast} tool with the location it extracts from the prompt.
 */
public class Step8b_FunctionCalling {

    /** Canned forecast returned to the model. */
    record WeatherForecast(String location, String forecast, int temperature) {}

    static class WeatherForecastService {

        @Tool("Get the weather forecast for a location")
        WeatherForecast getForecast(@P("Location to get the forecast for") String location) {
            // Hard-coded data for the two known cities; everything else is unknown.
            return switch (location) {
                case "Paris" -> new WeatherForecast("Paris", "Sunny", 20);
                case "London" -> new WeatherForecast("London", "Rainy", 15);
                default -> new WeatherForecast("Unknown", "Unknown", 0);
            };
        }
    }

    interface WeatherAssistant {
        String chat(String userMessage);
    }

    public static void main(String[] args) {
        ChatLanguageModel gemini = VertexAiGeminiChatModel.builder()
            .project(System.getenv("PROJECT_ID"))
            .location(System.getenv("LOCATION"))
            .modelName("gemini-1.0-pro")
            .maxOutputTokens(100)
            .build();

        WeatherForecastService forecastService = new WeatherForecastService();

        WeatherAssistant weatherAssistant = AiServices.builder(WeatherAssistant.class)
            .chatLanguageModel(gemini)
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .tools(forecastService)
            .build();

        System.out.println(weatherAssistant.chat("What is the weather in Paris?"));
        System.out.println(weatherAssistant.chat("What is the weather in London?"));
        System.out.println(weatherAssistant.chat("Is the temperature warmer in Paris or London?"));
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1743, 1971), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1950), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1916), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1875), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1826), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((2098, 2311), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2290), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2247), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2178), 'dev.langchain4j.service.AiServices.builder')] |
package com.hillarocket.application.handler;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@BrowserCallable
@AnonymousAllowed
public class OpenApiHandler {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
    /** Streaming assistant contract; returns a TokenStream of incremental tokens. */
    interface StreamingAssistant {
        TokenStream chat(String message);
    }
    // Builds both the blocking and streaming assistants after injection.
    // NOTE(review): a missing API key is only logged here — construction still
    // proceeds and will presumably fail at request time; confirm desired behavior.
    @PostConstruct
    public void init() {
        if (OPENAI_API_KEY == null) {
            System.err.println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
        }
        // Single token-window memory (2000 tokens) shared by BOTH assistants,
        // so blocking and streaming chats see the same conversation history.
        var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
        assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
                .chatMemory(memory)
                .build();
        streamingAssistant = AiServices.builder(StreamingAssistant.class)
                .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
                .chatMemory(memory)
                .build();
    }
    /** Blocking chat: returns the complete answer as one string. */
    public String chat(String message) {
        return assistant.chat(message);
    }
    /** Streaming chat: bridges the langchain4j TokenStream callbacks into a Reactor Flux. */
    public Flux<String> chatStream(String message) {
        // Unicast sink: exactly one subscriber; buffers emitted tokens if the
        // subscriber is slower than the model.
        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
        streamingAssistant.chat(message)
                .onNext(sink::tryEmitNext)
                .onComplete(c -> sink.tryEmitComplete())
                .onError(sink::tryEmitError)
                .start();
        return sink.asFlux();
    }
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1336, 1511), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1486), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1450), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1745), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1720), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1684), 'dev.langchain4j.service.AiServices.builder'), ((1929, 1974), 'reactor.core.publisher.Sinks.many'), ((1929, 1951), 'reactor.core.publisher.Sinks.many')] |
package _Engenharia;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
//import dev.langchain4j.data.document.splitter.ParagraphSplitter; // was causing a compile error, temporarily replaced
import dev.langchain4j.data.document.splitter.DocumentSplitters; // replacement for ParagraphSplitter
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.huggingface.HuggingFaceChatModel;
import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import java.io.File;
/**
 * Retrieval-augmented assistant: embeds the generated template.txt document
 * in-memory, then answers the stored question via a HuggingFace chat model.
 */
public class Assistente {
    // You can get your own HuggingFace API key here: https://huggingface.co/settings/tokens
    // NOTE(review): this key is hard-coded in source control; it should be moved to
    // configuration/environment and the committed key revoked.
    public static final String hfApiKey = "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";
    // Question text set via setPergunta(); static, so shared across all instances.
    private static String pergunta;
    /**
     * Answers the current question against the ingested document and deletes the
     * temporary template file afterwards.
     */
    public String fazerPergunta() throws Exception {
        Document document = loadDocument(toPath("template.txt")); // Uses the document generated from the selected file (created inside the _Engenharia package)
        // Choose a model to embed (vectorize) the text.
        EmbeddingModel embeddingModel = HuggingFaceEmbeddingModel.builder()
                .accessToken(hfApiKey)
                .modelId("sentence-transformers/all-MiniLM-L6-v2")
                .waitForModel(true)
                .timeout(ofSeconds(60))
                .build();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        // Apply the chosen embedding model to the document and store the vectors.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                // .splitter(new ParagraphSplitter()) // was causing a compile error, temporarily replaced
                .documentSplitter(DocumentSplitters.recursive(500)) // replacement
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);
        // Choose the model used for the inference (answering the question).
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(HuggingFaceChatModel.withAccessToken(hfApiKey))
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                // .chatMemory() // you can override default chat memory
                // .promptTemplate() // you can override default prompt template
                .build();
        // Run the inference.
        String answer = chain.execute(pergunta);
        File delete_file = new File("src/main/java/_Engenharia/template.txt"); // Delete the document after answering
        delete_file.delete(); // If answering fails, the file is NOT deleted
        return answer; // Charlie is a cheerful carrot living in VeggieVille...
        // Example to continue researching:
        // https://github.com/langchain4j/langchain4j/blob/7307f43d9823af619f1e3196252d212f3df04ddc/langchain4j/src/main/java/dev/langchain4j/model/huggingface/HuggingFaceChatModel.java
    }
    // Resolves a classpath resource next to this class to a filesystem Path.
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = Assistente.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
    // Stores the question in the static field read by fazerPergunta().
    public void setPergunta(String p) {
        pergunta = p;
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1706, 1948), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1923), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1883), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1847), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1780), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2162, 2524), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2499), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2451), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2385), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2675, 3064), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2885), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2796), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package com.kchandrakant;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
public class PromptTemplates {
public static void main(String[] args) {
// Create a prompt template
PromptTemplate promptTemplate = PromptTemplate.from("Tell me a {{adjective}} joke about {{content}}..");
// Generate prompt using the prompt template and user variables
Map<String, Object> variables = new HashMap<>();
variables.put("adjective", "funny");
variables.put("content", "humans");
Prompt prompt = promptTemplate.apply(variables);
System.out.println(prompt.text());
// Create an instance of a model
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
// Start interacting
String response = model.generate(prompt.text());
System.out.println(response);
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1019, 1193), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1168), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1134), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1092), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.azure.migration.java.copilot.service;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class Configure {

    /** Agent used to analyse a candidate service; backed by the chat model only. */
    @Bean
    ServiceAnalysisAgent chooseServiceAnalysisAgent(ChatLanguageModel chatLanguageModel) {
        AiServices<ServiceAnalysisAgent> agentBuilder = AiServices.builder(ServiceAnalysisAgent.class);
        agentBuilder.chatLanguageModel(chatLanguageModel);
        return agentBuilder.build();
    }

    /** Agent that configures resources, augmented with retrieved reference content. */
    @Bean
    ConfigureResourceAgent configureResourceAgent(ChatLanguageModel chatLanguageModel,ContentRetriever contentRetriever) {
        AiServices<ConfigureResourceAgent> agentBuilder = AiServices.builder(ConfigureResourceAgent.class);
        agentBuilder.chatLanguageModel(chatLanguageModel);
        agentBuilder.contentRetriever(contentRetriever);
        return agentBuilder.build();
    }

    /** Conversational workflow agent with migration tools and a 10-message sliding-window memory. */
    @Bean
    WorkflowChatAgent configureWorkflowChatAgent(ChatLanguageModel chatLanguageModel, ContentRetriever contentRetriever, MigrationWorkflowTools migrationWorkflowTools) {
        AiServices<WorkflowChatAgent> agentBuilder = AiServices.builder(WorkflowChatAgent.class);
        agentBuilder.chatLanguageModel(chatLanguageModel);
        agentBuilder.tools(migrationWorkflowTools);
        agentBuilder.chatMemory(MessageWindowChatMemory.withMaxMessages(10));
        return agentBuilder.build();
    }

    /** Retriever over the shared embedding store. */
    @Bean
    ContentRetriever contentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        // These two knobs usually need tuning; the optimal values depend on two main factors:
        // - the nature of your data
        // - the embedding model in use
        int topK = 5;
        double scoreThreshold = 0.6;
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(topK)
                .minScore(scoreThreshold)
                .build();
    }

    /** Local, in-process MiniLM embedding model (no remote calls). */
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((846, 971), 'dev.langchain4j.service.AiServices.builder'), ((846, 946), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1307), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1282), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1230), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1753), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1728), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1655), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1608), 'dev.langchain4j.service.AiServices.builder'), ((2167, 2404), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2379), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2343), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2303), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2255), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
package com.example.application.services;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@Service
@BrowserCallable
@AnonymousAllowed
public class ChatService {

    @Value("${openai.api.key}")
    private String OPENAI_API_KEY;

    private Assistant assistant;
    private StreamingAssistant streamingAssistant;

    /** Blocking chat contract implemented by langchain4j. */
    interface Assistant {
        String chat(String message);
    }

    /** Streaming chat contract implemented by langchain4j. */
    interface StreamingAssistant {
        TokenStream chat(String message);
    }

    /** Builds both assistants once the API key has been injected; they share one token-window memory. */
    @PostConstruct
    public void init() {
        TokenWindowChatMemory memory =
                TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));

        assistant = AiServices.builder(Assistant.class)
                .chatMemory(memory)
                .chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
                .build();

        streamingAssistant = AiServices.builder(StreamingAssistant.class)
                .chatMemory(memory)
                .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
                .build();
    }

    /** Sends one message and blocks for the full reply. */
    public String chat(String message) {
        return assistant.chat(message);
    }

    /** Streams the reply token by token, bridged to a Reactor {@link Flux} via a unicast sink. */
    public Flux<String> chatStream(String message) {
        Sinks.Many<String> tokenSink = Sinks.many().unicast().onBackpressureBuffer();
        streamingAssistant.chat(message)
                .onNext(tokenSink::tryEmitNext)
                .onComplete(tokenSink::tryEmitComplete)
                .onError(tokenSink::tryEmitError)
                .start();
        return tokenSink.asFlux();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1208, 1383), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1358), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1322), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1617), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1592), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1556), 'dev.langchain4j.service.AiServices.builder'), ((1801, 1846), 'reactor.core.publisher.Sinks.many'), ((1801, 1823), 'reactor.core.publisher.Sinks.many')] |
package org.acme;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkus.logging.Log;
import io.quarkus.runtime.Startup;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonReader;
import jakarta.json.JsonValue;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class IngestData {

    @Inject
    EmbeddingStore<TextSegment> store;

    @Inject
    EmbeddingModel embeddingModel;

    @Inject
    @ConfigProperty(name = "data.file")
    File dataFile;

    @Inject
    @ConfigProperty(name = "max.entries", defaultValue = "99999")
    Integer maxEntries;

    /**
     * Reads the news JSON file at startup and ingests up to {@code max.entries} entries
     * into the embedding store, splitting each article into 1000-char chunks with 50 overlap.
     */
    @Startup
    public void init() {
        List<Document> documents = new ArrayList<>();
        try (JsonReader reader = Json.createReader(new FileReader(dataFile))) {
            JsonArray results = reader.readArray();
            Log.info("Ingesting news reports...");
            int seen = 0;
            for (JsonValue newsEntry : results) {
                seen++;
                // Cap counts every entry looked at, including ones without usable text.
                if (seen > maxEntries) {
                    break;
                }
                String text = bestText(newsEntry);
                if (text != null) {
                    documents.add(new Document(text));
                }
            }
            var ingestor = EmbeddingStoreIngestor.builder()
                    .embeddingStore(store)
                    .embeddingModel(embeddingModel)
                    .documentSplitter(recursive(1000, 50))
                    .build();
            ingestor.ingest(documents);
            Log.infof("Ingested %d news articles.", documents.size());
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    /** Returns the first non-empty of content, full_description, description — or null if none. */
    private static String bestText(JsonValue newsEntry) {
        for (String field : new String[] {"content", "full_description", "description"}) {
            String value = newsEntry.asJsonObject().getString(field, null);
            if (value != null && !value.isEmpty()) {
                return value;
            }
        }
        return null;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((2590, 2805), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2776), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2717), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2665), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.sivalabs.demo.langchain4j;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
public class OllamaChatDemo {

    /** Minimal demo: ask a locally running Ollama llama2 model one question and print the answer. */
    public static void main(String[] args) {
        // Talks to the default local Ollama endpoint.
        ChatLanguageModel llama = OllamaChatModel.builder()
                .modelName("llama2")
                .baseUrl("http://localhost:11434")
                .build();
        System.out.println(llama.generate("List all the movies directed by Quentin Tarantino"));
    }
}
| [
"dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | [((257, 395), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 370), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 333), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] |
package com.ramesh.langchain;
import java.util.Scanner;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
/***
 * This project demonstrates the use of LangChain services which use custom tools
 * to generate the final output.
 */
public class ServiceWithToolsLive {
    // SECURITY NOTE(review): a real OpenAI API key is committed here. Rotate this key and read it
    // from an environment variable instead of keeping it in source control.
    // Open AI Key and Chat GPT Model to use
    public static String OPENAI_API_KEY = "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";
    public static String OPENAI_MODEL = "gpt-3.5-turbo";

    /**
     * Builds an AiServices assistant wired with the custom {@link Calculator} tool, then loops
     * forever: reads two words from stdin, asks the model for the sum of their letter counts,
     * and prints the reply. The model calls back into {@link Calculator} for the arithmetic.
     */
    public static void main(String[] args) {
        System.out.println("Using a custom Calculator as LangChain \"tool\"");
        // Building a Custom LangChain Assistant using LangChain AiServices
        System.out.println("Building a Custom Assistant using LangChain AiServices");
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY)).tools(new Calculator())
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10)).build();
        // FIX: the Scanner was re-created on every loop iteration; create it once and reuse it.
        Scanner scanner = new Scanner(System.in);
        while (true) {
            // get 2 words for which the total characters count is calculated
            System.out.print("Enter Word 1:");
            String word1 = scanner.nextLine();
            System.out.print("Enter Word 2:");
            String word2 = scanner.nextLine();
            String question = "What is the sum of the numbers of letters in the words \"" + word1 + "\" and \"" + word2 + "\"?";
            System.out.println("Prompting ChatGPT :" + question);
            // When the prompt is sent to the LLM via the assistant, the Calculator
            // tool functions are invoked to produce the final answer.
            System.out.println("Invoking Custom Assistant Class chat() and getting response from ChatGPT...");
            String answer = assistant.chat(question);
            System.out.println("ChatGPT Response...\n");
            System.out.println(answer);
        }
    }

    // a custom tool exposed to the LLM via @Tool annotations
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    interface Assistant {
        String chat(String userMessage);
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((896, 1091), 'dev.langchain4j.service.AiServices.builder'), ((896, 1083), 'dev.langchain4j.service.AiServices.builder'), ((896, 1022), 'dev.langchain4j.service.AiServices.builder'), ((896, 998), 'dev.langchain4j.service.AiServices.builder')] |
package ${{ values.basePackage }};
import java.io.IOException;
import java.nio.file.Path;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@SpringBootApplication
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

    /** Plain chat agent with a 10-message sliding-window memory. */
    @Bean
    ChatAgent chatAgent(ChatLanguageModel chatLanguageModel) {
        return AiServices.builder(ChatAgent.class)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .chatLanguageModel(chatLanguageModel)
                .build();
    }

    /** RAG agent: ingests the bundled story document and answers questions grounded in it. */
    @Bean
    DocumentAgent documentAgent(ChatLanguageModel chatLanguageModel, EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) throws IOException {
        // Load the demo document and ingest it into the embedding store in 300-char chunks.
        Path storyPath = ResourceUtils.getFile("classpath:documents/story.md").toPath();
        Document story = FileSystemDocumentLoader.loadDocument(storyPath, new TextDocumentParser());
        EmbeddingStoreIngestor.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .documentSplitter(DocumentSplitters.recursive(300, 10))
                .build()
                .ingest(story);

        // Retrieve at most 3 segments scoring at least 0.5 for each question.
        ContentRetriever retriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(3)
                .minScore(0.5)
                .build();

        return AiServices.builder(DocumentAgent.class)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .chatLanguageModel(chatLanguageModel)
                .contentRetriever(retriever)
                .build();
    }
}
@RestController
class ChatController {
    private final ChatAgent chatAgent;
    // Constructor injection of the memory-backed chat agent bean.
    ChatController(ChatAgent chatAgent) {
        this.chatAgent = chatAgent;
    }
    /** POST /chat — forwards the raw request body as the prompt and returns the agent's answer. */
    @PostMapping("/chat")
    String chat(@RequestBody String prompt) {
        return chatAgent.answer(prompt);
    }
}
@RestController
class DocumentController {
    private final DocumentAgent documentAgent;
    // Constructor injection of the retrieval-augmented document agent bean.
    DocumentController(DocumentAgent documentAgent) {
        this.documentAgent = documentAgent;
    }
    /** POST /chat/doc — answers the prompt grounded in the ingested document. */
    @PostMapping("/chat/doc")
    String chat(@RequestBody String prompt) {
        return documentAgent.answer(prompt);
    }
}
/** Plain conversational agent; implementation generated by langchain4j AiServices. */
interface ChatAgent {
    String answer(String prompt);
}
/** Document-grounded (RAG) agent; implementation generated by langchain4j AiServices. */
interface DocumentAgent {
    String answer(String prompt);
}
| [
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1657, 1775), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1762), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1734), 'dev.langchain4j.service.AiServices.builder'), ((1972, 2034), 'org.springframework.util.ResourceUtils.getFile'), ((2228, 2405), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2392), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2332), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2296), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2479, 2642), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2629), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2610), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2591), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2555), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2727, 2889), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2876), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2848), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2808), 'dev.langchain4j.service.AiServices.builder')] |
package com.docuverse.backend.configuration;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import io.github.cdimascio.dotenv.Dotenv;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
@Configuration
public class EmbeddingModelConfiguration {

    // Loads OPENAI_API_KEY (among others) from the local .env file.
    Dotenv dotenv = Dotenv.load();

    /** OpenAI ada-002 embedding model with a 15-second timeout and wire logging disabled. */
    @Bean
    public EmbeddingModel embeddingModel() {
        String apiKey = dotenv.get("OPENAI_API_KEY");
        return OpenAiEmbeddingModel.builder()
                .apiKey(apiKey)
                .modelName(TEXT_EMBEDDING_ADA_002)
                .timeout(ofSeconds(15))
                .logResponses(false)
                .logRequests(false)
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((784, 1057), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 1032), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 995), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 959), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 919), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 868), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package io.quarkiverse.langchain4j.openai.runtime;
import static io.quarkiverse.langchain4j.runtime.OptionalUtil.firstOrDefault;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.DisabledChatLanguageModel;
import dev.langchain4j.model.chat.DisabledStreamingChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.DisabledEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.DisabledImageModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.moderation.DisabledModerationModel;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModerationModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel;
import io.quarkiverse.langchain4j.openai.runtime.config.ChatModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.EmbeddingModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ImageModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.LangChain4jOpenAiConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ModerationModelConfig;
import io.quarkiverse.langchain4j.runtime.NamedModelUtil;
import io.quarkus.runtime.ShutdownContext;
import io.quarkus.runtime.annotations.Recorder;
import io.smallrye.config.ConfigValidationException;
@Recorder
public class OpenAiRecorder {
    // Sentinel default for the api-key property: means "no real key was configured".
    private static final String DUMMY_KEY = "dummy";

    /**
     * Builds a supplier of the blocking chat model for the given named model.
     * Returns a {@code DisabledChatLanguageModel} supplier when the integration is disabled,
     * and fails validation when no real API key was provided.
     */
    public Supplier<ChatLanguageModel> chatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
        if (openAiConfig.enableIntegration()) {
            String apiKey = openAiConfig.apiKey();
            if (DUMMY_KEY.equals(apiKey)) {
                throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
            }
            ChatModelConfig chatModelConfig = openAiConfig.chatModel();
            // Model-level log flags take precedence over the provider-level ones when set.
            var builder = OpenAiChatModel.builder()
                    .baseUrl(openAiConfig.baseUrl())
                    .apiKey(apiKey)
                    .timeout(openAiConfig.timeout())
                    .maxRetries(openAiConfig.maxRetries())
                    .logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
                    .logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
                    .modelName(chatModelConfig.modelName())
                    .temperature(chatModelConfig.temperature())
                    .topP(chatModelConfig.topP())
                    .presencePenalty(chatModelConfig.presencePenalty())
                    .frequencyPenalty(chatModelConfig.frequencyPenalty())
                    .responseFormat(chatModelConfig.responseFormat().orElse(null));
            // Optional settings are only applied when explicitly configured.
            openAiConfig.organizationId().ifPresent(builder::organizationId);
            if (chatModelConfig.maxTokens().isPresent()) {
                builder.maxTokens(chatModelConfig.maxTokens().get());
            }
            return new Supplier<>() {
                @Override
                public ChatLanguageModel get() {
                    return builder.build();
                }
            };
        } else {
            return new Supplier<>() {
                @Override
                public ChatLanguageModel get() {
                    return new DisabledChatLanguageModel();
                }
            };
        }
    }

    /**
     * Builds a supplier of the streaming chat model for the given named model.
     * Mirrors {@link #chatModel} except that streaming models take no maxRetries setting here.
     */
    public Supplier<StreamingChatLanguageModel> streamingChatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
        if (openAiConfig.enableIntegration()) {
            String apiKey = openAiConfig.apiKey();
            if (DUMMY_KEY.equals(apiKey)) {
                throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
            }
            ChatModelConfig chatModelConfig = openAiConfig.chatModel();
            var builder = OpenAiStreamingChatModel.builder()
                    .baseUrl(openAiConfig.baseUrl())
                    .apiKey(apiKey)
                    .timeout(openAiConfig.timeout())
                    .logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
                    .logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
                    .modelName(chatModelConfig.modelName())
                    .temperature(chatModelConfig.temperature())
                    .topP(chatModelConfig.topP())
                    .presencePenalty(chatModelConfig.presencePenalty())
                    .frequencyPenalty(chatModelConfig.frequencyPenalty())
                    .responseFormat(chatModelConfig.responseFormat().orElse(null));
            openAiConfig.organizationId().ifPresent(builder::organizationId);
            if (chatModelConfig.maxTokens().isPresent()) {
                builder.maxTokens(chatModelConfig.maxTokens().get());
            }
            return new Supplier<>() {
                @Override
                public StreamingChatLanguageModel get() {
                    return builder.build();
                }
            };
        } else {
            return new Supplier<>() {
                @Override
                public StreamingChatLanguageModel get() {
                    return new DisabledStreamingChatLanguageModel();
                }
            };
        }
    }

    /** Builds a supplier of the embedding model for the given named model (or a disabled stub). */
    public Supplier<EmbeddingModel> embeddingModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
        if (openAiConfig.enableIntegration()) {
            String apiKeyOpt = openAiConfig.apiKey();
            if (DUMMY_KEY.equals(apiKeyOpt)) {
                throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
            }
            EmbeddingModelConfig embeddingModelConfig = openAiConfig.embeddingModel();
            var builder = OpenAiEmbeddingModel.builder()
                    .baseUrl(openAiConfig.baseUrl())
                    .apiKey(apiKeyOpt)
                    .timeout(openAiConfig.timeout())
                    .maxRetries(openAiConfig.maxRetries())
                    .logRequests(firstOrDefault(false, embeddingModelConfig.logRequests(), openAiConfig.logRequests()))
                    .logResponses(firstOrDefault(false, embeddingModelConfig.logResponses(), openAiConfig.logResponses()))
                    .modelName(embeddingModelConfig.modelName())
;
            if (embeddingModelConfig.user().isPresent()) {
                builder.user(embeddingModelConfig.user().get());
            }
            openAiConfig.organizationId().ifPresent(builder::organizationId);
            return new Supplier<>() {
                @Override
                public EmbeddingModel get() {
                    return builder.build();
                }
            };
        } else {
            return new Supplier<>() {
                @Override
                public EmbeddingModel get() {
                    return new DisabledEmbeddingModel();
                }
            };
        }
    }

    /** Builds a supplier of the moderation model for the given named model (or a disabled stub). */
    public Supplier<ModerationModel> moderationModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
        if (openAiConfig.enableIntegration()) {
            String apiKey = openAiConfig.apiKey();
            if (DUMMY_KEY.equals(apiKey)) {
                throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
            }
            ModerationModelConfig moderationModelConfig = openAiConfig.moderationModel();
            var builder = OpenAiModerationModel.builder()
                    .baseUrl(openAiConfig.baseUrl())
                    .apiKey(apiKey)
                    .timeout(openAiConfig.timeout())
                    .maxRetries(openAiConfig.maxRetries())
                    .logRequests(firstOrDefault(false, moderationModelConfig.logRequests(), openAiConfig.logRequests()))
                    .logResponses(firstOrDefault(false, moderationModelConfig.logResponses(), openAiConfig.logResponses()))
                    .modelName(moderationModelConfig.modelName());
            openAiConfig.organizationId().ifPresent(builder::organizationId);
            return new Supplier<>() {
                @Override
                public ModerationModel get() {
                    return builder.build();
                }
            };
        } else {
            return new Supplier<>() {
                @Override
                public ModerationModel get() {
                    return new DisabledModerationModel();
                }
            };
        }
    }

    /**
     * Builds a supplier of the image model for the given named model (or a disabled stub).
     * Also resolves whether and where generated images are persisted to disk.
     */
    public Supplier<ImageModel> imageModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
        if (openAiConfig.enableIntegration()) {
            String apiKey = openAiConfig.apiKey();
            if (DUMMY_KEY.equals(apiKey)) {
                throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
            }
            ImageModelConfig imageModelConfig = openAiConfig.imageModel();
            var builder = QuarkusOpenAiImageModel.builder()
                    .baseUrl(openAiConfig.baseUrl())
                    .apiKey(apiKey)
                    .timeout(openAiConfig.timeout())
                    .maxRetries(openAiConfig.maxRetries())
                    .logRequests(firstOrDefault(false, imageModelConfig.logRequests(), openAiConfig.logRequests()))
                    .logResponses(firstOrDefault(false, imageModelConfig.logResponses(), openAiConfig.logResponses()))
                    .modelName(imageModelConfig.modelName())
                    .size(imageModelConfig.size())
                    .quality(imageModelConfig.quality())
                    .style(imageModelConfig.style())
                    .responseFormat(imageModelConfig.responseFormat())
                    .user(imageModelConfig.user());
            openAiConfig.organizationId().ifPresent(builder::organizationId);
            // we persist if the directory was set explicitly and the boolean flag was not set to false
            // or if the boolean flag was set explicitly to true
            Optional<Path> persistDirectory = Optional.empty();
            if (imageModelConfig.persist().isPresent()) {
                if (imageModelConfig.persist().get()) {
                    // persist=true: use the configured directory, or fall back to the system temp dir.
                    persistDirectory = imageModelConfig.persistDirectory().or(new Supplier<>() {
                        @Override
                        public Optional<? extends Path> get() {
                            return Optional.of(Paths.get(System.getProperty("java.io.tmpdir"), "dall-e-images"));
                        }
                    });
                }
            } else {
                if (imageModelConfig.persistDirectory().isPresent()) {
                    persistDirectory = imageModelConfig.persistDirectory();
                }
            }
            builder.persistDirectory(persistDirectory);
            return new Supplier<>() {
                @Override
                public ImageModel get() {
                    return builder.build();
                }
            };
        } else {
            return new Supplier<>() {
                @Override
                public ImageModel get() {
                    return new DisabledImageModel();
                }
            };
        }
    }

    /** Resolves the per-model config: the default block, or the named block matching modelName. */
    private LangChain4jOpenAiConfig.OpenAiConfig correspondingOpenAiConfig(LangChain4jOpenAiConfig runtimeConfig,
            String modelName) {
        LangChain4jOpenAiConfig.OpenAiConfig openAiConfig;
        if (NamedModelUtil.isDefault(modelName)) {
            openAiConfig = runtimeConfig.defaultConfig();
        } else {
            openAiConfig = runtimeConfig.namedConfig().get(modelName);
        }
        return openAiConfig;
    }

    /** Config problem for a missing api-key on the given named model. */
    private ConfigValidationException.Problem[] createApiKeyConfigProblems(String modelName) {
        return createConfigProblems("api-key", modelName);
    }

    private ConfigValidationException.Problem[] createConfigProblems(String key, String modelName) {
        return new ConfigValidationException.Problem[] { createConfigProblem(key, modelName) };
    }

    /** Builds a SmallRye-style "required property missing" problem for the given key. */
    private ConfigValidationException.Problem createConfigProblem(String key, String modelName) {
        return new ConfigValidationException.Problem(String.format(
                "SRCFG00014: The config property quarkus.langchain4j.openai%s%s is required but it could not be found in any config source",
                NamedModelUtil.isDefault(modelName) ? "." : ("." + modelName + "."), key));
    }

    /** Registers a shutdown task that clears the cached QuarkusOpenAiClient instances. */
    public void cleanUp(ShutdownContext shutdown) {
        shutdown.addShutdownTask(new Runnable() {
            @Override
            public void run() {
                QuarkusOpenAiClient.clearCache();
            }
        });
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.model.openai.OpenAiModerationModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2450, 3312), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3229), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3155), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3033), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2969), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2909), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2791), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2676), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2617), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2564), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4555, 5367), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5284), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5210), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5138), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5088), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5024), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4964), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4846), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4731), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4678), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4642), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((6642, 7184), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 7119), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6996), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 
6876), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6817), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6764), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6725), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((8417, 8960), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8894), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8770), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8649), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8590), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8537), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8501), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((10032, 10845), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10794), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10723), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10670), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10613), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10562), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10501), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10382), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10266), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10207), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10154), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10118), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder')] |
package io.quarkiverse.langchain4j.sample;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
/**
 * Supplies the {@link ChatLanguageModel} used by the application.
 * <p>
 * NOTE(review): the API key is a placeholder ("...") — it must be replaced with
 * a real key, ideally read from configuration rather than hard-coded here.
 */
public class MyChatModelSupplier implements Supplier<ChatLanguageModel> {
    @Override
    public ChatLanguageModel get() {
        // Builds an OpenAI chat model with library defaults for everything but the key.
        return OpenAiChatModel.builder()
                .apiKey("...")
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((328, 409), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((328, 384), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
@Service
public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
    @Autowired
    private ChatLanguageModel chatLanguageModel;
    @Autowired
    private SqlExamplarLoader sqlExamplarLoader;
    @Autowired
    private OptimizationConfig optimizationConfig;
    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;
    /**
     * Generates SQL with a two-pass, self-consistency strategy: pass one asks the
     * model for schema links, pass two asks for the SQL itself. Each pass runs one
     * prompt per example combination in parallel and the candidates are merged by
     * a self-consistency vote.
     *
     * @param llmReq    user query text plus context used to build the prompts
     * @param dataSetId id of the data set being queried (used only for logging here)
     * @return response carrying the original query and the voted SQL candidate map
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        // 1. Retrieve few-shot SQL examples and expand them into a pool of example combinations.
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
        List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
                optimizationConfig.getText2sqlExampleNum());
        List<List<Map<String, String>>> exampleListPool = sqlPromptGenerator.getExampleCombos(sqlExamples,
                optimizationConfig.getText2sqlFewShotsNum(), optimizationConfig.getText2sqlSelfConsistencyNum());
        // 2. Pass one: build a schema-linking prompt per combination and query the model in parallel.
        //    CopyOnWriteArrayList because results are appended from parallel-stream worker threads.
        List<String> linkingPromptPool = sqlPromptGenerator.generatePromptPool(llmReq, exampleListPool, false);
        List<String> linkingResults = new CopyOnWriteArrayList<>();
        linkingPromptPool.parallelStream().forEach(
                linkingPrompt -> {
                    Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPrompt)).apply(new HashMap<>());
                    keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
                    Response<AiMessage> linkingResult = chatLanguageModel.generate(prompt.toSystemMessage());
                    String result = linkingResult.content().text();
                    keyPipelineLog.info("step one model response:{}", result);
                    linkingResults.add(OutputFormat.getSchemaLink(result));
                }
        );
        // Majority vote over the schema links produced by the parallel runs.
        List<String> sortedList = OutputFormat.formatList(linkingResults);
        Pair<String, Map<String, Double>> linkingMap = OutputFormat.selfConsistencyVote(sortedList);
        // 3. Pass two: build SQL prompts from the voted schema links and query the model in parallel.
        List<String> sqlPromptPool = sqlPromptGenerator.generateSqlPromptPool(llmReq, sortedList, exampleListPool);
        List<String> sqlTaskPool = new CopyOnWriteArrayList<>();
        sqlPromptPool.parallelStream().forEach(sqlPrompt -> {
            Prompt linkingPrompt = PromptTemplate.from(JsonUtil.toString(sqlPrompt)).apply(new HashMap<>());
            keyPipelineLog.info("step two request prompt:{}", linkingPrompt.toSystemMessage());
            Response<AiMessage> sqlResult = chatLanguageModel.generate(linkingPrompt.toSystemMessage());
            String result = sqlResult.content().text();
            keyPipelineLog.info("step two model response:{}", result);
            sqlTaskPool.add(result);
        });
        // 4. Vote over the SQL candidates and assemble the response.
        Pair<String, Map<String, Double>> sqlMapPair = OutputFormat.selfConsistencyVote(sqlTaskPool);
        keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMapPair.getRight());
        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMapPair.getRight()));
        return llmResp;
    }
    @Override
    public void afterPropertiesSet() {
        // Register this strategy so SqlGenerationFactory can select it by generation mode.
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT_SELF_CONSISTENCY, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((2481, 2557), 'dev.langchain4j.model.input.PromptTemplate.from'), ((3537, 3609), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package org.example;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
public class _09_AIServices_06_ChatMemoryPersisted {

    public static void main(String[] args) {
        OpenAiChatModel chatModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);
        FileStore messageStore = new FileStore();

        // Each memory id gets its own window of at most 10 messages,
        // persisted through the file-backed store.
        ChatMemoryProvider memoryProvider = memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(10)
                .chatMemoryStore(messageStore)
                .build();

        ChatAssistant assistant = AiServices.builder(ChatAssistant.class)
                .chatLanguageModel(chatModel)
                .chatMemoryProvider(memoryProvider)
                .build();

        System.out.println(assistant.chat(1, "Hello my name is Michael"));
        System.out.println(assistant.chat(2, "Hello my name is Karl"));
        // System.out.println(assistant.chat(1, "What is my name?"));
        // System.out.println(assistant.chat(2, "What is my name?"));
    }
}
class FileStore implements ChatMemoryStore {
public static final String PATH = "src/main/resources/messages_%s.txt";
@Override
public List<ChatMessage> getMessages(Object memoryId) {
List<ChatMessage> chatMessages = new ArrayList<>();
String file = PATH.formatted(memoryId);
try {
if (!Files.exists(Paths.get(file))) {
Files.createFile(Paths.get(file));
}
for (String s : Files.readAllLines(Paths.get(file))) {
chatMessages.add(UserMessage.from(s));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return chatMessages;
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
String file = PATH.formatted(memoryId);
for (ChatMessage message : messages) {
try {
Files.writeString(Paths.get(file), message.text() + "\n", StandardOpenOption.APPEND);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public void deleteMessages(Object memoryId) {
System.out.println("Not implemented");
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((843, 1004), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 979), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 939), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 906), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1041, 1193), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1168), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1122), 'dev.langchain4j.service.AiServices.builder')] |
package org.agoncal.fascicle.langchain4j.vectordb.pgvector;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore;
import java.util.List;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

  public static void main(String[] args) {
    new MusicianService().usePGVectorToStoreEmbeddings();
  }

  /**
   * Stores two text-segment embeddings in a PostgreSQL pgvector table and
   * queries for the single most relevant match.
   */
  public void usePGVectorToStoreEmbeddings() {
    System.out.println("### usePGVectorToStoreEmbeddings");

    // tag::adocSnippet[]
    EmbeddingStore<TextSegment> embeddingStore =
      PgVectorEmbeddingStore.builder()
        .host("localhost")
        .port(5432)
        .createTable(true)
        .dropTableFirst(true)
        .dimension(384)
        .table("langchain4j_collection")
        .user("agoncal")
        .password("agoncal")
        .database("agoncal")
        .build();
    // end::adocSnippet[]

    EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

    TextSegment franceSegment = TextSegment.from("I've been to France twice.");
    embeddingStore.add(embeddingModel.embed(franceSegment).content(), franceSegment);

    TextSegment delhiSegment = TextSegment.from("New Delhi is the capital of India.");
    embeddingStore.add(embeddingModel.embed(delhiSegment).content(), delhiSegment);

    // Embed the question and ask the store for the single closest segment.
    Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content();
    List<EmbeddingMatch<TextSegment>> matches = embeddingStore.findRelevant(queryEmbedding, 1);

    EmbeddingMatch<TextSegment> bestMatch = matches.get(0);
    System.out.println(bestMatch.score());
    System.out.println(bestMatch.embedded().text());
  }
}
| [
"dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder"
] | [((989, 1290), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1273), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1244), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1215), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1190), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1149), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1125), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1095), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1068), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1048), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder')] |
package com.ramesh.langchain;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
/***
* This project demonstrates how to use LangChain to ingest data from a document and
* get responses for prompts from the same, by creating a LangChain Chain
*/
/***
 * Demonstrates ingesting a document into an in-memory embedding store and
 * answering prompts over it through a LangChain4j ConversationalRetrievalChain.
 */
public class ChainWithDocumentLive {

    // SECURITY FIX: the OpenAI key used to be hard-coded in this file. A key
    // committed to source control is leaked and must be rotated; it is now read
    // from the environment instead.
    public static String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");
    public static String OPENAI_MODEL = "gpt-3.5-turbo";

    public static void main(String[] args) {
        // Local quantized model that turns text segments into vectors.
        EmbeddingModel embeddingModel = new AllMiniLmL6V2QuantizedEmbeddingModel();
        // Embeddings are kept in memory only; nothing persists between runs.
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        // Ingestor: split the document into 500-char segments, embed and store them.
        System.out.println("Creating instance of EmbeddingStoreIngestor...");
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();

        System.out.println("Loading content from simpsons_adventures.txt and ingesting...");
        Document document = loadDocument(".\\simpsons_adventures.txt");
        ingestor.ingest(document);

        // Chat model that will answer questions using the retrieved context.
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(OPENAI_API_KEY)
                .timeout(ofSeconds(60))
                .build();

        System.out.println("Building LangChain with Embeddings Retriever...");
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatModel)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate.from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();

        System.out.println("Prompting ChatGPT \"Who is Simpson?\"...");
        System.out.println("\nFetching response from ChatGPT via the created LangChain...\n");
        // Execute the chain: retrieve relevant segments, fill the template, call the model.
        String answer = chain.execute("Who is Simpson?");
        System.out.println(answer);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1849, 2057), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 2036), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 1992), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1849, 1948), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2366, 2484), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2366, 2463), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2366, 2427), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2667, 3113), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 3092), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2901), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2832), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2667, 2747), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package io.quarkiverse.langchain4j.samples;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.util.List;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.pinecone.PineconeEmbeddingStore;
@ApplicationScoped
public class IngestorExampleWithPinecone {

    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-pinecone extension.
     */
    @Inject
    PineconeEmbeddingStore store;

    /**
     * The embedding model (how is computed the vector of a document).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;

    /**
     * Splits each document into 500-character segments, embeds them, and
     * stores the resulting vectors in Pinecone.
     */
    public void ingest(List<Document> documents) {
        EmbeddingStoreIngestor storeIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(store)
                .build();
        // Warning - this can take a long time...
        storeIngestor.ingest(documents);
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1005, 1202), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1177), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1124), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1005, 1076), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
import dev.langchain4j.data.document.FileSystemDocumentLoader;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import static java.util.stream.Collectors.joining;
/**
 * Interactive retrieval-augmented chat loop: ingest a document, then repeatedly
 * answer user questions using the most relevant segments as context.
 */
public class _03_Retrieval {
    // NOTE(review): empty resource name — toPath("") returns a null URL and
    // Paths.get(fileUrl.toURI()) will NPE; fill in a document name before running.
    private static final String RETRIEVER_DOCUMENT_NAME = "";
    public static void main(String[] args) {
        var openAiKey = System.getenv("OPENAI_API_KEY");
        var embeddingModel = OpenAiEmbeddingModel.withApiKey(openAiKey);
        var embeddingStore = new InMemoryEmbeddingStore<TextSegment>();
        // 0 - Ingesting the document and store in vectorized form
        var ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        var filePath = toPath(RETRIEVER_DOCUMENT_NAME);
        var document = FileSystemDocumentLoader.loadDocument(filePath);
        ingestor.ingest(document);
        var chatModel = OpenAiChatModel.withApiKey(openAiKey);
        var chatMemory = MessageWindowChatMemory.withMaxMessages(10);
        var retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel);
        var promptTemplate = PromptTemplate.from("""
                Answer the following question to the best of your ability: {{question}}
                Base your answer on the following information:
                {{information}}""");
        try (Scanner scanner = new Scanner(System.in)) {
            while (true) {
                System.out.println("\nEnter your question: ");
                // 1 - Retrieving the question from the user
                String question = scanner.nextLine();
                if (question.equals("exit")) {
                    break;
                }
                // 2, 3 - Retrieving the most relevant segments according to the question
                var relevantSegments = retriever.findRelevant(question);
                var prompt = promptTemplate.apply(
                        Map.of(
                                "question", question,
                                "information", format(relevantSegments)));
                chatMemory.add(prompt.toUserMessage());
                // 4 - Send the prompt to the model
                var response = chatModel.generate(chatMemory.messages());
                chatMemory.add(response.content());
                // 5 - Printing answer to the user
                System.out.println(response.content().text());
                System.out.println("\n\n########### TOKEN USAGE ############\n");
                System.out.println(response.tokenUsage());
            }
        }
    }
    // Wraps each segment in ellipses and joins them so the prompt shows excerpt boundaries.
    private static String format(List<TextSegment> relevantSegments) {
        return relevantSegments.stream()
                .map(TextSegment::text)
                .map(segment -> "..." + segment + "...")
                .collect(joining("\n\n"));
    }
    // Resolves a classpath resource name to a filesystem Path.
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = _03_Retrieval.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1262, 1486), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1461), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1413), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1262, 1365), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package org.example;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import java.util.List;
public class _09_AIServices_04_PokemonTrainer {

    public static void main(String[] args) {
        // Set logger to debug: logRequests(true) prints the outgoing request,
        // which is handy for inspecting the generated prompt.
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_DEMO)
                .logRequests(true)
                .build();

        PokemonTrainerGeneratorService generator =
                AiServices.create(PokemonTrainerGeneratorService.class, chatModel);

        Trainer trainer = generator.generate("Generate a low level trainer named 'Kelvin' with 2 bug and 2 fire pokemon");
        System.out.println(trainer);
    }
}
/**
 * AI Service contract: langchain4j generates an implementation at runtime that
 * sends the system + user messages to the model and maps the structured reply
 * onto a {@link Trainer}.
 */
interface PokemonTrainerGeneratorService {
    @SystemMessage("You generate random pokemon trainers with random pokemon, in accordance to the user message")
    Trainer generate(@UserMessage String text);
}
/** A trainer with a name and a team of pokemon, filled from the model's structured output. */
record Trainer(String name, List<Pokemon> team) {
}
/** A single pokemon on a trainer's team; {@link Description} guides the model's generation. */
record Pokemon(String name
        // , @Description("All uppercase") String type
        , String type
        , int level
        , int hp
        , @Description("Random number of moves between 1 and 4") List<String> moves)
{}
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((450, 580), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((450, 555), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((450, 520), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
import dev.ai4j.openai4j.Model;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class _00_Model {
public static void main(String[] args) {
String openAiKey = System.getenv("OPENAI_API_KEY");
ChatLanguageModel chatModel = OpenAiChatModel.builder()
.modelName(Model.GPT_3_5_TURBO.stringValue())
.apiKey(openAiKey)
.build();
var prompt = "Write hello world example in Java printing 'Hello TDC Future 2023'";
var response = chatModel.generate(UserMessage.from(prompt));
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((359, 506), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((359, 481), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((359, 446), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((412, 445), 'dev.ai4j.openai4j.Model.GPT_3_5_TURBO.stringValue')] |
package com.example.application.services;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
/** Hilla browser-callable service exposing blocking and streaming OpenAI chat. */
@BrowserCallable
@AnonymousAllowed
public class ChatService {
    @Value("${openai.api.key}")
    private String OPENAI_API_KEY;
    private Assistant assistant;
    private StreamingAssistant streamingAssistant;
    interface Assistant {
        String chat(String message);
    }
    interface StreamingAssistant {
        TokenStream chat(String message);
    }
    @PostConstruct
    public void init() {
        // NOTE(review): both assistants share ONE chat memory, so blocking and
        // streaming conversations are interleaved in the same 2000-token history —
        // confirm this is intentional.
        var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
        assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
                .chatMemory(memory)
                .build();
        streamingAssistant = AiServices.builder(StreamingAssistant.class)
                .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
                .chatMemory(memory)
                .build();
    }
    public String chat(String message) {
        return assistant.chat(message);
    }
    public Flux<String> chatStream(String message) {
        // Bridge the token-stream callbacks into a Reactor Flux for the browser client.
        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
        streamingAssistant.chat(message)
                .onNext(sink::tryEmitNext)
                .onComplete(sink::tryEmitComplete)
                .onError(sink::tryEmitError)
                .start();
        return sink.asFlux();
    }
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1152, 1327), 'dev.langchain4j.service.AiServices.builder'), ((1152, 1302), 'dev.langchain4j.service.AiServices.builder'), ((1152, 1266), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1561), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1536), 'dev.langchain4j.service.AiServices.builder'), ((1359, 1500), 'dev.langchain4j.service.AiServices.builder'), ((1745, 1790), 'reactor.core.publisher.Sinks.many'), ((1745, 1767), 'reactor.core.publisher.Sinks.many')] |
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.time.Duration.ofSeconds;
import static java.util.Arrays.asList;
public class _03_PromptTemplate {
    /**
     * Fills a free-form {@link PromptTemplate} ({{dishType}}, {{ingredients}})
     * from a variable map and sends the rendered text to the model.
     */
    static class Simple_Prompt_Template_Example {
        public static void main(String[] args) {
            ChatLanguageModel model = OpenAiChatModel.builder()
                    .apiKey(ApiKeys.OPENAI_API_KEY)
                    .timeout(ofSeconds(60))
                    .build();
            String template = "Create a recipe for a {{dishType}} with the following ingredients: {{ingredients}}";
            PromptTemplate promptTemplate = PromptTemplate.from(template);
            Map<String, Object> variables = new HashMap<>();
            variables.put("dishType", "oven dish");
            variables.put("ingredients", "potato, tomato, feta, olive oil");
            Prompt prompt = promptTemplate.apply(variables);
            String response = model.generate(prompt.text());
            System.out.println(response);
        }
    }
    /**
     * Same idea, but the template lives in a {@link StructuredPrompt} annotation
     * on a dedicated prompt class; its fields fill the placeholders by name.
     */
    static class Structured_Prompt_Template_Example {
        @StructuredPrompt({
            "Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}.",
            "Structure your answer in the following way:",
            "Recipe name: ...",
            "Description: ...",
            "Preparation time: ...",
            "Required ingredients:",
            "- ...",
            "- ...",
            "Instructions:",
            "- ...",
            "- ..."
        })
        static class CreateRecipePrompt {
            String dish;
            List<String> ingredients;
            CreateRecipePrompt(String dish, List<String> ingredients) {
                this.dish = dish;
                this.ingredients = ingredients;
            }
        }
        public static void main(String[] args) {
            ChatLanguageModel model = OpenAiChatModel.builder()
                    .apiKey(ApiKeys.OPENAI_API_KEY)
                    .timeout(ofSeconds(60))
                    .build();
            Structured_Prompt_Template_Example.CreateRecipePrompt createRecipePrompt = new Structured_Prompt_Template_Example.CreateRecipePrompt(
                    "salad",
                    asList("cucumber", "tomato", "feta", "onion", "olives")
            );
            // Renders the annotation template with the instance's field values.
            Prompt prompt = StructuredPromptProcessor.toPrompt(createRecipePrompt);
            String recipe = model.generate(prompt.text());
            System.out.println(recipe);
        }
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((668, 818), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((668, 789), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((668, 745), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2455), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2426), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2305, 2382), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemini.workshop;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.Response;
public class Step3_Multimodal {

    static final String CAT_IMAGE_URL =
        "https://upload.wikimedia.org/wikipedia/commons/e/e9/" +
        "Felis_silvestris_silvestris_small_gradual_decrease_of_quality.png";

    public static void main(String[] args) {
        ChatLanguageModel gemini = VertexAiGeminiChatModel.builder()
            .project(System.getenv("PROJECT_ID"))
            .location(System.getenv("LOCATION"))
            .modelName("gemini-1.0-pro-vision")
            .build();

        // A single user message can mix image and text content.
        UserMessage question = UserMessage.from(
            ImageContent.from(CAT_IMAGE_URL),
            TextContent.from("Describe the picture")
        );

        Response<AiMessage> answer = gemini.generate(question);
        System.out.println(answer.content().text());
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1277, 1478), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1457), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1409), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1277, 1360), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder')] |
package dev.langchain4j.model.openai;
import dev.ai4j.openai4j.chat.*;
import dev.ai4j.openai4j.completion.CompletionChoice;
import dev.ai4j.openai4j.completion.CompletionResponse;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static dev.langchain4j.model.openai.InternalOpenAiHelper.finishReasonFrom;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
/**
* This class needs to be thread safe because it is called when a streaming result comes back
* and there is no guarantee that this thread will be the same as the one that initiated the request,
* in fact it almost certainly won't be.
*/
public class OpenAiStreamingResponseBuilder {
    // Buffers for streamed deltas. StringBuffer (synchronized) because, per the
    // class contract above, chunks may arrive on a different thread than the one
    // that initiated the request.
    private final StringBuffer contentBuilder = new StringBuffer();
    private final StringBuffer toolNameBuilder = new StringBuffer();
    private final StringBuffer toolArgumentsBuilder = new StringBuffer();
    // Parallel tool calls are accumulated per tool-call index.
    private final Map<Integer, ToolExecutionRequestBuilder> indexToToolExecutionRequestBuilder = new ConcurrentHashMap<>();
    // volatile: written by the streaming thread, read in build().
    private volatile String finishReason;
    private final Integer inputTokenCount;
    /**
     * @param inputTokenCount token count of the request input, supplied by the caller
     */
    public OpenAiStreamingResponseBuilder(Integer inputTokenCount) {
        this.inputTokenCount = inputTokenCount;
    }
    /**
     * Folds one streamed chat-completion chunk into the builder's state.
     * A chunk may carry a content delta, a legacy function-call delta, or
     * tool-call deltas; whichever is present is appended to the matching buffer.
     *
     * @param partialResponse one streamed chunk; null/empty chunks are ignored
     */
    public void append(ChatCompletionResponse partialResponse) {
        if (partialResponse == null) {
            return;
        }
        List<ChatCompletionChoice> choices = partialResponse.choices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        ChatCompletionChoice chatCompletionChoice = choices.get(0);
        if (chatCompletionChoice == null) {
            return;
        }
        String finishReason = chatCompletionChoice.finishReason();
        if (finishReason != null) {
            this.finishReason = finishReason;
        }
        Delta delta = chatCompletionChoice.delta();
        if (delta == null) {
            return;
        }
        String content = delta.content();
        if (content != null) {
            contentBuilder.append(content);
            return;
        }
        // Legacy single function-call streaming: name and arguments arrive in pieces.
        if (delta.functionCall() != null) {
            FunctionCall functionCall = delta.functionCall();
            if (functionCall.name() != null) {
                toolNameBuilder.append(functionCall.name());
            }
            if (functionCall.arguments() != null) {
                toolArgumentsBuilder.append(functionCall.arguments());
            }
        }
        // Parallel tool calls: chunks are correlated by the tool call's index.
        if (delta.toolCalls() != null && !delta.toolCalls().isEmpty()) {
            ToolCall toolCall = delta.toolCalls().get(0);
            ToolExecutionRequestBuilder toolExecutionRequestBuilder
                    = indexToToolExecutionRequestBuilder.computeIfAbsent(toolCall.index(), idx -> new ToolExecutionRequestBuilder());
            if (toolCall.id() != null) {
                toolExecutionRequestBuilder.idBuilder.append(toolCall.id());
            }
            FunctionCall functionCall = toolCall.function();
            if (functionCall.name() != null) {
                toolExecutionRequestBuilder.nameBuilder.append(functionCall.name());
            }
            if (functionCall.arguments() != null) {
                toolExecutionRequestBuilder.argumentsBuilder.append(functionCall.arguments());
            }
        }
    }
/**
 * Appends one streamed (legacy) completion chunk to this builder.
 * Records the finish reason when present and accumulates the chunk's text.
 * Null or empty chunks are ignored.
 *
 * @param partialResponse one SSE chunk of the streamed completion; may be null
 */
public void append(CompletionResponse partialResponse) {
    if (partialResponse == null) {
        return;
    }
    List<CompletionChoice> choices = partialResponse.choices();
    if (choices == null || choices.isEmpty()) {
        return;
    }
    CompletionChoice firstChoice = choices.get(0);
    if (firstChoice == null) {
        return;
    }
    String reason = firstChoice.finishReason();
    if (reason != null) {
        this.finishReason = reason;
    }
    String text = firstChoice.text();
    if (text != null) {
        contentBuilder.append(text);
    }
}
/**
 * Assembles the accumulated chunks into a final response.
 * Precedence mirrors what the stream can contain: plain text first, then the
 * legacy single function call, then parallel tool calls.
 *
 * @param tokenizer              used to estimate output tokens; may be null (no usage reported)
 * @param forcefulToolExecution  whether the tool was forced, which changes token accounting
 * @return the built response, or null if nothing was accumulated
 */
public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) {
    String text = contentBuilder.toString();
    if (!text.isEmpty()) {
        return Response.from(
                AiMessage.from(text),
                tokenUsage(text, tokenizer),
                finishReasonFrom(finishReason));
    }

    String functionName = toolNameBuilder.toString();
    if (!functionName.isEmpty()) {
        // Legacy function_call format carries no id.
        ToolExecutionRequest request = ToolExecutionRequest.builder()
                .name(functionName)
                .arguments(toolArgumentsBuilder.toString())
                .build();
        return Response.from(
                AiMessage.from(request),
                tokenUsage(singletonList(request), tokenizer, forcefulToolExecution),
                finishReasonFrom(finishReason));
    }

    if (!indexToToolExecutionRequestBuilder.isEmpty()) {
        List<ToolExecutionRequest> requests = new ArrayList<>();
        for (ToolExecutionRequestBuilder builder : indexToToolExecutionRequestBuilder.values()) {
            requests.add(ToolExecutionRequest.builder()
                    .id(builder.idBuilder.toString())
                    .name(builder.nameBuilder.toString())
                    .arguments(builder.argumentsBuilder.toString())
                    .build());
        }
        return Response.from(
                AiMessage.from(requests),
                tokenUsage(requests, tokenizer, forcefulToolExecution),
                finishReasonFrom(finishReason));
    }

    // Stream carried neither content nor tool calls.
    return null;
}
/**
 * Builds usage for a plain-text response.
 *
 * @param content   the full generated text
 * @param tokenizer token estimator; when null, usage cannot be computed
 * @return token usage, or null if no tokenizer was provided
 */
private TokenUsage tokenUsage(String content, Tokenizer tokenizer) {
    if (tokenizer == null) {
        return null;
    }
    return new TokenUsage(inputTokenCount, tokenizer.estimateTokenCountInText(content));
}
/**
 * Builds usage for a tool-call response.
 *
 * @param toolExecutionRequests  the assembled tool-execution requests
 * @param tokenizer              token estimator; when null, usage cannot be computed
 * @param forcefulToolExecution  OpenAI counts output tokens differently when the
 *                               tool is executed forcefully, hence the two paths
 * @return token usage, or null if no tokenizer was provided
 */
private TokenUsage tokenUsage(List<ToolExecutionRequest> toolExecutionRequests, Tokenizer tokenizer, boolean forcefulToolExecution) {
    if (tokenizer == null) {
        return null;
    }
    int outputTokenCount;
    if (forcefulToolExecution) {
        // Forceful execution is estimated per individual request.
        outputTokenCount = toolExecutionRequests.stream()
                .mapToInt(tokenizer::estimateTokenCountInForcefulToolExecutionRequest)
                .sum();
    } else {
        outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(toolExecutionRequests);
    }
    return new TokenUsage(inputTokenCount, outputTokenCount);
}
// Mutable accumulator for one streamed tool call: id, name, and arguments each
// arrive in fragments across chunks and are concatenated here until build().
// NOTE(review): StringBuffer (synchronized) presumably chosen to match the other
// accumulators' cross-thread append pattern — confirm before switching to StringBuilder.
private static class ToolExecutionRequestBuilder {
    private final StringBuffer idBuilder = new StringBuffer();
    private final StringBuffer nameBuilder = new StringBuffer();
    private final StringBuffer argumentsBuilder = new StringBuffer();
}
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((4860, 5019), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4860, 4990), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4860, 4926), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5757), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5720), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5649), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5501, 5588), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
End of preview. Expand
in Dataset Viewer.
README.md exists but content is empty.
Use the Edit dataset card button to edit it.
- Downloads last month
- 37