Adaptive RAG
In [ ]:
var userHomeDir = System.getProperty("user.home");
var localRespoUrl = "file://" + userHomeDir + "/.m2/repository/";
var langchain4jVersion = "1.9.1";
var langchain4jbeta = "1.9.1-beta17";
var langgraph4jVersion = "1.7-SNAPSHOT";
In [2]:
%dependency /add-repo local \{localRespoUrl} release|never snapshot|always
// %dependency /list-repos
%dependency /add org.slf4j:slf4j-jdk14:2.0.9
%dependency /add org.bsc.langgraph4j:langgraph4j-core:\{langgraph4jVersion}
%dependency /add org.bsc.langgraph4j:langgraph4j-langchain4j:\{langgraph4jVersion}
%dependency /add dev.langchain4j:langchain4j:\{langchain4jVersion}
%dependency /add dev.langchain4j:langchain4j-open-ai:\{langchain4jVersion}
%dependency /resolve
Repository local url: file:///Users/bsorrentino/.m2/repository/ added.
Adding dependency org.slf4j:slf4j-jdk14:2.0.9
Adding dependency org.bsc.langgraph4j:langgraph4j-core:1.7-SNAPSHOT
Adding dependency org.bsc.langgraph4j:langgraph4j-langchain4j:1.7-SNAPSHOT
Adding dependency dev.langchain4j:langchain4j:1.9.1
Adding dependency dev.langchain4j:langchain4j-open-ai:1.9.1
Solving dependencies
Resolved artifacts count: 16
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/slf4j/slf4j-jdk14/2.0.9/slf4j-jdk14-2.0.9.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/slf4j/slf4j-api/2.0.9/slf4j-api-2.0.9.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/langgraph4j/langgraph4j-core/1.7-SNAPSHOT/langgraph4j-core-1.7-SNAPSHOT.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/async/async-generator/4.0.0-beta2/async-generator-4.0.0-beta2.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/langgraph4j/langgraph4j-langchain4j/1.7-SNAPSHOT/langgraph4j-langchain4j-1.7-SNAPSHOT.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j/1.9.1/langchain4j-1.9.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-core/1.9.1/langchain4j-core-1.9.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jspecify/jspecify/1.0.0/jspecify-1.0.0.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-annotations/2.20/jackson-annotations-2.20.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-core/2.20.1/jackson-core-2.20.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-databind/2.20.1/jackson-databind-2.20.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/apache/opennlp/opennlp-tools/2.5.4/opennlp-tools-2.5.4.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-open-ai/1.9.1/langchain4j-open-ai-1.9.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-http-client/1.9.1/langchain4j-http-client-1.9.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-http-client-jdk/1.9.1/langchain4j-http-client-jdk-1.9.1.jar
Add to classpath: /Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/knuddels/jtokkit/1.1.0/jtokkit-1.1.0.jar
Initialize Logger
In [3]:
try( var file = new java.io.FileInputStream("./logging.properties")) {
    java.util.logging.LogManager.getLogManager().readConfiguration( file );
}

var log = org.slf4j.LoggerFactory.getLogger("AdaptiveRag");
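The cell above loads a logging.properties file from the notebook's working directory; that file ships with the project and its exact contents are not reproduced here. As a hedged sketch only, a minimal java.util.logging configuration that would surface the log.trace(...) output shown later (the slf4j-jdk14 binding maps SLF4J trace to JUL FINEST) might look like this:

# Hypothetical example - the actual logging.properties in the project may differ
handlers=java.util.logging.ConsoleHandler
.level=INFO
# Let the console handler pass fine-grained records through
java.util.logging.ConsoleHandler.level=FINEST
java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter
# SLF4J trace maps to JUL FINEST for the "AdaptiveRag" logger
AdaptiveRag.level=FINEST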
In [4]:
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.input.structured.StructuredPromptProcessor;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;

import java.time.Duration;
import java.util.function.Function;

/**
 * Grades whether an LLM generation actually answers the user's question,
 * producing a binary 'yes'/'no' score.
 */
public class AnswerGrader implements Function<AnswerGrader.Arguments,AnswerGrader.Score> {

    static final String[] MODELS = { "gpt-3.5-turbo-0125", "gpt-4o-mini" };

    /**
     * Binary score assessing whether the answer addresses the question.
     */
    public static class Score {

        @Description("Answer addresses the question, 'yes' or 'no'")
        public String binaryScore;

        @Override
        public String toString() {
            return "Score: " + binaryScore;
        }
    }

    @StructuredPrompt("""
        User question:
        {{question}}
        LLM generation:
        {{generation}}
        """)
    record Arguments(String question, String generation) {
    }

    interface Service {

        @SystemMessage("""
            You are a grader assessing whether an answer addresses and/or resolves a question.
            Give a binary score 'yes' or 'no': 'yes' means that the answer resolves the question, otherwise return 'no'.
            """)
        Score invoke(String userMessage);
    }

    @Override
    public Score apply(Arguments args) {
        // The API key is read from the OPENAI_API_KEY environment variable
        var chatLanguageModel = OpenAiChatModel.builder()
                .apiKey( System.getenv("OPENAI_API_KEY") )
                .modelName( MODELS[1] )
                .timeout(Duration.ofMinutes(2))
                .logRequests(true)
                .logResponses(true)
                .maxRetries(2)
                .temperature(0.0)
                .maxTokens(2000)
                .build();

        // Create the grader service, render the structured prompt and ask for a score
        Service service = AiServices.create(Service.class, chatLanguageModel);

        Prompt prompt = StructuredPromptProcessor.toPrompt(args);

        log.trace( "prompt: {}", prompt.text() );

        return service.invoke(prompt.text());
    }
}
In [5]:
var grader = new AnswerGrader();
var args = new AnswerGrader.Arguments( "What are the four operations ? ", "LLM means Large Language Model" );
grader.apply( args );
prompt: User question:
What are the four operations ?
LLM generation:
LLM means Large Language Model
Out[5]:
Score: no
In [6]:
var args = new AnswerGrader.Arguments( "What are the four operations", "There are four basic operations: addition, subtraction, multiplication, and division." );
grader.apply( args );
prompt: User question:
What are the four operations
LLM generation:
There are four basic operations: addition, subtraction, multiplication, and division.
Out[6]:
Score: yes
In [7]:
var args = new AnswerGrader.Arguments( "What player at the Bears expected to draft first in the 2024 NFL draft?", "The Bears selected USC quarterback Caleb Williams with the No. 1 pick in the 2024 NFL Draft." );
grader.apply( args );
prompt: User question:
What player at the Bears expected to draft first in the 2024 NFL draft?
LLM generation:
The Bears selected USC quarterback Caleb Williams with the No. 1 pick in the 2024 NFL Draft.
Out[7]:
Score: yes