Upgrade quarkus-langchain4j to 0.24.0.CR3 & langchain4j to 1.0.0-beta1 (#54)

Also adjust to breaking changes:

- The default temperature of 0.7 has been removed for OpenAI chat models, hence we set it as the default in our config (see the sketch below).

Signed-off-by: Florian Hotze <[email protected]>
florian-h05 authored Feb 6, 2025
1 parent 414ec0d commit db98525
Showing 4 changed files with 12 additions and 9 deletions.
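
Before the diff itself, a minimal sketch of the mechanism the change relies on: with SmallRye Config, a @WithDefault annotation on a @ConfigMapping accessor supplies a fallback value whenever none is configured, which is why temperature() can now return a plain Double instead of Optional<Double>. The prefix and interface name below are illustrative assumptions, not the project's actual file.

```java
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

// Illustrative sketch only (hypothetical prefix and interface name):
// @WithDefault makes 0.7 the fallback whenever no value is configured,
// so the accessor no longer needs to be Optional<Double>.
@ConfigMapping(prefix = "chat-model")
public interface ModelConfigSketch {

    @WithDefault("0.7")
    Double temperature();
}
```

Callers can then pass config.temperature() straight to the model builders, which is what the producer changes below do.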
4 changes: 2 additions & 2 deletions pom.xml
@@ -36,8 +36,8 @@
<quarkus.platform.version>3.18.1</quarkus.platform.version>
<quarkus-minio.version>3.7.7</quarkus-minio.version>
<!-- remember to sync LangChain4j version when upgrading -->
- <quarkus-langchain4j.version>0.23.3</quarkus-langchain4j.version>
- <langchain4j.version>1.0.0-alpha1</langchain4j.version>
+ <quarkus-langchain4j.version>0.24.0.CR3</quarkus-langchain4j.version>
+ <langchain4j.version>1.0.0-beta1</langchain4j.version>
<skipITs>true</skipITs>
<!-- Configure SonarQube Cloud -->
<sonar-scanner.version>5.0.0.4389</sonar-scanner.version>
@@ -25,6 +25,7 @@
import java.util.Optional;

import io.smallrye.config.ConfigMapping;
+ import io.smallrye.config.WithDefault;

/**
* Provides configuration for the {@link ChatModelContainer}s. Model parameter descriptions are
@@ -59,7 +60,8 @@ interface ModelConfig {
*
* @return
*/
- Optional<Double> temperature();
+ @WithDefault("0.7")
+ Double temperature();

/**
* An alternative to sampling with temperature, called nucleus sampling, where the model
@@ -135,7 +135,7 @@ private ChatLanguageModel produceChatLanguageModel(ChatModelConfig.ModelConfig c
.baseUrl(config.baseUrl().orElse(null))
.apiKey(env.getOpenaiApiKey())
.modelName(config.model())
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.frequencyPenalty(config.frequencyPenalty().orElse(null))
.presencePenalty(config.presencePenalty().orElse(null))
@@ -153,7 +153,7 @@ private ChatLanguageModel produceChatLanguageModel(ChatModelConfig.ModelConfig c
.model(config.model())
.options(
Options.builder()
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.repeatPenalty(config.frequencyPenalty().orElse(null))
.build())
@@ -170,7 +170,7 @@ private ChatLanguageModel produceChatLanguageModel(ChatModelConfig.ModelConfig c
.endpoint(endpoint)
.apiKey(env.getAzureApiKey())
.apiVersion(AZURE_OPENAI_API_VERSION)
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.frequencyPenalty(config.frequencyPenalty().orElse(null))
.presencePenalty(config.presencePenalty().orElse(null))
@@ -188,7 +188,7 @@ private StreamingChatLanguageModel produceStreamingChatLanguageModel(
.baseUrl(config.baseUrl().orElse(null))
.apiKey(env.getOpenaiApiKey())
.modelName(config.model())
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.frequencyPenalty(config.frequencyPenalty().orElse(null))
.presencePenalty(config.presencePenalty().orElse(null))
@@ -206,7 +206,7 @@ private StreamingChatLanguageModel produceStreamingChatLanguageModel(
.model(config.model())
.options(
Options.builder()
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.repeatPenalty(config.frequencyPenalty().orElse(null))
.build())
@@ -223,7 +223,7 @@ private StreamingChatLanguageModel produceStreamingChatLanguageModel(
.endpoint(endpoint)
.apiKey(env.getAzureApiKey())
.apiVersion(AZURE_OPENAI_API_VERSION)
- .temperature(config.temperature().orElse(null))
+ .temperature(config.temperature())
.topP(config.topP().orElse(null))
.frequencyPenalty(config.frequencyPenalty().orElse(null))
.presencePenalty(config.presencePenalty().orElse(null))
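
For reference, a hedged sketch of the builder call the producers above now make with the LangChain4j OpenAI model; the helper class, method, and parameters here are illustrative assumptions rather than the project's actual wiring. Since temperature() always carries a value (0.7 unless overridden), it is passed directly, without an .orElse(null) unwrap.

```java
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;

// Illustrative wiring sketch (hypothetical helper, not the project's producer):
// the config-backed temperature goes straight to the builder, since it now
// always has a concrete value (0.7 by default).
final class ChatModelWiringSketch {

    static ChatLanguageModel openAiModel(String apiKey, String modelName, Double temperature) {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .modelName(modelName)
                .temperature(temperature)
                .build();
    }
}
```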
1 change: 1 addition & 0 deletions src/main/resources/application-dev.yaml
@@ -47,3 +47,4 @@ quarkus:
devservices:
port: 6334
shared: false
+ qdrant-image-name: docker.io/qdrant/qdrant:v1.13.2-unprivileged
