diff --git a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java
index 822f31844f..8a3925f3b7 100644
--- a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java
+++ b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java
@@ -82,7 +82,7 @@ public class AnthropicChatModel extends AbstractToolCallSupport implements ChatM
public static final Integer DEFAULT_MAX_TOKENS = 500;
- public static final Float DEFAULT_TEMPERATURE = 0.8f;
+ public static final Double DEFAULT_TEMPERATURE = 0.8;
/**
* The lower-level API for the Anthropic service.
diff --git a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatOptions.java b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatOptions.java
index 14bc85835a..03daa19524 100644
--- a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatOptions.java
+++ b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatOptions.java
@@ -48,8 +48,8 @@ public class AnthropicChatOptions implements ChatOptions, FunctionCallingOptions
private @JsonProperty("max_tokens") Integer maxTokens;
private @JsonProperty("metadata") ChatCompletionRequest.Metadata metadata;
private @JsonProperty("stop_sequences") List<String> stopSequences;
- private @JsonProperty("temperature") Float temperature;
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("temperature") Double temperature;
+ private @JsonProperty("top_p") Double topP;
private @JsonProperty("top_k") Integer topK;
/**
@@ -112,12 +112,12 @@ public Builder withStopSequences(List stopSequences) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -186,20 +186,20 @@ public void setStopSequences(List stopSequences) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -236,13 +236,13 @@ public void setFunctions(Set functions) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java
index a0cca8a0b0..ca5368ee4c 100644
--- a/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java
+++ b/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java
@@ -241,19 +241,19 @@ public record ChatCompletionRequest( // @formatter:off
@JsonProperty("metadata") Metadata metadata,
@JsonProperty("stop_sequences") List<String> stopSequences,
@JsonProperty("stream") Boolean stream,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("top_k") Integer topK,
@JsonProperty("tools") List<Tool> tools) {
// @formatter:on
public ChatCompletionRequest(String model, List<AnthropicMessage> messages, String system, Integer maxTokens,
- Float temperature, Boolean stream) {
+ Double temperature, Boolean stream) {
this(model, messages, system, maxTokens, null, null, stream, temperature, null, null, null);
}
public ChatCompletionRequest(String model, List<AnthropicMessage> messages, String system, Integer maxTokens,
- List<String> stopSequences, Float temperature, Boolean stream) {
+ List<String> stopSequences, Double temperature, Boolean stream) {
this(model, messages, system, maxTokens, null, stopSequences, stream, temperature, null, null, null);
}
@@ -292,9 +292,9 @@ public static class ChatCompletionRequestBuilder {
private Boolean stream = false;
- private Float temperature;
+ private Double temperature;
- private Float topP;
+ private Double topP;
private Integer topK;
@@ -357,12 +357,12 @@ public ChatCompletionRequestBuilder withStream(Boolean stream) {
return this;
}
- public ChatCompletionRequestBuilder withTemperature(Float temperature) {
+ public ChatCompletionRequestBuilder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public ChatCompletionRequestBuilder withTopP(Float topP) {
+ public ChatCompletionRequestBuilder withTopP(Double topP) {
this.topP = topP;
return this;
}
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java
index eb9dd5d49f..671cee3f2a 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java
@@ -120,7 +120,7 @@ void testMessageHistory() {
@Test
void streamingWithTokenUsage() {
- var promptOptions = AnthropicChatOptions.builder().withTemperature(0f).build();
+ var promptOptions = AnthropicChatOptions.builder().withTemperature(0.0).build();
var prompt = new Prompt("List two colors of the Polish flag. Be brief.", promptOptions);
var streamingTokenUsage = this.chatModel.stream(prompt).blockLast().getMetadata().getUsage();
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelObservationIT.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelObservationIT.java
index 6160d763a9..d01de8ee0f 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelObservationIT.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelObservationIT.java
@@ -71,9 +71,9 @@ void observationForChatOperation() {
.withModel(AnthropicApi.ChatModel.CLAUDE_3_HAIKU.getValue())
.withMaxTokens(2048)
.withStopSequences(List.of("this-is-the-end"))
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withTopK(1)
- .withTopP(1f)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
@@ -93,9 +93,9 @@ void observationForStreamingChatOperation() {
.withModel(AnthropicApi.ChatModel.CLAUDE_3_HAIKU.getValue())
.withMaxTokens(2048)
.withStopSequences(List.of("this-is-the-end"))
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withTopK(1)
- .withTopP(1f)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/ChatCompletionRequestTests.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/ChatCompletionRequestTests.java
index b5c47abd04..be251b00df 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/ChatCompletionRequestTests.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/ChatCompletionRequestTests.java
@@ -31,7 +31,7 @@ public class ChatCompletionRequestTests {
public void createRequestWithChatOptions() {
var client = new AnthropicChatModel(new AnthropicApi("TEST"),
- AnthropicChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ AnthropicChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
var request = client.createRequest(new Prompt("Test message content"), false);
@@ -39,16 +39,16 @@ public void createRequestWithChatOptions() {
assertThat(request.stream()).isFalse();
assertThat(request.model()).isEqualTo("DEFAULT_MODEL");
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
request = client.createRequest(new Prompt("Test message content",
- AnthropicChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()), true);
+ AnthropicChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()), true);
assertThat(request.messages()).hasSize(1);
assertThat(request.stream()).isTrue();
assertThat(request.model()).isEqualTo("PROMPT_MODEL");
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
}
}
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/AnthropicApiIT.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/AnthropicApiIT.java
index 013b315ed4..ceaebdfe6d 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/AnthropicApiIT.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/AnthropicApiIT.java
@@ -45,7 +45,7 @@ void chatCompletionEntity() {
Role.USER);
ResponseEntity<ChatCompletionResponse> response = anthropicApi
.chatCompletionEntity(new ChatCompletionRequest(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(),
- List.of(chatCompletionMessage), null, 100, 0.8f, false));
+ List.of(chatCompletionMessage), null, 100, 0.8, false));
System.out.println(response);
assertThat(response).isNotNull();
@@ -58,9 +58,8 @@ void chatCompletionStream() {
AnthropicMessage chatCompletionMessage = new AnthropicMessage(List.of(new ContentBlock("Tell me a Joke?")),
Role.USER);
- Flux<ChatCompletionResponse> response = anthropicApi
- .chatCompletionStream(new ChatCompletionRequest(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(),
- List.of(chatCompletionMessage), null, 100, 0.8f, true));
+ Flux<ChatCompletionResponse> response = anthropicApi.chatCompletionStream(new ChatCompletionRequest(
+ AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(), List.of(chatCompletionMessage), null, 100, 0.8, true));
assertThat(response).isNotNull();
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiLegacyToolIT.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiLegacyToolIT.java
index 652a400f62..6e9440e0af 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiLegacyToolIT.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiLegacyToolIT.java
@@ -107,8 +107,8 @@ void toolCalls() {
Role.USER);
ChatCompletionRequest chatCompletionRequest = new ChatCompletionRequest(
- AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(), List.of(chatCompletionMessage), systemPrompt, 500,
- 0.8f, false);
+ AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(), List.of(chatCompletionMessage), systemPrompt, 500, 0.8,
+ false);
ResponseEntity<ChatCompletionResponse> chatCompletion = doCall(chatCompletionRequest);
@@ -147,7 +147,7 @@ private ResponseEntity doCall(ChatCompletionRequest chat
AnthropicMessage chatCompletionMessage2 = new AnthropicMessage(List.of(new ContentBlock(content)), Role.USER);
return doCall(new ChatCompletionRequest(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(),
- List.of(chatCompletionMessage2), null, 500, 0.8f, false));
+ List.of(chatCompletionMessage2), null, 500, 0.8, false));
}
}
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiToolIT.java b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiToolIT.java
index a4e9b9ed4a..c564297626 100644
--- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiToolIT.java
+++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/api/tool/AnthropicApiToolIT.java
@@ -108,7 +108,7 @@ private ResponseEntity doCall(List mes
.withModel(AnthropicApi.ChatModel.CLAUDE_3_OPUS)
.withMessages(messageConversation)
.withMaxTokens(1500)
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withTools(tools)
.build();
diff --git a/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatModel.java b/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatModel.java
index ec3872e000..1b2a7a7248 100644
--- a/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatModel.java
+++ b/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatModel.java
@@ -89,6 +89,7 @@
* @author Christian Tzolov
* @author Grogdunn
* @author Benoit Moussaud
+ * @author Thomas Vitale
* @author luocongqiu
* @author timostark
* @see ChatModel
@@ -98,7 +99,7 @@ public class AzureOpenAiChatModel extends AbstractToolCallSupport implements Cha
private static final String DEFAULT_DEPLOYMENT_NAME = "gpt-4o";
- private static final Float DEFAULT_TEMPERATURE = 0.7f;
+ private static final Double DEFAULT_TEMPERATURE = 0.7;
/**
* The {@link OpenAIClient} used to interact with the Azure OpenAI service.
@@ -422,22 +423,22 @@ private ChatCompletionsOptions merge(ChatCompletionsOptions fromAzureOptions,
mergedAzureOptions.setTemperature(fromAzureOptions.getTemperature());
if (mergedAzureOptions.getTemperature() == null && toSpringAiOptions.getTemperature() != null) {
- mergedAzureOptions.setTemperature(toSpringAiOptions.getTemperature().doubleValue());
+ mergedAzureOptions.setTemperature(toSpringAiOptions.getTemperature());
}
mergedAzureOptions.setTopP(fromAzureOptions.getTopP());
if (mergedAzureOptions.getTopP() == null && toSpringAiOptions.getTopP() != null) {
- mergedAzureOptions.setTopP(toSpringAiOptions.getTopP().doubleValue());
+ mergedAzureOptions.setTopP(toSpringAiOptions.getTopP());
}
mergedAzureOptions.setFrequencyPenalty(fromAzureOptions.getFrequencyPenalty());
if (mergedAzureOptions.getFrequencyPenalty() == null && toSpringAiOptions.getFrequencyPenalty() != null) {
- mergedAzureOptions.setFrequencyPenalty(toSpringAiOptions.getFrequencyPenalty().doubleValue());
+ mergedAzureOptions.setFrequencyPenalty(toSpringAiOptions.getFrequencyPenalty());
}
mergedAzureOptions.setPresencePenalty(fromAzureOptions.getPresencePenalty());
if (mergedAzureOptions.getPresencePenalty() == null && toSpringAiOptions.getPresencePenalty() != null) {
- mergedAzureOptions.setPresencePenalty(toSpringAiOptions.getPresencePenalty().doubleValue());
+ mergedAzureOptions.setPresencePenalty(toSpringAiOptions.getPresencePenalty());
}
mergedAzureOptions.setResponseFormat(fromAzureOptions.getResponseFormat());
@@ -486,19 +487,19 @@ private ChatCompletionsOptions merge(AzureOpenAiChatOptions fromSpringAiOptions,
}
if (fromSpringAiOptions.getTemperature() != null) {
- mergedAzureOptions.setTemperature(fromSpringAiOptions.getTemperature().doubleValue());
+ mergedAzureOptions.setTemperature(fromSpringAiOptions.getTemperature());
}
if (fromSpringAiOptions.getTopP() != null) {
- mergedAzureOptions.setTopP(fromSpringAiOptions.getTopP().doubleValue());
+ mergedAzureOptions.setTopP(fromSpringAiOptions.getTopP());
}
if (fromSpringAiOptions.getFrequencyPenalty() != null) {
- mergedAzureOptions.setFrequencyPenalty(fromSpringAiOptions.getFrequencyPenalty().doubleValue());
+ mergedAzureOptions.setFrequencyPenalty(fromSpringAiOptions.getFrequencyPenalty());
}
if (fromSpringAiOptions.getPresencePenalty() != null) {
- mergedAzureOptions.setPresencePenalty(fromSpringAiOptions.getPresencePenalty().doubleValue());
+ mergedAzureOptions.setPresencePenalty(fromSpringAiOptions.getPresencePenalty());
}
if (fromSpringAiOptions.getN() != null) {
diff --git a/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatOptions.java b/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatOptions.java
index eec7613d98..fc4c0f1795 100644
--- a/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatOptions.java
+++ b/models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatOptions.java
@@ -57,7 +57,7 @@ public class AzureOpenAiChatOptions implements FunctionCallingOptions, ChatOptio
* two settings is difficult to predict.
*/
@JsonProperty(value = "temperature")
- private Float temperature;
+ private Double temperature;
/**
* An alternative to sampling with temperature called nucleus sampling. This value
@@ -68,7 +68,7 @@ public class AzureOpenAiChatOptions implements FunctionCallingOptions, ChatOptio
* two settings is difficult to predict.
*/
@JsonProperty(value = "top_p")
- private Float topP;
+ private Double topP;
/**
* A map between GPT token IDs and bias scores that influences the probability of
@@ -109,7 +109,7 @@ public class AzureOpenAiChatOptions implements FunctionCallingOptions, ChatOptio
* output new topics.
*/
@JsonProperty(value = "presence_penalty")
- private Float presencePenalty;
+ private Double presencePenalty;
/**
* A value that influences the probability of generated tokens appearing based on
@@ -118,7 +118,7 @@ public class AzureOpenAiChatOptions implements FunctionCallingOptions, ChatOptio
* model repeating the same statements verbatim.
*/
@JsonProperty(value = "frequency_penalty")
- private Float frequencyPenalty;
+ private Double frequencyPenalty;
/**
* The deployment name as defined in Azure Open AI Studio when creating a deployment
@@ -182,7 +182,7 @@ public Builder withDeploymentName(String deploymentName) {
return this;
}
- public Builder withFrequencyPenalty(Float frequencyPenalty) {
+ public Builder withFrequencyPenalty(Double frequencyPenalty) {
this.options.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -202,7 +202,7 @@ public Builder withN(Integer n) {
return this;
}
- public Builder withPresencePenalty(Float presencePenalty) {
+ public Builder withPresencePenalty(Double presencePenalty) {
this.options.presencePenalty = presencePenalty;
return this;
}
@@ -212,12 +212,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -309,20 +309,20 @@ public void setStop(List stop) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -346,20 +346,20 @@ public void setDeploymentName(String deploymentName) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/models/spring-ai-azure-openai/src/test/java/org/springframework/ai/azure/openai/AzureChatCompletionsOptionsTests.java b/models/spring-ai-azure-openai/src/test/java/org/springframework/ai/azure/openai/AzureChatCompletionsOptionsTests.java
index e9c8196ff3..6e8d8bd531 100644
--- a/models/spring-ai-azure-openai/src/test/java/org/springframework/ai/azure/openai/AzureChatCompletionsOptionsTests.java
+++ b/models/spring-ai-azure-openai/src/test/java/org/springframework/ai/azure/openai/AzureChatCompletionsOptionsTests.java
@@ -44,14 +44,14 @@ public void createRequestWithChatOptions() {
var defaultOptions = AzureOpenAiChatOptions.builder()
.withDeploymentName("DEFAULT_MODEL")
- .withTemperature(66.6f)
- .withFrequencyPenalty(696.9f)
- .withPresencePenalty(969.6f)
+ .withTemperature(66.6)
+ .withFrequencyPenalty(696.9)
+ .withPresencePenalty(969.6)
.withLogitBias(Map.of("foo", 1))
.withMaxTokens(969)
.withN(69)
.withStop(List.of("foo", "bar"))
- .withTopP(0.69f)
+ .withTopP(0.69)
.withUser("user")
.withResponseFormat(AzureOpenAiResponseFormat.TEXT)
.build();
@@ -63,27 +63,27 @@ public void createRequestWithChatOptions() {
assertThat(requestOptions.getMessages()).hasSize(1);
assertThat(requestOptions.getModel()).isEqualTo("DEFAULT_MODEL");
- assertThat(requestOptions.getTemperature()).isEqualTo(66.6f);
- assertThat(requestOptions.getFrequencyPenalty()).isEqualTo(696.9f);
- assertThat(requestOptions.getPresencePenalty()).isEqualTo(969.6f);
+ assertThat(requestOptions.getTemperature()).isEqualTo(66.6);
+ assertThat(requestOptions.getFrequencyPenalty()).isEqualTo(696.9);
+ assertThat(requestOptions.getPresencePenalty()).isEqualTo(969.6);
assertThat(requestOptions.getLogitBias()).isEqualTo(Map.of("foo", 1));
assertThat(requestOptions.getMaxTokens()).isEqualTo(969);
assertThat(requestOptions.getN()).isEqualTo(69);
assertThat(requestOptions.getStop()).isEqualTo(List.of("foo", "bar"));
- assertThat(requestOptions.getTopP()).isEqualTo(0.69f);
+ assertThat(requestOptions.getTopP()).isEqualTo(0.69);
assertThat(requestOptions.getUser()).isEqualTo("user");
assertThat(requestOptions.getResponseFormat()).isInstanceOf(ChatCompletionsTextResponseFormat.class);
var runtimeOptions = AzureOpenAiChatOptions.builder()
.withDeploymentName("PROMPT_MODEL")
- .withTemperature(99.9f)
- .withFrequencyPenalty(100f)
- .withPresencePenalty(100f)
+ .withTemperature(99.9)
+ .withFrequencyPenalty(100.0)
+ .withPresencePenalty(100.0)
.withLogitBias(Map.of("foo", 2))
.withMaxTokens(100)
.withN(100)
.withStop(List.of("foo", "bar"))
- .withTopP(0.111f)
+ .withTopP(0.111)
.withUser("user2")
.withResponseFormat(AzureOpenAiResponseFormat.JSON)
.build();
@@ -93,30 +93,31 @@ public void createRequestWithChatOptions() {
assertThat(requestOptions.getMessages()).hasSize(1);
assertThat(requestOptions.getModel()).isEqualTo("PROMPT_MODEL");
- assertThat(requestOptions.getTemperature()).isEqualTo(99.9f);
- assertThat(requestOptions.getFrequencyPenalty()).isEqualTo(100f);
- assertThat(requestOptions.getPresencePenalty()).isEqualTo(100f);
+ assertThat(requestOptions.getTemperature()).isEqualTo(99.9);
+ assertThat(requestOptions.getFrequencyPenalty()).isEqualTo(100.0);
+ assertThat(requestOptions.getPresencePenalty()).isEqualTo(100.0);
assertThat(requestOptions.getLogitBias()).isEqualTo(Map.of("foo", 2));
assertThat(requestOptions.getMaxTokens()).isEqualTo(100);
assertThat(requestOptions.getN()).isEqualTo(100);
assertThat(requestOptions.getStop()).isEqualTo(List.of("foo", "bar"));
- assertThat(requestOptions.getTopP()).isEqualTo(0.111f);
+ assertThat(requestOptions.getTopP()).isEqualTo(0.111);
assertThat(requestOptions.getUser()).isEqualTo("user2");
assertThat(requestOptions.getResponseFormat()).isInstanceOf(ChatCompletionsJsonResponseFormat.class);
}
private static Stream<Arguments> providePresencePenaltyAndFrequencyPenaltyTest() {
- return Stream.of(Arguments.of(0.0f, 0.0f), Arguments.of(0.0f, 1.0f), Arguments.of(1.0f, 0.0f),
- Arguments.of(1.0f, 1.0f), Arguments.of(1.0f, null), Arguments.of(null, 1.0f), Arguments.of(null, null));
+ return Stream.of(Arguments.of(0.0, 0.0), Arguments.of(0.0, 1.0), Arguments.of(1.0, 0.0), Arguments.of(1.0, 1.0),
+ Arguments.of(1.0, null), Arguments.of(null, 1.0), Arguments.of(null, null));
}
@ParameterizedTest
@MethodSource("providePresencePenaltyAndFrequencyPenaltyTest")
- public void createChatOptionsWithPresencePenaltyAndFrequencyPenalty(Float presencePenalty, Float frequencyPenalty) {
+ public void createChatOptionsWithPresencePenaltyAndFrequencyPenalty(Double presencePenalty,
+ Double frequencyPenalty) {
var options = AzureOpenAiChatOptions.builder()
.withMaxTokens(800)
- .withTemperature(0.7F)
- .withTopP(0.95F)
+ .withTemperature(0.7)
+ .withTopP(0.95)
.withPresencePenalty(presencePenalty)
.withFrequencyPenalty(frequencyPenalty)
.build();
@@ -125,14 +126,14 @@ public void createChatOptionsWithPresencePenaltyAndFrequencyPenalty(Float presen
assertThat(options.getPresencePenalty()).isEqualTo(null);
}
else {
- assertThat(options.getPresencePenalty().floatValue()).isEqualTo(presencePenalty);
+ assertThat(options.getPresencePenalty()).isEqualTo(presencePenalty);
}
if (frequencyPenalty == null) {
assertThat(options.getFrequencyPenalty()).isEqualTo(null);
}
else {
- assertThat(options.getFrequencyPenalty().floatValue()).isEqualTo(frequencyPenalty);
+ assertThat(options.getFrequencyPenalty()).isEqualTo(frequencyPenalty);
}
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/AnthropicChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/AnthropicChatOptions.java
index e12280dff1..7bdc15e2d2 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/AnthropicChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/AnthropicChatOptions.java
@@ -39,7 +39,7 @@ public class AnthropicChatOptions implements ChatOptions {
* responses from the generative. This value specifies default to be used by the backend while making the call to
* the generative.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* Specify the maximum number of tokens to use in the generated response. Note that the models may stop before
@@ -57,7 +57,7 @@ public class AnthropicChatOptions implements ChatOptions {
* The maximum cumulative probability of tokens to consider when sampling. The generative uses combined Top-k and
* nucleus sampling. Nucleus sampling considers the smallest set of tokens whose probability sum is at least topP.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* Configure up to four sequences that the generative recognizes. After a stop sequence, the generative stops
@@ -79,7 +79,7 @@ public static class Builder {
private final AnthropicChatOptions options = new AnthropicChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
@@ -94,7 +94,7 @@ public Builder withTopK(Integer topK) {
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -116,11 +116,11 @@ public AnthropicChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -153,11 +153,11 @@ public void setTopK(Integer topK) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -186,13 +186,13 @@ public String getModel() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicChatModel.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicChatModel.java
index d6a44d9bf6..c4321c3745 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicChatModel.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicChatModel.java
@@ -48,7 +48,7 @@ public class BedrockAnthropicChatModel implements ChatModel, StreamingChatModel
public BedrockAnthropicChatModel(AnthropicChatBedrockApi chatApi) {
this(chatApi,
AnthropicChatOptions.builder()
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokensToSample(500)
.withTopK(10)
.withAnthropicVersion(AnthropicChatBedrockApi.DEFAULT_ANTHROPIC_VERSION)
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApi.java
index b6a738b513..c1235456b6 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApi.java
@@ -34,6 +34,7 @@
/**
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 0.8.0
*/
@@ -139,10 +140,10 @@ public AnthropicChatBedrockApi(String modelId, AwsCredentialsProvider credential
@JsonInclude(Include.NON_NULL)
public record AnthropicChatRequest(
@JsonProperty("prompt") String prompt,
- @JsonProperty("temperature") Float temperature,
+ @JsonProperty("temperature") Double temperature,
@JsonProperty("max_tokens_to_sample") Integer maxTokensToSample,
@JsonProperty("top_k") Integer topK,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("stop_sequences") List<String> stopSequences,
@JsonProperty("anthropic_version") String anthropicVersion) {
@@ -152,10 +153,10 @@ public static Builder builder(String prompt) {
public static class Builder {
private final String prompt;
- private Float temperature;// = 0.7f;
+ private Double temperature;// = 0.7;
private Integer maxTokensToSample;// = 500;
private Integer topK;// = 10;
- private Float topP;
+ private Double topP;
private List<String> stopSequences;
private String anthropicVersion;
@@ -163,7 +164,7 @@ private Builder(String prompt) {
this.prompt = prompt;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
@@ -178,7 +179,7 @@ public Builder withTopK(Integer topK) {
return this;
}
- public Builder withTopP(Float tpoP) {
+ public Builder withTopP(Double tpoP) {
this.topP = tpoP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/Anthropic3ChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/Anthropic3ChatOptions.java
index c573df9118..45927c9113 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/Anthropic3ChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/Anthropic3ChatOptions.java
@@ -38,7 +38,7 @@ public class Anthropic3ChatOptions implements ChatOptions {
* responses from the generative. This value specifies default to be used by the backend while making the call to
* the generative.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* Specify the maximum number of tokens to use in the generated response. Note that the models may stop before
@@ -56,7 +56,7 @@ public class Anthropic3ChatOptions implements ChatOptions {
* The maximum cumulative probability of tokens to consider when sampling. The generative uses combined Top-k and
* nucleus sampling. Nucleus sampling considers the smallest set of tokens whose probability sum is at least topP.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* Configure up to four sequences that the generative recognizes. After a stop sequence, the generative stops
@@ -78,7 +78,7 @@ public static class Builder {
private final Anthropic3ChatOptions options = new Anthropic3ChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
@@ -93,7 +93,7 @@ public Builder withTopK(Integer topK) {
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -115,11 +115,11 @@ public Anthropic3ChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -142,11 +142,11 @@ public void setTopK(Integer topK) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -175,13 +175,13 @@ public String getModel() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3ChatModel.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3ChatModel.java
index 4d69b6e1f3..95a74f933c 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3ChatModel.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3ChatModel.java
@@ -62,7 +62,7 @@ public class BedrockAnthropic3ChatModel implements ChatModel, StreamingChatModel
public BedrockAnthropic3ChatModel(Anthropic3ChatBedrockApi chatApi) {
this(chatApi,
Anthropic3ChatOptions.builder()
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokens(500)
.withTopK(10)
.withAnthropicVersion(Anthropic3ChatBedrockApi.DEFAULT_ANTHROPIC_VERSION)
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApi.java
index b8407debed..a4bebcfe3e 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApi.java
@@ -41,6 +41,7 @@
*
* @author Ben Middleton
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 1.0.0
*/
@@ -146,10 +147,10 @@ public Anthropic3ChatBedrockApi(String modelId, AwsCredentialsProvider credentia
public record AnthropicChatRequest(
@JsonProperty("messages") List messages,
@JsonProperty("system") String system,
- @JsonProperty("temperature") Float temperature,
+ @JsonProperty("temperature") Double temperature,
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("top_k") Integer topK,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("stop_sequences") List stopSequences,
@JsonProperty("anthropic_version") String anthropicVersion) {
@@ -160,10 +161,10 @@ public static Builder builder(List messages) {
public static class Builder {
private final List messages;
private String system;
- private Float temperature;// = 0.7f;
+ private Double temperature;// = 0.7;
private Integer maxTokens;// = 500;
private Integer topK;// = 10;
- private Float topP;
+ private Double topP;
private List stopSequences;
private String anthropicVersion;
@@ -175,7 +176,7 @@ public Builder withSystem(String system) {
this.system = system;
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
@@ -190,7 +191,7 @@ public Builder withTopK(Integer topK) {
return this;
}
- public Builder withTopP(Float tpoP) {
+ public Builder withTopP(Double tpoP) {
this.topP = tpoP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatOptions.java
index 4773c49da6..04f67f282e 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatOptions.java
@@ -40,13 +40,13 @@ public class BedrockCohereChatOptions implements ChatOptions {
* (optional) Use a lower value to decrease randomness in the response. Defaults to
* 0.7.
*/
- @JsonProperty("temperature") Float temperature;
+ @JsonProperty("temperature") Double temperature;
/**
* (optional) The maximum cumulative probability of tokens to consider when sampling.
* The generative uses combined Top-k and nucleus sampling. Nucleus sampling considers
* the smallest set of tokens whose probability sum is at least topP.
*/
- @JsonProperty("p") Float topP;
+ @JsonProperty("p") Double topP;
/**
* (optional) Specify the number of token choices the generative uses to generate the
* next token.
@@ -89,12 +89,12 @@ public static class Builder {
private final BedrockCohereChatOptions options = new BedrockCohereChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -141,20 +141,20 @@ public BedrockCohereChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -225,13 +225,13 @@ public String getModel() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApi.java
index 0d69de07b5..1e77a74515 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApi.java
@@ -38,6 +38,7 @@
* https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere.html
*
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 0.8.0
*/
@@ -130,8 +131,8 @@ public CohereChatBedrockApi(String modelId, AwsCredentialsProvider credentialsPr
@JsonInclude(Include.NON_NULL)
public record CohereChatRequest(
@JsonProperty("prompt") String prompt,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("p") Double topP,
@JsonProperty("k") Integer topK,
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("stop_sequences") List stopSequences,
@@ -204,8 +205,8 @@ public static Builder builder(String prompt) {
*/
public static class Builder {
private final String prompt;
- private Float temperature;
- private Float topP;
+ private Double temperature;
+ private Double topP;
private Integer topK;
private Integer maxTokens;
private List stopSequences;
@@ -219,12 +220,12 @@ public Builder(String prompt) {
this.prompt = prompt;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.topP = topP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModel.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModel.java
index b6f9d6cbc5..7cb9bf0ac3 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModel.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModel.java
@@ -51,8 +51,8 @@ public BedrockAi21Jurassic2ChatModel(Ai21Jurassic2ChatBedrockApi chatApi, Bedroc
public BedrockAi21Jurassic2ChatModel(Ai21Jurassic2ChatBedrockApi chatApi) {
this(chatApi,
BedrockAi21Jurassic2ChatOptions.builder()
- .withTemperature(0.8f)
- .withTopP(0.9f)
+ .withTemperature(0.8)
+ .withTopP(0.9)
.withMaxTokens(100)
.build());
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatOptions.java
index 5c424fd226..eb8ce968aa 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatOptions.java
@@ -61,13 +61,13 @@ public class BedrockAi21Jurassic2ChatOptions implements ChatOptions {
* Modifies the distribution from which tokens are sampled.
*/
@JsonProperty("temperature")
- private Float temperature;
+ private Double temperature;
/**
* Sample tokens from the corresponding top percentile of probability mass.
*/
@JsonProperty("topP")
- private Float topP;
+ private Double topP;
/**
* Return the top-K (topKReturn) alternative tokens.
@@ -171,7 +171,7 @@ public void setMinTokens(Integer minTokens) {
* @return The temperature.
*/
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return temperature;
}
@@ -179,7 +179,7 @@ public Float getTemperature() {
* Sets the temperature for modifying the token sampling distribution.
* @param temperature The temperature.
*/
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -189,7 +189,7 @@ public void setTemperature(Float temperature) {
* @return The topP parameter.
*/
@Override
- public Float getTopP() {
+ public Double getTopP() {
return topP;
}
@@ -198,7 +198,7 @@ public Float getTopP() {
* mass.
* @param topP The topP parameter.
*/
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -238,12 +238,12 @@ public void setStopSequences(List stopSequences) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return getFrequencyPenaltyOptions() != null ? getFrequencyPenaltyOptions().scale() : null;
}
@JsonIgnore
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
if (frequencyPenalty != null) {
setFrequencyPenaltyOptions(Penalty.builder().scale(frequencyPenalty).build());
}
@@ -267,12 +267,12 @@ public void setFrequencyPenaltyOptions(Penalty frequencyPenaltyOptions) {
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return getPresencePenaltyOptions() != null ? getPresencePenaltyOptions().scale() : null;
}
@JsonIgnore
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
if (presencePenalty != null) {
setPresencePenaltyOptions(Penalty.builder().scale(presencePenalty).build());
}
@@ -344,12 +344,12 @@ public Builder withMinTokens(Integer minTokens) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
request.setTemperature(temperature);
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
request.setTopP(topP);
return this;
}
@@ -389,7 +389,7 @@ public BedrockAi21Jurassic2ChatOptions build() {
* Penalty object for frequency, presence, and count penalties.
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
- public record Penalty(@JsonProperty("scale") Float scale, @JsonProperty("applyToNumbers") Boolean applyToNumbers,
+ public record Penalty(@JsonProperty("scale") Double scale, @JsonProperty("applyToNumbers") Boolean applyToNumbers,
@JsonProperty("applyToPunctuations") Boolean applyToPunctuations,
@JsonProperty("applyToStopwords") Boolean applyToStopwords,
@JsonProperty("applyToWhitespaces") Boolean applyToWhitespaces,
@@ -401,7 +401,7 @@ public static Builder builder() {
public static class Builder {
- private Float scale;
+ private Double scale;
// can't keep it null due to modelOptionsUtils#mapToClass convert null to
// false
@@ -415,7 +415,7 @@ public static class Builder {
private Boolean applyToEmojis = true;
- public Builder scale(Float scale) {
+ public Builder scale(Double scale) {
this.scale = scale;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApi.java
index 06f5216f71..c3ab019d12 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApi.java
@@ -37,6 +37,7 @@
* https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-jurassic2.html
*
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 0.8.0
*/
@@ -132,8 +133,8 @@ public Ai21Jurassic2ChatBedrockApi(String modelId, AwsCredentialsProvider creden
@JsonInclude(Include.NON_NULL)
public record Ai21Jurassic2ChatRequest(
@JsonProperty("prompt") String prompt,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("topP") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("topP") Double topP,
@JsonProperty("maxTokens") Integer maxTokens,
@JsonProperty("stopSequences") List stopSequences,
@JsonProperty("countPenalty") IntegerScalePenalty countPenalty,
@@ -198,8 +199,8 @@ public static Builder builder(String prompt) {
}
public static class Builder {
private String prompt;
- private Float temperature;
- private Float topP;
+ private Double temperature;
+ private Double topP;
private Integer maxTokens;
private List stopSequences;
private IntegerScalePenalty countPenalty;
@@ -210,12 +211,12 @@ public Builder(String prompt) {
this.prompt = prompt;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.topP = topP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModel.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModel.java
index b391763227..51b83a7be0 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModel.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModel.java
@@ -49,8 +49,7 @@ public class BedrockLlamaChatModel implements ChatModel, StreamingChatModel {
private final BedrockLlamaChatOptions defaultOptions;
public BedrockLlamaChatModel(LlamaChatBedrockApi chatApi) {
- this(chatApi,
- BedrockLlamaChatOptions.builder().withTemperature(0.8f).withTopP(0.9f).withMaxGenLen(100).build());
+ this(chatApi, BedrockLlamaChatOptions.builder().withTemperature(0.8).withTopP(0.9).withMaxGenLen(100).build());
}
public BedrockLlamaChatModel(LlamaChatBedrockApi chatApi, BedrockLlamaChatOptions options) {
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatOptions.java
index 8d26ccb309..ed50bd3c5e 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatOptions.java
@@ -35,13 +35,13 @@ public class BedrockLlamaChatOptions implements ChatOptions {
* The temperature value controls the randomness of the generated text. Use a lower
* value to decrease randomness in the response.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* The topP value controls the diversity of the generated text. Use a lower value to
* ignore less probable options. Set to 0 or 1.0 to disable.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* The maximum length of the generated text.
@@ -56,12 +56,12 @@ public static class Builder {
private BedrockLlamaChatOptions options = new BedrockLlamaChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -78,20 +78,20 @@ public BedrockLlamaChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -122,13 +122,13 @@ public String getModel() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApi.java
index 2531e6c7d8..df9d642a0d 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApi.java
@@ -36,6 +36,7 @@
* https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html
*
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 0.8.0
*/
@@ -119,8 +120,8 @@ public LlamaChatBedrockApi(String modelId, AwsCredentialsProvider credentialsPro
@JsonInclude(Include.NON_NULL)
public record LlamaChatRequest(
@JsonProperty("prompt") String prompt,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("max_gen_len") Integer maxGenLen) {
/**
@@ -134,20 +135,20 @@ public static Builder builder(String prompt) {
public static class Builder {
private String prompt;
- private Float temperature;
- private Float topP;
+ private Double temperature;
+ private Double topP;
private Integer maxGenLen;
public Builder(String prompt) {
this.prompt = prompt;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.topP = topP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModel.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModel.java
index b144a2a10d..e6d55a03bd 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModel.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModel.java
@@ -46,7 +46,7 @@ public class BedrockTitanChatModel implements ChatModel, StreamingChatModel {
private final BedrockTitanChatOptions defaultOptions;
public BedrockTitanChatModel(TitanChatBedrockApi chatApi) {
- this(chatApi, BedrockTitanChatOptions.builder().withTemperature(0.8f).build());
+ this(chatApi, BedrockTitanChatOptions.builder().withTemperature(0.8).build());
}
public BedrockTitanChatModel(TitanChatBedrockApi chatApi, BedrockTitanChatOptions defaultOptions) {
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatOptions.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatOptions.java
index 67458a5b13..d1187f1189 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatOptions.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/BedrockTitanChatOptions.java
@@ -37,12 +37,12 @@ public class BedrockTitanChatOptions implements ChatOptions {
/**
* The temperature value controls the randomness of the generated text.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* The topP value controls the diversity of the generated text. Use a lower value to ignore less probable options.
*/
- private @JsonProperty("topP") Float topP;
+ private @JsonProperty("topP") Double topP;
/**
* Maximum number of tokens to generate.
@@ -63,12 +63,12 @@ public static class Builder {
private BedrockTitanChatOptions options = new BedrockTitanChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -90,20 +90,20 @@ public BedrockTitanChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -143,13 +143,13 @@ public String getModel() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApi.java b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApi.java
index f7516ddc37..85a1f10c7a 100644
--- a/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApi.java
+++ b/models/spring-ai-bedrock/src/main/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApi.java
@@ -40,6 +40,7 @@
* https://docs.aws.amazon.com/bedrock/latest/userguide/titan-text-models.html
*
* @author Christian Tzolov
+ * @author Thomas Vitale
* @author Wei Jiang
* @since 0.8.0
*/
@@ -134,8 +135,8 @@ public record TitanChatRequest(
*/
@JsonInclude(Include.NON_NULL)
public record TextGenerationConfig(
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("topP") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("topP") Double topP,
@JsonProperty("maxTokenCount") Integer maxTokenCount,
@JsonProperty("stopSequences") List stopSequences) {
}
@@ -151,8 +152,8 @@ public static Builder builder(String inputText) {
public static class Builder {
private final String inputText;
- private Float temperature;
- private Float topP;
+ private Double temperature;
+ private Double topP;
private Integer maxTokenCount;
private List stopSequences;
@@ -160,12 +161,12 @@ public Builder(String inputText) {
this.inputText = inputText;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.topP = topP;
return this;
}
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicCreateRequestTests.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicCreateRequestTests.java
index c8b5cbe859..3cf8b344b7 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicCreateRequestTests.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/BedrockAnthropicCreateRequestTests.java
@@ -40,9 +40,9 @@ public void createRequestWithChatOptions() {
var client = new BedrockAnthropicChatModel(anthropicChatApi,
AnthropicChatOptions.builder()
- .withTemperature(66.6f)
+ .withTemperature(66.6)
.withTopK(66)
- .withTopP(0.66f)
+ .withTopP(0.66)
.withMaxTokensToSample(666)
.withAnthropicVersion("X.Y.Z")
.withStopSequences(List.of("stop1", "stop2"))
@@ -51,17 +51,17 @@ public void createRequestWithChatOptions() {
var request = client.createRequest(new Prompt("Test message content"));
assertThat(request.prompt()).isNotEmpty();
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
assertThat(request.topK()).isEqualTo(66);
- assertThat(request.topP()).isEqualTo(0.66f);
+ assertThat(request.topP()).isEqualTo(0.66);
assertThat(request.maxTokensToSample()).isEqualTo(666);
assertThat(request.anthropicVersion()).isEqualTo("X.Y.Z");
assertThat(request.stopSequences()).containsExactly("stop1", "stop2");
request = client.createRequest(new Prompt("Test message content",
AnthropicChatOptions.builder()
- .withTemperature(99.9f)
- .withTopP(0.99f)
+ .withTemperature(99.9)
+ .withTopP(0.99)
.withMaxTokensToSample(999)
.withAnthropicVersion("zzz")
.withStopSequences(List.of("stop3", "stop4"))
@@ -70,9 +70,9 @@ public void createRequestWithChatOptions() {
));
assertThat(request.prompt()).isNotEmpty();
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
assertThat(request.topK()).as("unchanged from the default options").isEqualTo(66);
- assertThat(request.topP()).isEqualTo(0.99f);
+ assertThat(request.topP()).isEqualTo(0.99);
assertThat(request.maxTokensToSample()).isEqualTo(999);
assertThat(request.anthropicVersion()).isEqualTo("zzz");
assertThat(request.stopSequences()).containsExactly("stop3", "stop4");
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApiIT.java
index 334efa48ff..8f0efe45ac 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic/api/AnthropicChatBedrockApiIT.java
@@ -52,7 +52,7 @@ public void chatCompletion() {
AnthropicChatRequest request = AnthropicChatRequest
.builder(String.format(AnthropicChatBedrockApi.PROMPT_TEMPLATE, "Name 3 famous pirates"))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokensToSample(300)
.withTopK(10)
.build();
@@ -75,7 +75,7 @@ public void chatCompletionStream() {
AnthropicChatRequest request = AnthropicChatRequest
.builder(String.format(AnthropicChatBedrockApi.PROMPT_TEMPLATE, "Name 3 famous pirates"))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokensToSample(300)
.withTopK(10)
.withStopSequences(List.of("\n\nHuman:"))
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3CreateRequestTests.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3CreateRequestTests.java
index 31486f9e93..75551ca1cb 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3CreateRequestTests.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/BedrockAnthropic3CreateRequestTests.java
@@ -39,9 +39,9 @@ public void createRequestWithChatOptions() {
var client = new BedrockAnthropic3ChatModel(anthropicChatApi,
Anthropic3ChatOptions.builder()
- .withTemperature(66.6f)
+ .withTemperature(66.6)
.withTopK(66)
- .withTopP(0.66f)
+ .withTopP(0.66)
.withMaxTokens(666)
.withAnthropicVersion("X.Y.Z")
.withStopSequences(List.of("stop1", "stop2"))
@@ -50,17 +50,17 @@ public void createRequestWithChatOptions() {
var request = client.createRequest(new Prompt("Test message content"));
assertThat(request.messages()).isNotEmpty();
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
assertThat(request.topK()).isEqualTo(66);
- assertThat(request.topP()).isEqualTo(0.66f);
+ assertThat(request.topP()).isEqualTo(0.66);
assertThat(request.maxTokens()).isEqualTo(666);
assertThat(request.anthropicVersion()).isEqualTo("X.Y.Z");
assertThat(request.stopSequences()).containsExactly("stop1", "stop2");
request = client.createRequest(new Prompt("Test message content",
Anthropic3ChatOptions.builder()
- .withTemperature(99.9f)
- .withTopP(0.99f)
+ .withTemperature(99.9)
+ .withTopP(0.99)
.withMaxTokens(999)
.withAnthropicVersion("zzz")
.withStopSequences(List.of("stop3", "stop4"))
@@ -69,9 +69,9 @@ public void createRequestWithChatOptions() {
));
assertThat(request.messages()).isNotEmpty();
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
assertThat(request.topK()).as("unchanged from the default options").isEqualTo(66);
- assertThat(request.topP()).isEqualTo(0.99f);
+ assertThat(request.topP()).isEqualTo(0.99);
assertThat(request.maxTokens()).isEqualTo(999);
assertThat(request.anthropicVersion()).isEqualTo("zzz");
assertThat(request.stopSequences()).containsExactly("stop3", "stop4");
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApiIT.java
index 15ab3dd0f5..48b89af37f 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/anthropic3/api/Anthropic3ChatBedrockApiIT.java
@@ -57,7 +57,7 @@ public void chatCompletion() {
MediaContent anthropicMessage = new MediaContent("Name 3 famous pirates");
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage(List.of(anthropicMessage), Role.USER);
AnthropicChatRequest request = AnthropicChatRequest.builder(List.of(chatCompletionMessage))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokens(300)
.withTopK(10)
.withAnthropicVersion(DEFAULT_ANTHROPIC_VERSION)
@@ -97,7 +97,7 @@ public void chatMultiCompletion() {
AnthropicChatRequest request = AnthropicChatRequest
.builder(List.of(chatCompletionInitialMessage, chatCompletionAssistantMessage,
chatCompletionFollowupMessage))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokens(400)
.withTopK(10)
.withAnthropicVersion(DEFAULT_ANTHROPIC_VERSION)
@@ -123,7 +123,7 @@ public void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage(List.of(anthropicMessage), Role.USER);
AnthropicChatRequest request = AnthropicChatRequest.builder(List.of(chatCompletionMessage))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokens(300)
.withTopK(10)
.withAnthropicVersion(DEFAULT_ANTHROPIC_VERSION)
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatCreateRequestTests.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatCreateRequestTests.java
index c757efe04a..b6c0027da9 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatCreateRequestTests.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/BedrockCohereChatCreateRequestTests.java
@@ -47,9 +47,9 @@ public void createRequestWithChatOptions() {
var client = new BedrockCohereChatModel(chatApi,
BedrockCohereChatOptions.builder()
- .withTemperature(66.6f)
+ .withTemperature(66.6)
.withTopK(66)
- .withTopP(0.66f)
+ .withTopP(0.66)
.withMaxTokens(678)
.withStopSequences(List.of("stop1", "stop2"))
.withReturnLikelihoods(ReturnLikelihoods.ALL)
@@ -63,9 +63,9 @@ public void createRequestWithChatOptions() {
assertThat(request.prompt()).isNotEmpty();
assertThat(request.stream()).isTrue();
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
assertThat(request.topK()).isEqualTo(66);
- assertThat(request.topP()).isEqualTo(0.66f);
+ assertThat(request.topP()).isEqualTo(0.66);
assertThat(request.maxTokens()).isEqualTo(678);
assertThat(request.stopSequences()).containsExactly("stop1", "stop2");
assertThat(request.returnLikelihoods()).isEqualTo(ReturnLikelihoods.ALL);
@@ -75,9 +75,9 @@ public void createRequestWithChatOptions() {
request = client.createRequest(new Prompt("Test message content",
BedrockCohereChatOptions.builder()
- .withTemperature(99.9f)
+ .withTemperature(99.9)
.withTopK(99)
- .withTopP(0.99f)
+ .withTopP(0.99)
.withMaxTokens(888)
.withStopSequences(List.of("stop3", "stop4"))
.withReturnLikelihoods(ReturnLikelihoods.GENERATION)
@@ -92,9 +92,9 @@ public void createRequestWithChatOptions() {
assertThat(request.prompt()).isNotEmpty();
assertThat(request.stream()).isFalse();
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
assertThat(request.topK()).isEqualTo(99);
- assertThat(request.topP()).isEqualTo(0.99f);
+ assertThat(request.topP()).isEqualTo(0.99);
assertThat(request.maxTokens()).isEqualTo(888);
assertThat(request.stopSequences()).containsExactly("stop3", "stop4");
assertThat(request.returnLikelihoods()).isEqualTo(ReturnLikelihoods.GENERATION);
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApiIT.java
index 68447ea94a..287eec21fc 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/cohere/api/CohereChatBedrockApiIT.java
@@ -49,13 +49,13 @@ public class CohereChatBedrockApiIT {
public void requestBuilder() {
CohereChatRequest request1 = new CohereChatRequest(
- "What is the capital of Bulgaria and what is the size? What it the national anthem?", 0.5f, 0.9f, 15,
- 40, List.of("END"), CohereChatRequest.ReturnLikelihoods.ALL, false, 1, null, Truncate.NONE);
+ "What is the capital of Bulgaria and what is the size? What it the national anthem?", 0.5, 0.9, 15, 40,
+ List.of("END"), CohereChatRequest.ReturnLikelihoods.ALL, false, 1, null, Truncate.NONE);
var request2 = CohereChatRequest
.builder("What is the capital of Bulgaria and what is the size? What it the national anthem?")
- .withTemperature(0.5f)
- .withTopP(0.9f)
+ .withTemperature(0.5)
+ .withTopP(0.9)
.withTopK(15)
.withMaxTokens(40)
.withStopSequences(List.of("END"))
@@ -75,8 +75,8 @@ public void chatCompletion() {
var request = CohereChatRequest
.builder("What is the capital of Bulgaria and what is the size? What it the national anthem?")
.withStream(false)
- .withTemperature(0.5f)
- .withTopP(0.8f)
+ .withTemperature(0.5)
+ .withTopP(0.8)
.withTopK(15)
.withMaxTokens(100)
.withStopSequences(List.of("END"))
@@ -100,8 +100,8 @@ public void chatCompletionStream() {
var request = CohereChatRequest
.builder("What is the capital of Bulgaria and what is the size? What it the national anthem?")
.withStream(true)
- .withTemperature(0.5f)
- .withTopP(0.8f)
+ .withTemperature(0.5)
+ .withTopP(0.8)
.withTopK(15)
.withMaxTokens(100)
.withStopSequences(List.of("END"))
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModelIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModelIT.java
index 4366f5d7a6..c7a6419772 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModelIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/BedrockAi21Jurassic2ChatModelIT.java
@@ -157,9 +157,9 @@ public BedrockAi21Jurassic2ChatModel bedrockAi21Jurassic2ChatModel(
Ai21Jurassic2ChatBedrockApi jurassic2ChatBedrockApi) {
return new BedrockAi21Jurassic2ChatModel(jurassic2ChatBedrockApi,
BedrockAi21Jurassic2ChatOptions.builder()
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.withMaxTokens(100)
- .withTopP(0.9f)
+ .withTopP(0.9)
.build());
}
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApiIT.java
index 0360c0c7d2..f3dedde9ef 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/jurassic2/api/Ai21Jurassic2ChatBedrockApiIT.java
@@ -44,8 +44,8 @@ public class Ai21Jurassic2ChatBedrockApiIT {
@Test
public void chatCompletion() {
- Ai21Jurassic2ChatRequest request = new Ai21Jurassic2ChatRequest("Give me the names of 3 famous pirates?", 0.9f,
- 0.9f, 100, null, // List.of("END"),
+ Ai21Jurassic2ChatRequest request = new Ai21Jurassic2ChatRequest("Give me the names of 3 famous pirates?", 0.9,
+ 0.9, 100, null, // List.of("END"),
new Ai21Jurassic2ChatRequest.IntegerScalePenalty(1, true, true, true, true, true),
new Ai21Jurassic2ChatRequest.FloatScalePenalty(0.5f, true, true, true, true, true),
new Ai21Jurassic2ChatRequest.IntegerScalePenalty(1, true, true, true, true, true));
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModelIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModelIT.java
index 416b397783..c9239875eb 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModelIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaChatModelIT.java
@@ -208,7 +208,7 @@ public LlamaChatBedrockApi llamaApi() {
@Bean
public BedrockLlamaChatModel llamaChatModel(LlamaChatBedrockApi llamaApi) {
return new BedrockLlamaChatModel(llamaApi,
- BedrockLlamaChatOptions.builder().withTemperature(0.5f).withMaxGenLen(100).withTopP(0.9f).build());
+ BedrockLlamaChatOptions.builder().withTemperature(0.5).withMaxGenLen(100).withTopP(0.9).build());
}
}
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaCreateRequestTests.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaCreateRequestTests.java
index 4bd48680d2..cac93c7279 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaCreateRequestTests.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/BedrockLlamaCreateRequestTests.java
@@ -46,7 +46,7 @@ public class BedrockLlamaCreateRequestTests {
public void createRequestWithChatOptions() {
var client = new BedrockLlamaChatModel(api,
- BedrockLlamaChatOptions.builder().withTemperature(66.6f).withMaxGenLen(666).withTopP(0.66f).build());
+ BedrockLlamaChatOptions.builder().withTemperature(66.6).withMaxGenLen(666).withTopP(0.66).build());
var request = client.createRequest(new Prompt("Test message content"));
@@ -56,7 +56,7 @@ public void createRequestWithChatOptions() {
assertThat(request.maxGenLen()).isEqualTo(666);
request = client.createRequest(new Prompt("Test message content",
- BedrockLlamaChatOptions.builder().withTemperature(99.9f).withMaxGenLen(999).withTopP(0.99f).build()));
+ BedrockLlamaChatOptions.builder().withTemperature(99.9).withMaxGenLen(999).withTopP(0.99).build()));
assertThat(request.prompt()).isNotEmpty();
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApiIT.java
index 5b4587358f..48844670c0 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/llama/api/LlamaChatBedrockApiIT.java
@@ -48,8 +48,8 @@ public class LlamaChatBedrockApiIT {
public void chatCompletion() {
LlamaChatRequest request = LlamaChatRequest.builder("Hello, my name is")
- .withTemperature(0.9f)
- .withTopP(0.9f)
+ .withTemperature(0.9)
+ .withTopP(0.9)
.withMaxGenLen(20)
.build();
@@ -67,7 +67,7 @@ public void chatCompletion() {
@Test
public void chatCompletionStream() {
- LlamaChatRequest request = new LlamaChatRequest("Hello, my name is", 0.9f, 0.9f, 20);
+ LlamaChatRequest request = new LlamaChatRequest("Hello, my name is", 0.9, 0.9, 20);
Flux responseStream = llamaChatApi.chatCompletionStream(request);
List responses = responseStream.collectList().block();
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModelCreateRequestTests.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModelCreateRequestTests.java
index af0522de63..90705ecc25 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModelCreateRequestTests.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/BedrockTitanChatModelCreateRequestTests.java
@@ -43,8 +43,8 @@ public void createRequestWithChatOptions() {
var model = new BedrockTitanChatModel(api,
BedrockTitanChatOptions.builder()
- .withTemperature(66.6f)
- .withTopP(0.66f)
+ .withTemperature(66.6)
+ .withTopP(0.66)
.withMaxTokenCount(666)
.withStopSequences(List.of("stop1", "stop2"))
.build());
@@ -52,15 +52,15 @@ public void createRequestWithChatOptions() {
var request = model.createRequest(new Prompt("Test message content"));
assertThat(request.inputText()).isNotEmpty();
- assertThat(request.textGenerationConfig().temperature()).isEqualTo(66.6f);
- assertThat(request.textGenerationConfig().topP()).isEqualTo(0.66f);
+ assertThat(request.textGenerationConfig().temperature()).isEqualTo(66.6);
+ assertThat(request.textGenerationConfig().topP()).isEqualTo(0.66);
assertThat(request.textGenerationConfig().maxTokenCount()).isEqualTo(666);
assertThat(request.textGenerationConfig().stopSequences()).containsExactly("stop1", "stop2");
request = model.createRequest(new Prompt("Test message content",
BedrockTitanChatOptions.builder()
- .withTemperature(99.9f)
- .withTopP(0.99f)
+ .withTemperature(99.9)
+ .withTopP(0.99)
.withMaxTokenCount(999)
.withStopSequences(List.of("stop3", "stop4"))
.build()
@@ -68,8 +68,8 @@ public void createRequestWithChatOptions() {
));
assertThat(request.inputText()).isNotEmpty();
- assertThat(request.textGenerationConfig().temperature()).isEqualTo(99.9f);
- assertThat(request.textGenerationConfig().topP()).isEqualTo(0.99f);
+ assertThat(request.textGenerationConfig().temperature()).isEqualTo(99.9);
+ assertThat(request.textGenerationConfig().topP()).isEqualTo(0.99);
assertThat(request.textGenerationConfig().maxTokenCount()).isEqualTo(999);
assertThat(request.textGenerationConfig().stopSequences()).containsExactly("stop3", "stop4");
}
diff --git a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApiIT.java b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApiIT.java
index 4a05080b05..453e84490e 100644
--- a/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApiIT.java
+++ b/models/spring-ai-bedrock/src/test/java/org/springframework/ai/bedrock/titan/api/TitanChatBedrockApiIT.java
@@ -45,8 +45,8 @@ public class TitanChatBedrockApiIT {
Duration.ofMinutes(2));
TitanChatRequest titanChatRequest = TitanChatRequest.builder("Give me the names of 3 famous pirates?")
- .withTemperature(0.5f)
- .withTopP(0.9f)
+ .withTemperature(0.5)
+ .withTopP(0.9)
.withMaxTokenCount(100)
.withStopSequences(List.of("|"))
.build();
diff --git a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java
index da23e253fb..d6d647c797 100644
--- a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java
+++ b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java
@@ -99,7 +99,7 @@ public class MiniMaxChatModel extends AbstractToolCallSupport implements ChatMod
*/
public MiniMaxChatModel(MiniMaxApi miniMaxApi) {
this(miniMaxApi,
- MiniMaxChatOptions.builder().withModel(MiniMaxApi.DEFAULT_CHAT_MODEL).withTemperature(0.7f).build());
+ MiniMaxChatOptions.builder().withModel(MiniMaxApi.DEFAULT_CHAT_MODEL).withTemperature(0.7).build());
}
/**
diff --git a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatOptions.java b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatOptions.java
index 9626004e35..31cae5791f 100644
--- a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatOptions.java
+++ b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatOptions.java
@@ -54,7 +54,7 @@ public class MiniMaxChatOptions implements FunctionCallingOptions, ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing
* frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
*/
- private @JsonProperty("frequency_penalty") Float frequencyPenalty;
+ private @JsonProperty("frequency_penalty") Double frequencyPenalty;
/**
* The maximum number of tokens to generate in the chat completion. The total length of input
* tokens and generated tokens is limited by the model's context length.
@@ -69,7 +69,7 @@ public class MiniMaxChatOptions implements FunctionCallingOptions, ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they
* appear in the text so far, increasing the model's likelihood to talk about new topics.
*/
- private @JsonProperty("presence_penalty") Float presencePenalty;
+ private @JsonProperty("presence_penalty") Double presencePenalty;
/**
* An object specifying the format that the model must output. Setting to { "type":
* "json_object" } enables JSON mode, which guarantees the message the model generates is valid JSON.
@@ -92,13 +92,13 @@ public class MiniMaxChatOptions implements FunctionCallingOptions, ChatOptions {
* more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend
* altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the
* results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
* probability mass are considered. We generally recommend altering this or temperature but not both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* Mask the text information in the output that is easy to involve privacy issues,
* including but not limited to email, domain name, link, ID number, home address, etc.
@@ -165,7 +165,7 @@ public Builder withModel(String model) {
return this;
}
- public Builder withFrequencyPenalty(Float frequencyPenalty) {
+ public Builder withFrequencyPenalty(Double frequencyPenalty) {
this.options.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -180,7 +180,7 @@ public Builder withN(Integer n) {
return this;
}
- public Builder withPresencePenalty(Float presencePenalty) {
+ public Builder withPresencePenalty(Double presencePenalty) {
this.options.presencePenalty = presencePenalty;
return this;
}
@@ -200,12 +200,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -258,11 +258,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -284,11 +284,11 @@ public void setN(Integer n) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -328,20 +328,20 @@ public void setStop(List stop) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/api/MiniMaxApi.java b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/api/MiniMaxApi.java
index af040f8536..d631f25355 100644
--- a/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/api/MiniMaxApi.java
+++ b/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/api/MiniMaxApi.java
@@ -45,6 +45,7 @@
* MiniMax Embedding API.
*
* @author Geng Rong
+ * @author Thomas Vitale
* @since 1.0.0 M1
*/
public class MiniMaxApi {
@@ -247,16 +248,16 @@ public Function(String description, String name, Map parameters)
public record ChatCompletionRequest (
@JsonProperty("messages") List messages,
@JsonProperty("model") String model,
- @JsonProperty("frequency_penalty") Float frequencyPenalty,
+ @JsonProperty("frequency_penalty") Double frequencyPenalty,
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("n") Integer n,
- @JsonProperty("presence_penalty") Float presencePenalty,
+ @JsonProperty("presence_penalty") Double presencePenalty,
@JsonProperty("response_format") ResponseFormat responseFormat,
@JsonProperty("seed") Integer seed,
@JsonProperty("stop") List stop,
@JsonProperty("stream") Boolean stream,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("mask_sensitive_info") Boolean maskSensitiveInfo,
@JsonProperty("tools") List tools,
@JsonProperty("tool_choice") Object toolChoice) {
@@ -268,7 +269,7 @@ public record ChatCompletionRequest (
* @param model ID of the model to use.
* @param temperature What sampling temperature to use, between 0 and 1.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature) {
+ public ChatCompletionRequest(List messages, String model, Double temperature) {
this(messages, model, null, null, null, null,
null, null, null, false, temperature, null,null,
null, null);
@@ -283,7 +284,7 @@ public ChatCompletionRequest(List messages, String model,
* @param stream If set, partial message deltas will be sent.Tokens will be sent as data-only server-sent events
* as they become available, with the stream terminated by a data: [DONE] message.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature, boolean stream) {
+ public ChatCompletionRequest(List messages, String model, Double temperature, boolean stream) {
this(messages, model, null, null, null, null,
null, null, null, stream, temperature, null,null,
null, null);
@@ -301,7 +302,7 @@ public ChatCompletionRequest(List messages, String model,
public ChatCompletionRequest(List messages, String model,
List tools, Object toolChoice) {
this(messages, model, null, null, null, null,
- null, null, null, false, 0.8f, null,null,
+ null, null, null, false, 0.8, null,null,
tools, toolChoice);
}
diff --git a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/ChatCompletionRequestTests.java b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/ChatCompletionRequestTests.java
index 3232836c58..221a6bccbd 100644
--- a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/ChatCompletionRequestTests.java
+++ b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/ChatCompletionRequestTests.java
@@ -34,7 +34,7 @@ public class ChatCompletionRequestTests {
public void createRequestWithChatOptions() {
var client = new MiniMaxChatModel(new MiniMaxApi("TEST"),
- MiniMaxChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ MiniMaxChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
var request = client.createRequest(new Prompt("Test message content"), false);
@@ -42,16 +42,16 @@ public void createRequestWithChatOptions() {
assertThat(request.stream()).isFalse();
assertThat(request.model()).isEqualTo("DEFAULT_MODEL");
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
request = client.createRequest(new Prompt("Test message content",
- MiniMaxChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()), true);
+ MiniMaxChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()), true);
assertThat(request.messages()).hasSize(1);
assertThat(request.stream()).isTrue();
assertThat(request.model()).isEqualTo("PROMPT_MODEL");
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
}
@Test
diff --git a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiIT.java b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiIT.java
index 7afb2d31b3..60812302da 100644
--- a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiIT.java
+++ b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiIT.java
@@ -43,7 +43,7 @@ public class MiniMaxApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = miniMaxApi
- .chatCompletionEntity(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-4-air", 0.7f, false));
+ .chatCompletionEntity(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-4-air", 0.7, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -53,7 +53,7 @@ void chatCompletionEntity() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = miniMaxApi
- .chatCompletionStream(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-4-air", 0.7f, true));
+ .chatCompletionStream(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-4-air", 0.7, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiToolFunctionCallIT.java b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiToolFunctionCallIT.java
index 7f599b8e06..d878a898d5 100644
--- a/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiToolFunctionCallIT.java
+++ b/models/spring-ai-minimax/src/test/java/org/springframework/ai/minimax/api/MiniMaxApiToolFunctionCallIT.java
@@ -117,7 +117,7 @@ public void toolFunctionCall() {
}
var functionResponseRequest = new ChatCompletionRequest(messages,
- org.springframework.ai.minimax.api.MiniMaxApi.ChatModel.ABAB_6_5_Chat.getValue(), 0.5F);
+ org.springframework.ai.minimax.api.MiniMaxApi.ChatModel.ABAB_6_5_Chat.getValue(), 0.5);
ResponseEntity chatCompletion2 = miniMaxApi.chatCompletionEntity(functionResponseRequest);
diff --git a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java
index afc6627fdd..ceea2e869d 100644
--- a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java
+++ b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java
@@ -102,8 +102,8 @@ public class MistralAiChatModel extends AbstractToolCallSupport implements ChatM
public MistralAiChatModel(MistralAiApi mistralAiApi) {
this(mistralAiApi,
MistralAiChatOptions.builder()
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withSafePrompt(false)
.withModel(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.build());
diff --git a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatOptions.java b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatOptions.java
index 3a5523f2b5..dc4fcdc6dc 100644
--- a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatOptions.java
+++ b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatOptions.java
@@ -52,7 +52,7 @@ public class MistralAiChatOptions implements FunctionCallingOptions, ChatOptions
* make the output more random, while lower values like 0.2 will make it more focused
* and deterministic. We generally recommend altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* Nucleus sampling, where the model considers the results of the tokens with top_p
@@ -60,7 +60,7 @@ public class MistralAiChatOptions implements FunctionCallingOptions, ChatOptions
* mass are considered. We generally recommend altering this or temperature but not
* both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* The maximum number of tokens to generate in the completion. The token count of your
@@ -173,12 +173,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -299,20 +299,20 @@ public ToolChoice getToolChoice() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -340,13 +340,13 @@ public void setFunctions(Set functions) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/api/MistralAiApi.java b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/api/MistralAiApi.java
index 3b034da97f..a6e156f464 100644
--- a/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/api/MistralAiApi.java
+++ b/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/api/MistralAiApi.java
@@ -378,8 +378,8 @@ public record ChatCompletionRequest(
@JsonProperty("messages") List messages,
@JsonProperty("tools") List tools,
@JsonProperty("tool_choice") ToolChoice toolChoice,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("stream") Boolean stream,
@JsonProperty("safe_prompt") Boolean safePrompt,
@@ -396,7 +396,7 @@ public record ChatCompletionRequest(
* @param model ID of the model to use.
*/
public ChatCompletionRequest(List messages, String model) {
- this(model, messages, null, null, 0.7f, 1f, null, false, false, null, null, null);
+ this(model, messages, null, null, 0.7, 1.0, null, false, false, null, null, null);
}
/**
@@ -409,9 +409,9 @@ public ChatCompletionRequest(List messages, String model)
* @param stream Whether to stream back partial progress. If set, tokens will be
* sent
*/
- public ChatCompletionRequest(List messages, String model, Float temperature,
+ public ChatCompletionRequest(List messages, String model, Double temperature,
boolean stream) {
- this(model, messages, null, null, temperature, 1f, null, stream, false, null, null, null);
+ this(model, messages, null, null, temperature, 1.0, null, stream, false, null, null, null);
}
/**
@@ -423,8 +423,8 @@ public ChatCompletionRequest(List messages, String model,
* @param temperature What sampling temperature to use, between 0.0 and 1.0.
*
*/
- public ChatCompletionRequest(List messages, String model, Float temperature) {
- this(model, messages, null, null, temperature, 1f, null, false, false, null, null, null);
+ public ChatCompletionRequest(List messages, String model, Double temperature) {
+ this(model, messages, null, null, temperature, 1.0, null, false, false, null, null, null);
}
/**
@@ -439,7 +439,7 @@ public ChatCompletionRequest(List messages, String model,
*/
public ChatCompletionRequest(List messages, String model, List tools,
ToolChoice toolChoice) {
- this(model, messages, tools, toolChoice, null, 1f, null, false, false, null, null, null);
+ this(model, messages, tools, toolChoice, null, 1.0, null, false, false, null, null, null);
}
/**
@@ -447,7 +447,7 @@ public ChatCompletionRequest(List messages, String model,
* stream.
*/
public ChatCompletionRequest(List messages, Boolean stream) {
- this(null, messages, null, null, 0.7f, 1f, null, stream, false, null, null, null);
+ this(null, messages, null, null, 0.7, 1.0, null, stream, false, null, null, null);
}
/**
diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatCompletionRequestTest.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatCompletionRequestTest.java
index 893f4d43a7..29ffdb75a4 100644
--- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatCompletionRequestTest.java
+++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatCompletionRequestTest.java
@@ -41,7 +41,7 @@ void chatCompletionDefaultRequestTest() {
assertThat(request.messages()).hasSize(1);
assertThat(request.topP()).isEqualTo(1);
- assertThat(request.temperature()).isEqualTo(0.7f);
+ assertThat(request.temperature()).isEqualTo(0.7);
assertThat(request.safePrompt()).isFalse();
assertThat(request.maxTokens()).isNull();
assertThat(request.stream()).isFalse();
@@ -50,13 +50,13 @@ void chatCompletionDefaultRequestTest() {
@Test
void chatCompletionRequestWithOptionsTest() {
- var options = MistralAiChatOptions.builder().withTemperature(0.5f).withTopP(0.8f).build();
+ var options = MistralAiChatOptions.builder().withTemperature(0.5).withTopP(0.8).build();
var request = chatModel.createRequest(new Prompt("test content", options), true);
assertThat(request.messages().size()).isEqualTo(1);
- assertThat(request.topP()).isEqualTo(0.8f);
- assertThat(request.temperature()).isEqualTo(0.5f);
+ assertThat(request.topP()).isEqualTo(0.8);
+ assertThat(request.temperature()).isEqualTo(0.5);
assertThat(request.stream()).isTrue();
}
diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelObservationIT.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelObservationIT.java
index e679be2405..00f56ba0da 100644
--- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelObservationIT.java
+++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelObservationIT.java
@@ -70,8 +70,8 @@ void observationForChatOperation() {
.withModel(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.withMaxTokens(2048)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
@@ -91,8 +91,8 @@ void observationForStreamingChatOperation() {
.withModel(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.withMaxTokens(2048)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiRetryTests.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiRetryTests.java
index e2f765ec28..8e2d4a7dd3 100644
--- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiRetryTests.java
+++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiRetryTests.java
@@ -96,8 +96,8 @@ public void beforeEach() {
chatModel = new MistralAiChatModel(mistralAiApi,
MistralAiChatOptions.builder()
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withSafePrompt(false)
.withModel(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.build(),
diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/MistralAiApiIT.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/MistralAiApiIT.java
index a76b40d6b8..523ac4df2d 100644
--- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/MistralAiApiIT.java
+++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/MistralAiApiIT.java
@@ -46,7 +46,7 @@ public class MistralAiApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = mistralAiApi.chatCompletionEntity(new ChatCompletionRequest(
- List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8f, false));
+ List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -63,7 +63,7 @@ void chatCompletionEntityWithSystemMessage() {
""", Role.SYSTEM);
ResponseEntity response = mistralAiApi.chatCompletionEntity(new ChatCompletionRequest(
- List.of(systemMessage, userMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8f, false));
+ List.of(systemMessage, userMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -73,7 +73,7 @@ void chatCompletionEntityWithSystemMessage() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = mistralAiApi.chatCompletionStream(new ChatCompletionRequest(
- List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8f, true));
+ List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/tool/MistralAiApiToolFunctionCallIT.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/tool/MistralAiApiToolFunctionCallIT.java
index b52c9da5dd..4d23255a6b 100644
--- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/tool/MistralAiApiToolFunctionCallIT.java
+++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/api/tool/MistralAiApiToolFunctionCallIT.java
@@ -131,7 +131,7 @@ public void toolFunctionCall() throws JsonProcessingException {
}
}
- var functionResponseRequest = new ChatCompletionRequest(messages, MISTRAL_AI_CHAT_MODEL, 0.8f);
+ var functionResponseRequest = new ChatCompletionRequest(messages, MISTRAL_AI_CHAT_MODEL, 0.8);
ResponseEntity chatCompletion2 = completionApi
.chatCompletionEntity(functionResponseRequest);
diff --git a/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/MoonshotChatOptions.java b/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/MoonshotChatOptions.java
index 9c9948520c..4bf51bca52 100644
--- a/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/MoonshotChatOptions.java
+++ b/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/MoonshotChatOptions.java
@@ -53,7 +53,7 @@ public class MoonshotChatOptions implements FunctionCallingOptions, ChatOptions
* make the output more random, while lower values like 0.2 will make it more focused
* and deterministic. We generally recommend altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the
@@ -61,7 +61,7 @@ public class MoonshotChatOptions implements FunctionCallingOptions, ChatOptions
* only the tokens comprising the top 10% probability mass are considered. We
* generally recommend altering this or temperature but not both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* How many chat completion choices to generate for each input message. Note that you
@@ -75,14 +75,14 @@ public class MoonshotChatOptions implements FunctionCallingOptions, ChatOptions
* they appear in the text so far, increasing the model's likelihood to talk about new
* topics.
*/
- private @JsonProperty("presence_penalty") Float presencePenalty;
+ private @JsonProperty("presence_penalty") Double presencePenalty;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their
* existing frequency in the text so far, decreasing the model's likelihood to repeat
* the same line verbatim.
*/
- private @JsonProperty("frequency_penalty") Float frequencyPenalty;
+ private @JsonProperty("frequency_penalty") Double frequencyPenalty;
/**
* Up to 5 sequences where the API will stop generating further tokens.
@@ -182,12 +182,12 @@ public Builder withMaxTokens(Integer maxTokens) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -197,12 +197,12 @@ public Builder withN(Integer n) {
return this;
}
- public Builder withPresencePenalty(Float presencePenalty) {
+ public Builder withPresencePenalty(Double presencePenalty) {
this.options.presencePenalty = presencePenalty;
return this;
}
- public Builder withFrequencyPenalty(Float frequencyPenalty) {
+ public Builder withFrequencyPenalty(Double frequencyPenalty) {
this.options.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -260,11 +260,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -286,11 +286,11 @@ public void setN(Integer n) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -314,20 +314,20 @@ public void setStop(List stop) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/api/MoonshotApi.java b/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/api/MoonshotApi.java
index 350644d389..43050b0252 100644
--- a/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/api/MoonshotApi.java
+++ b/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/api/MoonshotApi.java
@@ -48,6 +48,7 @@
*
*
* @author Geng Rong
+ * @author Thomas Vitale
*/
public class MoonshotApi {
@@ -155,11 +156,11 @@ public record ChatCompletionRequest(
@JsonProperty("messages") List messages,
@JsonProperty("model") String model,
@JsonProperty("max_tokens") Integer maxTokens,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("n") Integer n,
- @JsonProperty("frequency_penalty") Float frequencyPenalty,
- @JsonProperty("presence_penalty") Float presencePenalty,
+ @JsonProperty("frequency_penalty") Double frequencyPenalty,
+ @JsonProperty("presence_penalty") Double presencePenalty,
@JsonProperty("stop") List stop,
@JsonProperty("stream") Boolean stream,
@JsonProperty("tools") List tools,
@@ -174,7 +175,7 @@ public record ChatCompletionRequest(
* @param model ID of the model to use.
*/
public ChatCompletionRequest(List messages, String model) {
- this(messages, model, null, 0.3f, 1f, null, null, null, null, false, null, null);
+ this(messages, model, null, 0.3, 1.0, null, null, null, null, false, null, null);
}
/**
@@ -187,9 +188,9 @@ public ChatCompletionRequest(List messages, String model)
* @param stream Whether to stream back partial progress. If set, tokens will be
* sent
*/
- public ChatCompletionRequest(List messages, String model, Float temperature,
+ public ChatCompletionRequest(List messages, String model, Double temperature,
boolean stream) {
- this(messages, model, null, temperature, 1f, null, null, null, null, stream, null, null);
+ this(messages, model, null, temperature, 1.0, null, null, null, null, stream, null, null);
}
/**
@@ -200,8 +201,8 @@ public ChatCompletionRequest(List messages, String model,
* @param model ID of the model to use.
* @param temperature What sampling temperature to use, between 0.0 and 1.0.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature) {
- this(messages, model, null, temperature, 1f, null, null, null, null, false, null, null);
+ public ChatCompletionRequest(List messages, String model, Double temperature) {
+ this(messages, model, null, temperature, 1.0, null, null, null, null, false, null, null);
}
/**
@@ -216,7 +217,7 @@ public ChatCompletionRequest(List messages, String model,
*/
public ChatCompletionRequest(List messages, String model, List tools,
Object toolChoice) {
- this(messages, model, null, null, 1f, null, null, null, null, false, tools, toolChoice);
+ this(messages, model, null, null, 1.0, null, null, null, null, false, tools, toolChoice);
}
/**
@@ -224,7 +225,7 @@ public ChatCompletionRequest(List messages, String model,
* stream.
*/
public ChatCompletionRequest(List messages, Boolean stream) {
- this(messages, DEFAULT_CHAT_MODEL, null, 0.7f, 1F, null, null, null, null, stream, null, null);
+ this(messages, DEFAULT_CHAT_MODEL, null, 0.7, 1.0, null, null, null, null, stream, null, null);
}
/**
diff --git a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotChatCompletionRequestTest.java b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotChatCompletionRequestTest.java
index 568c4142e1..c66551bb1a 100644
--- a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotChatCompletionRequestTest.java
+++ b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotChatCompletionRequestTest.java
@@ -45,12 +45,12 @@ void chatCompletionDefaultRequestTest() {
@Test
void chatCompletionRequestWithOptionsTest() {
- var options = MoonshotChatOptions.builder().withTemperature(0.5f).withTopP(0.8f).build();
+ var options = MoonshotChatOptions.builder().withTemperature(0.5).withTopP(0.8).build();
var request = chatModel.createRequest(new Prompt("test content", options), true);
assertThat(request.messages().size()).isEqualTo(1);
- assertThat(request.topP()).isEqualTo(0.8f);
- assertThat(request.temperature()).isEqualTo(0.5f);
+ assertThat(request.topP()).isEqualTo(0.8);
+ assertThat(request.temperature()).isEqualTo(0.5);
assertThat(request.stream()).isTrue();
}
diff --git a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotRetryTests.java b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotRetryTests.java
index da0e8663f7..591819880f 100644
--- a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotRetryTests.java
+++ b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/MoonshotRetryTests.java
@@ -85,8 +85,8 @@ public void beforeEach() {
chatModel = new MoonshotChatModel(moonshotApi,
MoonshotChatOptions.builder()
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withModel(MoonshotApi.ChatModel.MOONSHOT_V1_32K.getValue())
.build(),
null, retryTemplate);
diff --git a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiIT.java b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiIT.java
index 9498b38d99..6c6166db16 100644
--- a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiIT.java
+++ b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiIT.java
@@ -41,7 +41,7 @@ public class MoonshotApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = moonshotApi.chatCompletionEntity(new ChatCompletionRequest(
- List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8f, false));
+ List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -58,7 +58,7 @@ void chatCompletionEntityWithSystemMessage() {
""", Role.SYSTEM);
ResponseEntity response = moonshotApi.chatCompletionEntity(new ChatCompletionRequest(
- List.of(systemMessage, userMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8f, false));
+ List.of(systemMessage, userMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -68,7 +68,7 @@ void chatCompletionEntityWithSystemMessage() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = moonshotApi.chatCompletionStream(new ChatCompletionRequest(
- List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8f, true));
+ List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.8, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiToolFunctionCallIT.java b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiToolFunctionCallIT.java
index 6192b130c1..7c3764afe5 100644
--- a/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiToolFunctionCallIT.java
+++ b/models/spring-ai-moonshot/src/test/java/org/springframework/ai/moonshot/api/MoonshotApiToolFunctionCallIT.java
@@ -125,7 +125,7 @@ private void toolFunctionCall(String userMessage, String cityName) {
}
var functionResponseRequest = new ChatCompletionRequest(messages,
- MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.5F);
+ MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.5);
ResponseEntity chatCompletion2 = moonshotApi.chatCompletionEntity(functionResponseRequest);
diff --git a/models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/api/OllamaOptions.java b/models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/api/OllamaOptions.java
index 2105f74102..3fd22d03e4 100644
--- a/models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/api/OllamaOptions.java
+++ b/models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/api/OllamaOptions.java
@@ -171,7 +171,7 @@ public class OllamaOptions implements FunctionCallingOptions, ChatOptions, Embed
* more diverse text, while a lower value (e.g., 0.5) will generate more focused and
* conservative text. (Default: 0.9)
*/
- @JsonProperty("top_p") private Float topP;
+ @JsonProperty("top_p") private Double topP;
/**
* Tail free sampling is used to reduce the impact of less probable tokens
@@ -195,24 +195,24 @@ public class OllamaOptions implements FunctionCallingOptions, ChatOptions, Embed
* The temperature of the model. Increasing the temperature will
* make the model answer more creatively. (Default: 0.8)
*/
- @JsonProperty("temperature") private Float temperature;
+ @JsonProperty("temperature") private Double temperature;
/**
* Sets how strongly to penalize repetitions. A higher value
* (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g.,
* 0.9) will be more lenient. (Default: 1.1)
*/
- @JsonProperty("repeat_penalty") private Float repeatPenalty;
+ @JsonProperty("repeat_penalty") private Double repeatPenalty;
/**
* (Default: 0.0)
*/
- @JsonProperty("presence_penalty") private Float presencePenalty;
+ @JsonProperty("presence_penalty") private Double presencePenalty;
/**
* (Default: 0.0)
*/
- @JsonProperty("frequency_penalty") private Float frequencyPenalty;
+ @JsonProperty("frequency_penalty") private Double frequencyPenalty;
/**
* Enable Mirostat sampling for controlling perplexity. (default: 0, 0
@@ -414,7 +414,7 @@ public OllamaOptions withTopK(Integer topK) {
return this;
}
- public OllamaOptions withTopP(Float topP) {
+ public OllamaOptions withTopP(Double topP) {
this.topP = topP;
return this;
}
@@ -434,22 +434,22 @@ public OllamaOptions withRepeatLastN(Integer repeatLastN) {
return this;
}
- public OllamaOptions withTemperature(Float temperature) {
+ public OllamaOptions withTemperature(Double temperature) {
this.temperature = temperature;
return this;
}
- public OllamaOptions withRepeatPenalty(Float repeatPenalty) {
+ public OllamaOptions withRepeatPenalty(Double repeatPenalty) {
this.repeatPenalty = repeatPenalty;
return this;
}
- public OllamaOptions withPresencePenalty(Float presencePenalty) {
+ public OllamaOptions withPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
return this;
}
- public OllamaOptions withFrequencyPenalty(Float frequencyPenalty) {
+ public OllamaOptions withFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -664,11 +664,11 @@ public void setTopK(Integer topK) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -697,37 +697,37 @@ public void setRepeatLastN(Integer repeatLastN) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
- public Float getRepeatPenalty() {
+ public Double getRepeatPenalty() {
return this.repeatPenalty;
}
- public void setRepeatPenalty(Float repeatPenalty) {
+ public void setRepeatPenalty(Double repeatPenalty) {
this.repeatPenalty = repeatPenalty;
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelFunctionCallingIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelFunctionCallingIT.java
index a790a8a898..eee175c549 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelFunctionCallingIT.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelFunctionCallingIT.java
@@ -137,7 +137,7 @@ public OllamaApi ollamaApi() {
@Bean
public OllamaChatModel ollamaChat(OllamaApi ollamaApi) {
- return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9f));
+ return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9));
}
}
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelIT.java
index 8cd56feadf..be403b3503 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelIT.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelIT.java
@@ -86,7 +86,7 @@ void roleTest() {
UserMessage userMessage = new UserMessage("Tell me about 5 famous pirates from the Golden Age of Piracy.");
// portable/generic options
- var portableOptions = ChatOptionsBuilder.builder().withTemperature(0.7f).build();
+ var portableOptions = ChatOptionsBuilder.builder().withTemperature(0.7).build();
Prompt prompt = new Prompt(List.of(userMessage, systemMessage), portableOptions);
@@ -240,7 +240,7 @@ public OllamaApi ollamaApi() {
@Bean
public OllamaChatModel ollamaChat(OllamaApi ollamaApi) {
- return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9f));
+ return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9));
}
}
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java
index 06964c10dd..f58552f869 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java
@@ -91,7 +91,7 @@ public OllamaApi ollamaApi() {
@Bean
public OllamaChatModel ollamaChat(OllamaApi ollamaApi) {
- return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9f));
+ return new OllamaChatModel(ollamaApi, OllamaOptions.create().withModel(MODEL).withTemperature(0.9));
}
}
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelObservationIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelObservationIT.java
index 25d5f9960b..75da0658dc 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelObservationIT.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelObservationIT.java
@@ -67,13 +67,13 @@ void beforeEach() {
void observationForChatOperation() {
var options = OllamaOptions.builder()
.withModel(OllamaModel.MISTRAL.getName())
- .withFrequencyPenalty(0f)
+ .withFrequencyPenalty(0.0)
.withNumPredict(2048)
- .withPresencePenalty(0f)
+ .withPresencePenalty(0.0)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withTopK(1)
- .withTopP(1f)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
@@ -91,13 +91,13 @@ void observationForChatOperation() {
void observationForStreamingChatOperation() {
var options = OllamaOptions.builder()
.withModel(OllamaModel.MISTRAL.getName())
- .withFrequencyPenalty(0f)
+ .withFrequencyPenalty(0.0)
.withNumPredict(2048)
- .withPresencePenalty(0f)
+ .withPresencePenalty(0.0)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withTopK(1)
- .withTopP(1f)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatRequestTests.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatRequestTests.java
index 8dffb29bf1..b9b7bc2e6e 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatRequestTests.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatRequestTests.java
@@ -30,7 +30,7 @@
public class OllamaChatRequestTests {
OllamaChatModel chatModel = new OllamaChatModel(new OllamaApi(),
- new OllamaOptions().withModel("MODEL_NAME").withTopK(99).withTemperature(66.6f).withNumGPU(1));
+ new OllamaOptions().withModel("MODEL_NAME").withTopK(99).withTemperature(66.6).withNumGPU(1));
@Test
public void createRequestWithDefaultOptions() {
@@ -51,7 +51,7 @@ public void createRequestWithDefaultOptions() {
public void createRequestWithPromptOllamaOptions() {
// Runtime options should override the default options.
- OllamaOptions promptOptions = new OllamaOptions().withTemperature(0.8f).withTopP(0.5f).withNumGPU(2);
+ OllamaOptions promptOptions = new OllamaOptions().withTemperature(0.8).withTopP(0.5).withNumGPU(2);
var request = chatModel.ollamaChatRequest(new Prompt("Test message content", promptOptions), true);
@@ -73,9 +73,9 @@ public void createRequestWithPromptPortableChatOptions() {
// Ollama runtime options.
ChatOptions portablePromptOptions = ChatOptionsBuilder.builder()
- .withTemperature(0.9f)
+ .withTemperature(0.9)
.withTopK(100)
- .withTopP(0.6f)
+ .withTopP(0.6)
.build();
var request = chatModel.ollamaChatRequest(new Prompt("Test message content", portablePromptOptions), true);
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaApiIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaApiIT.java
index e7a336a423..af050bee4d 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaApiIT.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaApiIT.java
@@ -38,8 +38,6 @@
import static org.assertj.core.api.Assertions.assertThat;
-;
-
/**
* @author Christian Tzolov
* @author Thomas Vitale
@@ -97,7 +95,7 @@ public void chat() {
.withContent("What is the capital of Bulgaria and what is the size? "
+ "What it the national anthem?")
.build()))
- .withOptions(OllamaOptions.create().withTemperature(0.9f))
+ .withOptions(OllamaOptions.create().withTemperature(0.9))
.build();
ChatResponse response = ollamaApi.chat(request);
@@ -119,7 +117,7 @@ public void streamingChat() {
.withMessages(List.of(Message.builder(Role.USER)
.withContent("What is the capital of Bulgaria and what is the size? " + "What it the national anthem?")
.build()))
- .withOptions(OllamaOptions.create().withTemperature(0.9f).toMap())
+ .withOptions(OllamaOptions.create().withTemperature(0.9).toMap())
.build();
Flux response = ollamaApi.streamingChat(request);
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaModelOptionsTests.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaModelOptionsTests.java
index fd5b6a0094..faffdf2423 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaModelOptionsTests.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/api/OllamaModelOptionsTests.java
@@ -28,7 +28,7 @@ public class OllamaModelOptionsTests {
@Test
public void testOptions() {
- var options = OllamaOptions.create().withTemperature(3.14f).withTopK(30).withStop(List.of("a", "b", "c"));
+ var options = OllamaOptions.create().withTemperature(3.14).withTopK(30).withStop(List.of("a", "b", "c"));
var optionsMap = options.toMap();
System.out.println(optionsMap);
diff --git a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatModel.java b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatModel.java
index b2b76854ed..55f999813b 100644
--- a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatModel.java
+++ b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatModel.java
@@ -134,7 +134,7 @@ public class OpenAiChatModel extends AbstractToolCallSupport implements ChatMode
*/
public OpenAiChatModel(OpenAiApi openAiApi) {
this(openAiApi,
- OpenAiChatOptions.builder().withModel(OpenAiApi.DEFAULT_CHAT_MODEL).withTemperature(0.7f).build());
+ OpenAiChatOptions.builder().withModel(OpenAiApi.DEFAULT_CHAT_MODEL).withTemperature(0.7).build());
}
/**
diff --git a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java
index db5e5a6aa2..3a2d8695b8 100644
--- a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java
+++ b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java
@@ -57,7 +57,7 @@ public class OpenAiChatOptions implements FunctionCallingOptions, ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing
* frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
*/
- private @JsonProperty("frequency_penalty") Float frequencyPenalty;
+ private @JsonProperty("frequency_penalty") Double frequencyPenalty;
/**
* Modify the likelihood of specified tokens appearing in the completion. Accepts a JSON object
* that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100.
@@ -90,7 +90,7 @@ public class OpenAiChatOptions implements FunctionCallingOptions, ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they
* appear in the text so far, increasing the model's likelihood to talk about new topics.
*/
- private @JsonProperty("presence_penalty") Float presencePenalty;
+ private @JsonProperty("presence_penalty") Double presencePenalty;
/**
* An object specifying the format that the model must output. Setting to { "type":
* "json_object" } enables JSON mode, which guarantees the message the model generates is valid JSON.
@@ -117,13 +117,13 @@ public class OpenAiChatOptions implements FunctionCallingOptions, ChatOptions {
* more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend
* altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the
* results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
* probability mass are considered. We generally recommend altering this or temperature but not both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* A list of tools the model may call. Currently, only functions are supported as a tool. Use this to
* provide a list of functions the model may generate JSON inputs for.
@@ -205,7 +205,7 @@ public Builder withModel(OpenAiApi.ChatModel openAiChatModel) {
return this;
}
- public Builder withFrequencyPenalty(Float frequencyPenalty) {
+ public Builder withFrequencyPenalty(Double frequencyPenalty) {
this.options.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -235,7 +235,7 @@ public Builder withN(Integer n) {
return this;
}
- public Builder withPresencePenalty(Float presencePenalty) {
+ public Builder withPresencePenalty(Double presencePenalty) {
this.options.presencePenalty = presencePenalty;
return this;
}
@@ -260,12 +260,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -337,11 +337,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -387,11 +387,11 @@ public void setN(Integer n) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -439,20 +439,20 @@ public void setStop(List stop) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java
index 619b105a7c..e08334959f 100644
--- a/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java
+++ b/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java
@@ -415,20 +415,20 @@ public Function(String description, String name, String jsonSchema) {
public record ChatCompletionRequest(// @formatter:off
@JsonProperty("messages") List messages,
@JsonProperty("model") String model,
- @JsonProperty("frequency_penalty") Float frequencyPenalty,
+ @JsonProperty("frequency_penalty") Double frequencyPenalty,
@JsonProperty("logit_bias") Map logitBias,
@JsonProperty("logprobs") Boolean logprobs,
@JsonProperty("top_logprobs") Integer topLogprobs,
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("n") Integer n,
- @JsonProperty("presence_penalty") Float presencePenalty,
+ @JsonProperty("presence_penalty") Double presencePenalty,
@JsonProperty("response_format") ResponseFormat responseFormat,
@JsonProperty("seed") Integer seed,
@JsonProperty("stop") List stop,
@JsonProperty("stream") Boolean stream,
@JsonProperty("stream_options") StreamOptions streamOptions,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("tools") List tools,
@JsonProperty("tool_choice") Object toolChoice,
@JsonProperty("parallel_tool_calls") Boolean parallelToolCalls,
@@ -441,7 +441,7 @@ public record ChatCompletionRequest(// @formatter:off
* @param model ID of the model to use.
* @param temperature What sampling temperature to use, between 0 and 1.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature) {
+ public ChatCompletionRequest(List messages, String model, Double temperature) {
this(messages, model, null, null, null, null, null, null, null,
null, null, null, false, null, temperature, null,
null, null, null, null);
@@ -456,7 +456,7 @@ public ChatCompletionRequest(List messages, String model,
* @param stream If set, partial message deltas will be sent.Tokens will be sent as data-only server-sent events
* as they become available, with the stream terminated by a data: [DONE] message.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature, boolean stream) {
+ public ChatCompletionRequest(List messages, String model, Double temperature, boolean stream) {
this(messages, model, null, null, null, null, null, null, null,
null, null, null, stream, null, temperature, null,
null, null, null, null);
@@ -474,7 +474,7 @@ public ChatCompletionRequest(List messages, String model,
public ChatCompletionRequest(List messages, String model,
List tools, Object toolChoice) {
this(messages, model, null, null, null, null, null, null, null,
- null, null, null, false, null, 0.8f, null,
+ null, null, null, false, null, 0.8, null,
tools, toolChoice, null, null);
}
diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/ChatCompletionRequestTests.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/ChatCompletionRequestTests.java
index a4b1177913..f91edff961 100644
--- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/ChatCompletionRequestTests.java
+++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/ChatCompletionRequestTests.java
@@ -35,7 +35,7 @@ public class ChatCompletionRequestTests {
public void createRequestWithChatOptions() {
var client = new OpenAiChatModel(new OpenAiApi("TEST"),
- OpenAiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ OpenAiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
var request = client.createRequest(new Prompt("Test message content"), false);
@@ -43,16 +43,16 @@ public void createRequestWithChatOptions() {
assertThat(request.stream()).isFalse();
assertThat(request.model()).isEqualTo("DEFAULT_MODEL");
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
request = client.createRequest(new Prompt("Test message content",
- OpenAiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()), true);
+ OpenAiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()), true);
assertThat(request.messages()).hasSize(1);
assertThat(request.stream()).isTrue();
assertThat(request.model()).isEqualTo("PROMPT_MODEL");
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
}
@Test
diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/OpenAiApiIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/OpenAiApiIT.java
index 25e553c1bf..17a0022763 100644
--- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/OpenAiApiIT.java
+++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/OpenAiApiIT.java
@@ -44,7 +44,7 @@ public class OpenAiApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = openAiApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -54,7 +54,7 @@ void chatCompletionEntity() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = openAiApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/tool/OpenAiApiToolFunctionCallIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/tool/OpenAiApiToolFunctionCallIT.java
index 0a02361080..f8d0f20316 100644
--- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/tool/OpenAiApiToolFunctionCallIT.java
+++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/api/tool/OpenAiApiToolFunctionCallIT.java
@@ -124,7 +124,7 @@ public void toolFunctionCall() {
}
}
- var functionResponseRequest = new ChatCompletionRequest(messages, "gpt-4o", 0.5f);
+ var functionResponseRequest = new ChatCompletionRequest(messages, "gpt-4o", 0.5);
ResponseEntity chatCompletion2 = completionApi.chatCompletionEntity(functionResponseRequest);
diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelObservationIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelObservationIT.java
index d0e582adbe..3868ebf8ef 100644
--- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelObservationIT.java
+++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelObservationIT.java
@@ -71,12 +71,12 @@ void observationForChatOperation() {
var options = OpenAiChatOptions.builder()
.withModel(OpenAiApi.ChatModel.GPT_4_O_MINI.getValue())
- .withFrequencyPenalty(0f)
+ .withFrequencyPenalty(0.0)
.withMaxTokens(2048)
- .withPresencePenalty(0f)
+ .withPresencePenalty(0.0)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.build();
Prompt prompt = new Prompt("Why does a raven look like a desk?", options);
@@ -94,12 +94,12 @@ void observationForChatOperation() {
void observationForStreamingChatOperation() {
var options = OpenAiChatOptions.builder()
.withModel(OpenAiApi.ChatModel.GPT_4_O_MINI.getValue())
- .withFrequencyPenalty(0f)
+ .withFrequencyPenalty(0.0)
.withMaxTokens(2048)
- .withPresencePenalty(0f)
+ .withPresencePenalty(0.0)
.withStop(List.of("this-is-the-end"))
- .withTemperature(0.7f)
- .withTopP(1f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withStreamUsage(true)
.build();
diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiPaymentTransactionIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiPaymentTransactionIT.java
index 4f299bd5b3..5b0cd73e44 100644
--- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiPaymentTransactionIT.java
+++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiPaymentTransactionIT.java
@@ -203,7 +203,7 @@ public OpenAiChatModel openAiClient(OpenAiApi openAiApi, FunctionCallbackContext
return new OpenAiChatModel(openAiApi,
OpenAiChatOptions.builder()
.withModel(ChatModel.GPT_4_O_MINI.getName())
- .withTemperature(0.1f)
+ .withTemperature(0.1)
.build(),
functionCallbackContext, RetryUtils.DEFAULT_RETRY_TEMPLATE);
}
diff --git a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatModel.java b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatModel.java
index c5be46e8b4..cfad456e42 100644
--- a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatModel.java
+++ b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatModel.java
@@ -77,7 +77,7 @@ public class QianFanChatModel implements ChatModel, StreamingChatModel {
*/
public QianFanChatModel(QianFanApi qianFanApi) {
this(qianFanApi,
- QianFanChatOptions.builder().withModel(QianFanApi.DEFAULT_CHAT_MODEL).withTemperature(0.7f).build());
+ QianFanChatOptions.builder().withModel(QianFanApi.DEFAULT_CHAT_MODEL).withTemperature(0.7).build());
}
/**
diff --git a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatOptions.java b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatOptions.java
index f75df382bd..cb148f4973 100644
--- a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatOptions.java
+++ b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/QianFanChatOptions.java
@@ -46,7 +46,7 @@ public class QianFanChatOptions implements ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing
* frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
*/
- private @JsonProperty("frequency_penalty") Float frequencyPenalty;
+ private @JsonProperty("frequency_penalty") Double frequencyPenalty;
/**
* The maximum number of tokens to generate in the chat completion. The total length of input
* tokens and generated tokens is limited by the model's context length.
@@ -56,7 +56,7 @@ public class QianFanChatOptions implements ChatOptions {
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they
* appear in the text so far, increasing the model's likelihood to talk about new topics.
*/
- private @JsonProperty("presence_penalty") Float presencePenalty;
+ private @JsonProperty("presence_penalty") Double presencePenalty;
/**
* An object specifying the format that the model must output. Setting to { "type":
* "json_object" } enables JSON mode, which guarantees the message the model generates is valid JSON.
@@ -72,13 +72,13 @@ public class QianFanChatOptions implements ChatOptions {
* more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend
* altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the
* results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
* probability mass are considered. We generally recommend altering this or temperature but not both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
// @formatter:on
public static Builder builder() {
@@ -102,7 +102,7 @@ public Builder withModel(String model) {
return this;
}
- public Builder withFrequencyPenalty(Float frequencyPenalty) {
+ public Builder withFrequencyPenalty(Double frequencyPenalty) {
this.options.frequencyPenalty = frequencyPenalty;
return this;
}
@@ -112,7 +112,7 @@ public Builder withMaxTokens(Integer maxTokens) {
return this;
}
- public Builder withPresencePenalty(Float presencePenalty) {
+ public Builder withPresencePenalty(Double presencePenalty) {
this.options.presencePenalty = presencePenalty;
return this;
}
@@ -127,12 +127,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -153,11 +153,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return this.frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -171,11 +171,11 @@ public void setMaxTokens(Integer maxTokens) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return this.presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -207,20 +207,20 @@ public void setStop(List stop) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/api/QianFanApi.java b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/api/QianFanApi.java
index 7dcfb67a1e..ec68cd8dd6 100644
--- a/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/api/QianFanApi.java
+++ b/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/api/QianFanApi.java
@@ -39,6 +39,7 @@
* QianFan Docs
*
* @author Geng Rong
+ * @author Thomas Vitale
* @since 1.0
*/
public class QianFanApi extends AuthApi {
@@ -187,14 +188,14 @@ public record ChatCompletionRequest (
@JsonProperty("messages") List messages,
@JsonProperty("system") String system,
@JsonProperty("model") String model,
- @JsonProperty("frequency_penalty") Float frequencyPenalty,
+ @JsonProperty("frequency_penalty") Double frequencyPenalty,
@JsonProperty("max_output_tokens") Integer maxTokens,
- @JsonProperty("presence_penalty") Float presencePenalty,
+ @JsonProperty("presence_penalty") Double presencePenalty,
@JsonProperty("response_format") ResponseFormat responseFormat,
@JsonProperty("stop") List stop,
@JsonProperty("stream") Boolean stream,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP) {
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP) {
/**
* Shortcut constructor for a chat completion request with the given messages and model.
@@ -203,7 +204,7 @@ public record ChatCompletionRequest (
* @param model ID of the model to use.
* @param temperature What sampling temperature to use, between 0 and 1.
*/
- public ChatCompletionRequest(List messages, String system, String model, Float temperature) {
+ public ChatCompletionRequest(List messages, String system, String model, Double temperature) {
this(messages, system, model, null,null,
null, null, null, false, temperature, null);
}
@@ -217,7 +218,7 @@ public ChatCompletionRequest(List messages, String system
* @param stream If set, partial message deltas will be sent.Tokens will be sent as data-only server-sent events
* as they become available, with the stream terminated by a data: [DONE] message.
*/
- public ChatCompletionRequest(List messages, String system, String model, Float temperature, boolean stream) {
+ public ChatCompletionRequest(List messages, String system, String model, Double temperature, boolean stream) {
this(messages, system, model, null,null,
null, null, null, stream, temperature, null);
}
@@ -233,7 +234,7 @@ public ChatCompletionRequest(List messages, String system
*/
public ChatCompletionRequest(List messages, String system, Boolean stream) {
this(messages, system, DEFAULT_CHAT_MODEL, null,null,
- null, null, null, stream, 0.8F, null);
+ null, null, null, stream, 0.8, null);
}
/**
diff --git a/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/ChatCompletionRequestTests.java b/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/ChatCompletionRequestTests.java
index 2f42ca052b..c4f76a4027 100644
--- a/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/ChatCompletionRequestTests.java
+++ b/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/ChatCompletionRequestTests.java
@@ -30,7 +30,7 @@ public class ChatCompletionRequestTests {
public void createRequestWithChatOptions() {
var client = new QianFanChatModel(new QianFanApi("TEST", "TEST"),
- QianFanChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ QianFanChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
var request = client.createRequest(new Prompt("Test message content"), false);
@@ -38,16 +38,16 @@ public void createRequestWithChatOptions() {
assertThat(request.stream()).isFalse();
assertThat(request.model()).isEqualTo("DEFAULT_MODEL");
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
request = client.createRequest(new Prompt("Test message content",
- QianFanChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()), true);
+ QianFanChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()), true);
assertThat(request.messages()).hasSize(1);
assertThat(request.stream()).isTrue();
assertThat(request.model()).isEqualTo("PROMPT_MODEL");
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
}
}
diff --git a/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/api/QianFanApiIT.java b/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/api/QianFanApiIT.java
index f3d07f554d..38f34b72cc 100644
--- a/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/api/QianFanApiIT.java
+++ b/models/spring-ai-qianfan/src/test/java/org/springframework/ai/qianfan/api/QianFanApiIT.java
@@ -47,7 +47,7 @@ public class QianFanApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = qianFanApi.chatCompletionEntity(new ChatCompletionRequest(
- List.of(chatCompletionMessage), buildSystemMessage(), "ernie_speed", 0.7f, false));
+ List.of(chatCompletionMessage), buildSystemMessage(), "ernie_speed", 0.7, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -57,7 +57,7 @@ void chatCompletionEntity() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = qianFanApi.chatCompletionStream(new ChatCompletionRequest(
- List.of(chatCompletionMessage), buildSystemMessage(), "ernie_speed", 0.7f, true));
+ List.of(chatCompletionMessage), buildSystemMessage(), "ernie_speed", 0.7, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModel.java b/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModel.java
index faca97ae27..1be9935e67 100644
--- a/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModel.java
+++ b/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModel.java
@@ -138,7 +138,7 @@ public String getName() {
public VertexAiGeminiChatModel(VertexAI vertexAI) {
this(vertexAI,
- VertexAiGeminiChatOptions.builder().withModel(ChatModel.GEMINI_1_5_PRO).withTemperature(0.8f).build());
+ VertexAiGeminiChatOptions.builder().withModel(ChatModel.GEMINI_1_5_PRO).withTemperature(0.8).build());
}
public VertexAiGeminiChatModel(VertexAI vertexAI, VertexAiGeminiChatOptions options) {
@@ -354,7 +354,7 @@ private GenerationConfig toGenerationConfig(VertexAiGeminiChatOptions options) {
GenerationConfig.Builder generationConfigBuilder = GenerationConfig.newBuilder();
if (options.getTemperature() != null) {
- generationConfigBuilder.setTemperature(options.getTemperature());
+ generationConfigBuilder.setTemperature(options.getTemperature().floatValue());
}
if (options.getMaxOutputTokens() != null) {
generationConfigBuilder.setMaxOutputTokens(options.getMaxOutputTokens());
@@ -363,7 +363,7 @@ private GenerationConfig toGenerationConfig(VertexAiGeminiChatOptions options) {
generationConfigBuilder.setTopK(options.getTopK());
}
if (options.getTopP() != null) {
- generationConfigBuilder.setTopP(options.getTopP());
+ generationConfigBuilder.setTopP(options.getTopP().floatValue());
}
if (options.getCandidateCount() != null) {
generationConfigBuilder.setCandidateCount(options.getCandidateCount());
diff --git a/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatOptions.java b/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatOptions.java
index 62d24977d5..062089cb9d 100644
--- a/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatOptions.java
+++ b/models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatOptions.java
@@ -58,11 +58,11 @@ public enum TransportType {
/**
* Optional. Controls the randomness of predictions.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* Optional. If specified, nucleus sampling will be used.
*/
- private @JsonProperty("topP") Float topP;
+ private @JsonProperty("topP") Double topP;
/**
* Optional. If specified, top k sampling will be used.
*/
@@ -131,12 +131,12 @@ public Builder withStopSequences(List stopSequences) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -210,20 +210,20 @@ public void setStopSequences(List stopSequences) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -303,13 +303,13 @@ public void setFunctions(Set functions) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java
index 86d0615be9..1d4dce04ce 100644
--- a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java
+++ b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java
@@ -51,7 +51,7 @@ public class CreateGeminiRequestTests {
public void createRequestWithChatOptions() {
var client = new VertexAiGeminiChatModel(vertexAI,
- VertexAiGeminiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ VertexAiGeminiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
GeminiRequest request = client.createGeminiRequest(new Prompt("Test message content"));
@@ -62,7 +62,7 @@ public void createRequestWithChatOptions() {
assertThat(request.model().getGenerationConfig().getTemperature()).isEqualTo(66.6f);
request = client.createGeminiRequest(new Prompt("Test message content",
- VertexAiGeminiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()));
+ VertexAiGeminiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()));
assertThat(request.contents()).hasSize(1);
@@ -80,7 +80,7 @@ public void createRequestWithSystemMessage() throws MalformedURLException {
List.of(new Media(MimeTypeUtils.IMAGE_PNG, new URL("http://example.com"))));
var client = new VertexAiGeminiChatModel(vertexAI,
- VertexAiGeminiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ VertexAiGeminiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
GeminiRequest request = client.createGeminiRequest(new Prompt(List.of(systemMessage, userMessage)));
@@ -197,10 +197,10 @@ public void createRequestWithGenerationConfigOptions() {
var client = new VertexAiGeminiChatModel(vertexAI,
VertexAiGeminiChatOptions.builder()
.withModel("DEFAULT_MODEL")
- .withTemperature(66.6f)
+ .withTemperature(66.6)
.withMaxOutputTokens(100)
.withTopK(10.0f)
- .withTopP(5.0f)
+ .withTopP(5.0)
.withStopSequences(List.of("stop1", "stop2"))
.withCandidateCount(1)
.withResponseMimeType("application/json")
diff --git a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiChatModelFunctionCallingIT.java b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiChatModelFunctionCallingIT.java
index db40476ac6..836155f383 100644
--- a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiChatModelFunctionCallingIT.java
+++ b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiChatModelFunctionCallingIT.java
@@ -242,7 +242,7 @@ public VertexAiGeminiChatModel vertexAiEmbedding(VertexAI vertexAi) {
return new VertexAiGeminiChatModel(vertexAi,
VertexAiGeminiChatOptions.builder()
.withModel(VertexAiGeminiChatModel.ChatModel.GEMINI_1_5_PRO)
- .withTemperature(0.9f)
+ .withTemperature(0.9)
.build());
}
diff --git a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiPaymentTransactionIT.java b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiPaymentTransactionIT.java
index 1645152d23..3954019d4c 100644
--- a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiPaymentTransactionIT.java
+++ b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/function/VertexAiGeminiPaymentTransactionIT.java
@@ -202,7 +202,7 @@ public VertexAiGeminiChatModel vertexAiChatModel(VertexAI vertexAi, ApplicationC
return new VertexAiGeminiChatModel(vertexAi,
VertexAiGeminiChatOptions.builder()
.withModel(VertexAiGeminiChatModel.ChatModel.GEMINI_1_5_FLASH)
- .withTemperature(0.1f)
+ .withTemperature(0.1)
.build(),
functionCallbackContext);
}
diff --git a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatModel.java b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatModel.java
index db22e132fb..de8d732636 100644
--- a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatModel.java
+++ b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatModel.java
@@ -43,7 +43,7 @@ public class VertexAiPaLm2ChatModel implements ChatModel {
public VertexAiPaLm2ChatModel(VertexAiPaLm2Api vertexAiApi) {
this(vertexAiApi,
- VertexAiPaLm2ChatOptions.builder().withTemperature(0.7f).withCandidateCount(1).withTopK(20).build());
+ VertexAiPaLm2ChatOptions.builder().withTemperature(0.7).withCandidateCount(1).withTopK(20).build());
}
public VertexAiPaLm2ChatModel(VertexAiPaLm2Api vertexAiApi, VertexAiPaLm2ChatOptions defaultOptions) {
diff --git a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatOptions.java b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatOptions.java
index 8e27145362..77b968f693 100644
--- a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatOptions.java
+++ b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatOptions.java
@@ -39,7 +39,7 @@ public class VertexAiPaLm2ChatOptions implements ChatOptions {
* generative. This value specifies default to be used by the backend while making the
* call to the generative.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* The number of generated response messages to return. This value must be between [1,
@@ -52,7 +52,7 @@ public class VertexAiPaLm2ChatOptions implements ChatOptions {
* generative uses combined Top-k and nucleus sampling. Nucleus sampling considers the
* smallest set of tokens whose probability sum is at least topP.
*/
- private @JsonProperty("topP") Float topP;
+ private @JsonProperty("topP") Double topP;
/**
* The maximum number of tokens to consider when sampling. The generative uses
@@ -70,7 +70,7 @@ public static class Builder {
private VertexAiPaLm2ChatOptions options = new VertexAiPaLm2ChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
@@ -80,7 +80,7 @@ public Builder withCandidateCount(Integer candidateCount) {
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -97,11 +97,11 @@ public VertexAiPaLm2ChatOptions build() {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -114,11 +114,11 @@ public void setCandidateCount(Integer candidateCount) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -151,13 +151,13 @@ public List getStopSequences() {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/api/VertexAiPaLm2Api.java b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/api/VertexAiPaLm2Api.java
index cf47be62c4..4e590fbf79 100644
--- a/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/api/VertexAiPaLm2Api.java
+++ b/models/spring-ai-vertex-ai-palm2/src/main/java/org/springframework/ai/vertexai/palm2/api/VertexAiPaLm2Api.java
@@ -87,6 +87,7 @@
* https://ai.google.dev/api/rest#rest-resource:-v1.models
*
* @author Christian Tzolov
+ * @author Thomas Vitale
*/
public class VertexAiPaLm2Api {
@@ -353,8 +354,8 @@ public record Model(
@JsonProperty("inputTokenLimit") Integer inputTokenLimit,
@JsonProperty("outputTokenLimit") Integer outputTokenLimit,
@JsonProperty("supportedGenerationMethods") List supportedGenerationMethods,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("topP") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("topP") Double topP,
@JsonProperty("topK") Integer topK) {
}
@@ -528,9 +529,9 @@ public record Example(
@JsonInclude(Include.NON_NULL)
public record GenerateMessageRequest(
@JsonProperty("prompt") MessagePrompt prompt,
- @JsonProperty("temperature") Float temperature,
+ @JsonProperty("temperature") Double temperature,
@JsonProperty("candidateCount") Integer candidateCount,
- @JsonProperty("topP") Float topP,
+ @JsonProperty("topP") Double topP,
@JsonProperty("topK") Integer topK) {
/**
@@ -549,7 +550,7 @@ public GenerateMessageRequest(MessagePrompt prompt) {
* @param temperature (optional) Controls the randomness of the output.
* @param topK (optional) The maximum number of tokens to consider when sampling.
*/
- public GenerateMessageRequest(MessagePrompt prompt, Float temperature, Integer topK) {
+ public GenerateMessageRequest(MessagePrompt prompt, Double temperature, Integer topK) {
this(prompt, temperature, null, null, topK);
}
}
diff --git a/models/spring-ai-vertex-ai-palm2/src/test/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatRequestTests.java b/models/spring-ai-vertex-ai-palm2/src/test/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatRequestTests.java
index 81a07a150d..9e2d6726cf 100644
--- a/models/spring-ai-vertex-ai-palm2/src/test/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatRequestTests.java
+++ b/models/spring-ai-vertex-ai-palm2/src/test/java/org/springframework/ai/vertexai/palm2/VertexAiPaLm2ChatRequestTests.java
@@ -39,7 +39,7 @@ public void createRequestWithDefaultOptions() {
assertThat(request.prompt().messages()).hasSize(1);
assertThat(request.candidateCount()).isEqualTo(1);
- assertThat(request.temperature()).isEqualTo(0.7f);
+ assertThat(request.temperature()).isEqualTo(0.7);
assertThat(request.topK()).isEqualTo(20);
assertThat(request.topP()).isNull();
}
@@ -49,8 +49,8 @@ public void createRequestWithPromptVertexAiOptions() {
// Runtime options should override the default options.
VertexAiPaLm2ChatOptions promptOptions = VertexAiPaLm2ChatOptions.builder()
- .withTemperature(0.8f)
- .withTopP(0.5f)
+ .withTemperature(0.8)
+ .withTopP(0.5)
.withTopK(99)
// .withCandidateCount(2)
.build();
@@ -60,9 +60,9 @@ public void createRequestWithPromptVertexAiOptions() {
assertThat(request.prompt().messages()).hasSize(1);
assertThat(request.candidateCount()).isEqualTo(1);
- assertThat(request.temperature()).isEqualTo(0.8f);
+ assertThat(request.temperature()).isEqualTo(0.8);
assertThat(request.topK()).isEqualTo(99);
- assertThat(request.topP()).isEqualTo(0.5f);
+ assertThat(request.topP()).isEqualTo(0.5);
}
@Test
@@ -70,9 +70,9 @@ public void createRequestWithPromptPortableChatOptions() {
// runtime options.
ChatOptions portablePromptOptions = ChatOptionsBuilder.builder()
- .withTemperature(0.9f)
+ .withTemperature(0.9)
.withTopK(100)
- .withTopP(0.6f)
+ .withTopP(0.6)
.build();
var request = chatModel.createRequest(new Prompt("Test message content", portablePromptOptions));
@@ -80,9 +80,9 @@ public void createRequestWithPromptPortableChatOptions() {
assertThat(request.prompt().messages()).hasSize(1);
assertThat(request.candidateCount()).isEqualTo(1);
- assertThat(request.temperature()).isEqualTo(0.9f);
+ assertThat(request.temperature()).isEqualTo(0.9);
assertThat(request.topK()).isEqualTo(100);
- assertThat(request.topP()).isEqualTo(0.6f);
+ assertThat(request.topP()).isEqualTo(0.6);
}
}
diff --git a/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatModel.java b/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatModel.java
index 91a0cda70f..fc3910648b 100644
--- a/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatModel.java
+++ b/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatModel.java
@@ -57,13 +57,13 @@ public class WatsonxAiChatModel implements ChatModel, StreamingChatModel {
public WatsonxAiChatModel(WatsonxAiApi watsonxAiApi) {
this(watsonxAiApi,
WatsonxAiChatOptions.builder()
- .withTemperature(0.7f)
- .withTopP(1.0f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withTopK(50)
.withDecodingMethod("greedy")
.withMaxNewTokens(20)
.withMinNewTokens(0)
- .withRepetitionPenalty(1.0f)
+ .withRepetitionPenalty(1.0)
.withStopSequences(List.of())
.build());
}
diff --git a/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatOptions.java b/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatOptions.java
index 1ca67cb2c9..75bcd5527e 100644
--- a/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatOptions.java
+++ b/models/spring-ai-watsonx-ai/src/main/java/org/springframework/ai/watsonx/WatsonxAiChatOptions.java
@@ -48,14 +48,14 @@ public class WatsonxAiChatOptions implements ChatOptions {
* The temperature of the model. Increasing the temperature will
* make the model answer more creatively. (Default: 0.7)
*/
- @JsonProperty("temperature") private Float temperature;
+ @JsonProperty("temperature") private Double temperature;
/**
* Works together with top-k. A higher value (e.g., 0.95) will lead to
* more diverse text, while a lower value (e.g., 0.2) will generate more focused and
* conservative text. (Default: 1.0)
*/
- @JsonProperty("top_p") private Float topP;
+ @JsonProperty("top_p") private Double topP;
/**
* Reduces the probability of generating nonsense. A higher value (e.g.
@@ -104,7 +104,7 @@ public class WatsonxAiChatOptions implements ChatOptions {
* (e.g., 1.8) will penalize repetitions more strongly, while a lower value (e.g.,
* 1.1) will be more lenient. (Default: 1.0)
*/
- @JsonProperty("repetition_penalty") private Float repetitionPenalty;
+ @JsonProperty("repetition_penalty") private Double repetitionPenalty;
/**
* Produce repeatable results, set the same random seed value every time. (Default: randomly generated)
@@ -126,20 +126,20 @@ public class WatsonxAiChatOptions implements ChatOptions {
private ObjectMapper mapper = new ObjectMapper();
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -198,20 +198,20 @@ public void setStopSequences(List stopSequences) {
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return getRepetitionPenalty();
}
@JsonIgnore
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
setRepetitionPenalty(presencePenalty);
}
- public Float getRepetitionPenalty() {
+ public Double getRepetitionPenalty() {
return repetitionPenalty;
}
- public void setRepetitionPenalty(Float repetitionPenalty) {
+ public void setRepetitionPenalty(Double repetitionPenalty) {
this.repetitionPenalty = repetitionPenalty;
}
@@ -248,7 +248,7 @@ public void addAdditionalProperty(String key, Object value) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@@ -260,12 +260,12 @@ public static class Builder {
WatsonxAiChatOptions options = new WatsonxAiChatOptions();
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -295,7 +295,7 @@ public Builder withStopSequences(List stopSequences) {
return this;
}
- public Builder withRepetitionPenalty(Float repetitionPenalty) {
+ public Builder withRepetitionPenalty(Double repetitionPenalty) {
this.options.repetitionPenalty = repetitionPenalty;
return this;
}
diff --git a/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/WatsonxAiChatModelTest.java b/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/WatsonxAiChatModelTest.java
index cf4f1c739d..23c2d2025a 100644
--- a/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/WatsonxAiChatModelTest.java
+++ b/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/WatsonxAiChatModelTest.java
@@ -52,7 +52,7 @@ public class WatsonxAiChatModelTest {
@Test
public void testCreateRequestWithNoModelId() {
- var options = ChatOptionsBuilder.builder().withTemperature(0.9f).withTopK(100).withTopP(0.6f).build();
+ var options = ChatOptionsBuilder.builder().withTemperature(0.9).withTopK(100).withTopP(0.6).build();
Prompt prompt = new Prompt("Test message", options);
@@ -93,12 +93,12 @@ public void testCreateRequestSuccessfullyWithNonDefaultParams() {
WatsonxAiChatOptions modelOptions = WatsonxAiChatOptions.builder()
.withModel("meta-llama/llama-2-70b-chat")
.withDecodingMethod("sample")
- .withTemperature(0.1f)
- .withTopP(0.2f)
+ .withTemperature(0.1)
+ .withTopP(0.2)
.withTopK(10)
.withMaxNewTokens(30)
.withMinNewTokens(10)
- .withRepetitionPenalty(1.4f)
+ .withRepetitionPenalty(1.4)
.withStopSequences(List.of("\n\n\n"))
.withRandomSeed(4)
.build();
@@ -127,12 +127,12 @@ public void testCreateRequestSuccessfullyWithChatDisabled() {
WatsonxAiChatOptions modelOptions = WatsonxAiChatOptions.builder()
.withModel("meta-llama/llama-2-70b-chat")
.withDecodingMethod("sample")
- .withTemperature(0.1f)
- .withTopP(0.2f)
+ .withTemperature(0.1)
+ .withTopP(0.2)
.withTopK(10)
.withMaxNewTokens(30)
.withMinNewTokens(10)
- .withRepetitionPenalty(1.4f)
+ .withRepetitionPenalty(1.4)
.withStopSequences(List.of("\n\n\n"))
.withRandomSeed(4)
.build();
diff --git a/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/api/WatsonxAiChatOptionTest.java b/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/api/WatsonxAiChatOptionTest.java
index 5011cc3e07..f77812852f 100644
--- a/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/api/WatsonxAiChatOptionTest.java
+++ b/models/spring-ai-watsonx-ai/src/test/java/org/springframework/ai/watsonx/api/WatsonxAiChatOptionTest.java
@@ -34,13 +34,13 @@ public class WatsonxAiChatOptionTest {
public void testOptions() {
WatsonxAiChatOptions options = WatsonxAiChatOptions.builder()
.withDecodingMethod("sample")
- .withTemperature(1.2f)
+ .withTemperature(1.2)
.withTopK(20)
- .withTopP(0.5f)
+ .withTopP(0.5)
.withMaxNewTokens(100)
.withMinNewTokens(20)
.withStopSequences(List.of("\n\n\n"))
- .withRepetitionPenalty(1.1f)
+ .withRepetitionPenalty(1.1)
.withRandomSeed(4)
.build();
@@ -61,13 +61,13 @@ public void testOptions() {
public void testOptionsWithAdditionalParamsOneByOne() {
WatsonxAiChatOptions options = WatsonxAiChatOptions.builder()
.withDecodingMethod("sample")
- .withTemperature(1.2f)
+ .withTemperature(1.2)
.withTopK(20)
- .withTopP(0.5f)
+ .withTopP(0.5)
.withMaxNewTokens(100)
.withMinNewTokens(20)
.withStopSequences(List.of("\n\n\n"))
- .withRepetitionPenalty(1.1f)
+ .withRepetitionPenalty(1.1)
.withRandomSeed(4)
.withAdditionalProperty("HAP", true)
.withAdditionalProperty("typicalP", 0.5f)
@@ -92,13 +92,13 @@ public void testOptionsWithAdditionalParamsOneByOne() {
public void testOptionsWithAdditionalParamsMap() {
WatsonxAiChatOptions options = WatsonxAiChatOptions.builder()
.withDecodingMethod("sample")
- .withTemperature(1.2f)
+ .withTemperature(1.2)
.withTopK(20)
- .withTopP(0.5f)
+ .withTopP(0.5)
.withMaxNewTokens(100)
.withMinNewTokens(20)
.withStopSequences(List.of("\n\n\n"))
- .withRepetitionPenalty(1.1f)
+ .withRepetitionPenalty(1.1)
.withRandomSeed(4)
.withAdditionalProperties(Map.of("HAP", true, "typicalP", 0.5f, "test_value", "test"))
.build();
diff --git a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatModel.java b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatModel.java
index 773e8106f6..2a9293c355 100644
--- a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatModel.java
+++ b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatModel.java
@@ -100,7 +100,7 @@ public class ZhiPuAiChatModel extends AbstractToolCallSupport implements ChatMod
*/
public ZhiPuAiChatModel(ZhiPuAiApi zhiPuAiApi) {
this(zhiPuAiApi,
- ZhiPuAiChatOptions.builder().withModel(ZhiPuAiApi.DEFAULT_CHAT_MODEL).withTemperature(0.7f).build());
+ ZhiPuAiChatOptions.builder().withModel(ZhiPuAiApi.DEFAULT_CHAT_MODEL).withTemperature(0.7).build());
}
/**
diff --git a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatOptions.java b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatOptions.java
index 9af8f6616d..b495eb6679 100644
--- a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatOptions.java
+++ b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/ZhiPuAiChatOptions.java
@@ -62,13 +62,13 @@ public class ZhiPuAiChatOptions implements FunctionCallingOptions, ChatOptions {
* more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend
* altering this or top_p but not both.
*/
- private @JsonProperty("temperature") Float temperature;
+ private @JsonProperty("temperature") Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the
* results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
* probability mass are considered. We generally recommend altering this or temperature but not both.
*/
- private @JsonProperty("top_p") Float topP;
+ private @JsonProperty("top_p") Double topP;
/**
* A list of tools the model may call. Currently, only functions are supported as a tool. Use this to
* provide a list of functions the model may generate JSON inputs for.
@@ -156,12 +156,12 @@ public Builder withStop(List stop) {
return this;
}
- public Builder withTemperature(Float temperature) {
+ public Builder withTemperature(Double temperature) {
this.options.temperature = temperature;
return this;
}
- public Builder withTopP(Float topP) {
+ public Builder withTopP(Double topP) {
this.options.topP = topP;
return this;
}
@@ -252,20 +252,20 @@ public void setStop(List stop) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return this.temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return this.topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -330,13 +330,13 @@ public void setFunctions(Set functionNames) {
@Override
@JsonIgnore
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return null;
}
@Override
@JsonIgnore
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return null;
}
diff --git a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/api/ZhiPuAiApi.java b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/api/ZhiPuAiApi.java
index 42df85da84..363937cc3e 100644
--- a/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/api/ZhiPuAiApi.java
+++ b/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/api/ZhiPuAiApi.java
@@ -46,6 +46,7 @@
* ZhiPuAI Embedding API.
*
* @author Geng Rong
+ * @author Thomas Vitale
* @since 1.0.0
*/
public class ZhiPuAiApi {
@@ -230,8 +231,8 @@ public record ChatCompletionRequest (
@JsonProperty("max_tokens") Integer maxTokens,
@JsonProperty("stop") List stop,
@JsonProperty("stream") Boolean stream,
- @JsonProperty("temperature") Float temperature,
- @JsonProperty("top_p") Float topP,
+ @JsonProperty("temperature") Double temperature,
+ @JsonProperty("top_p") Double topP,
@JsonProperty("tools") List tools,
@JsonProperty("tool_choice") Object toolChoice,
@JsonProperty("user") String user,
@@ -245,7 +246,7 @@ public record ChatCompletionRequest (
* @param model ID of the model to use.
* @param temperature What sampling temperature to use, between 0 and 1.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature) {
+ public ChatCompletionRequest(List messages, String model, Double temperature) {
this(messages, model, null, null, false, temperature, null,
null, null, null, null, null);
}
@@ -259,7 +260,7 @@ public ChatCompletionRequest(List messages, String model,
* @param stream If set, partial message deltas will be sent.Tokens will be sent as data-only server-sent events
* as they become available, with the stream terminated by a data: [DONE] message.
*/
- public ChatCompletionRequest(List messages, String model, Float temperature, boolean stream) {
+ public ChatCompletionRequest(List messages, String model, Double temperature, boolean stream) {
this(messages, model, null, null, stream, temperature, null,
null, null, null, null, null);
}
@@ -275,7 +276,7 @@ public ChatCompletionRequest(List messages, String model,
*/
public ChatCompletionRequest(List messages, String model,
List tools, Object toolChoice) {
- this(messages, model, null, null, false, 0.8f, null,
+ this(messages, model, null, null, false, 0.8, null,
tools, toolChoice, null, null, null);
}
diff --git a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/ChatCompletionRequestTests.java b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/ChatCompletionRequestTests.java
index 972280627e..eb12b04c50 100644
--- a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/ChatCompletionRequestTests.java
+++ b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/ChatCompletionRequestTests.java
@@ -34,7 +34,7 @@ public class ChatCompletionRequestTests {
public void createRequestWithChatOptions() {
var client = new ZhiPuAiChatModel(new ZhiPuAiApi("TEST"),
- ZhiPuAiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6f).build());
+ ZhiPuAiChatOptions.builder().withModel("DEFAULT_MODEL").withTemperature(66.6).build());
var request = client.createRequest(new Prompt("Test message content"), false);
@@ -42,16 +42,16 @@ public void createRequestWithChatOptions() {
assertThat(request.stream()).isFalse();
assertThat(request.model()).isEqualTo("DEFAULT_MODEL");
- assertThat(request.temperature()).isEqualTo(66.6f);
+ assertThat(request.temperature()).isEqualTo(66.6);
request = client.createRequest(new Prompt("Test message content",
- ZhiPuAiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9f).build()), true);
+ ZhiPuAiChatOptions.builder().withModel("PROMPT_MODEL").withTemperature(99.9).build()), true);
assertThat(request.messages()).hasSize(1);
assertThat(request.stream()).isTrue();
assertThat(request.model()).isEqualTo("PROMPT_MODEL");
- assertThat(request.temperature()).isEqualTo(99.9f);
+ assertThat(request.temperature()).isEqualTo(99.9);
}
@Test
diff --git a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/api/ZhiPuAiApiIT.java b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/api/ZhiPuAiApiIT.java
index 2d559c9e6f..1837f9ba52 100644
--- a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/api/ZhiPuAiApiIT.java
+++ b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/api/ZhiPuAiApiIT.java
@@ -43,8 +43,8 @@ public class ZhiPuAiApiIT {
@Test
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
- ResponseEntity response = zhiPuAiApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-3-turbo", 0.7f, false));
+ ResponseEntity response = zhiPuAiApi
+ .chatCompletionEntity(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-3-turbo", 0.7, false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -55,7 +55,7 @@ void chatCompletionEntityWithMoreParams() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity response = zhiPuAiApi
.chatCompletionEntity(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-3-turbo", 1024, null,
- false, 0.95f, 0.7f, null, null, null, "test_request_id", false));
+ false, 0.95, 0.7, null, null, null, "test_request_id", false));
assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -65,7 +65,7 @@ void chatCompletionEntityWithMoreParams() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux response = zhiPuAiApi
- .chatCompletionStream(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-3-turbo", 0.7f, true));
+ .chatCompletionStream(new ChatCompletionRequest(List.of(chatCompletionMessage), "glm-3-turbo", 0.7, true));
assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
diff --git a/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java b/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java
index a217eb9407..f2d0d5ee2a 100644
--- a/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java
+++ b/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java
@@ -29,25 +29,25 @@ public interface ChatOptions extends ModelOptions {
String getModel();
@Nullable
- Float getFrequencyPenalty();
+ Double getFrequencyPenalty();
@Nullable
Integer getMaxTokens();
@Nullable
- Float getPresencePenalty();
+ Double getPresencePenalty();
@Nullable
List getStopSequences();
@Nullable
- Float getTemperature();
+ Double getTemperature();
@Nullable
Integer getTopK();
@Nullable
- Float getTopP();
+ Double getTopP();
ChatOptions copy();
diff --git a/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java b/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java
index 9101892eab..07f955c983 100644
--- a/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java
+++ b/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java
@@ -23,19 +23,19 @@ private static class DefaultChatOptions implements ChatOptions {
private String model;
- private Float frequencyPenalty;
+ private Double frequencyPenalty;
private Integer maxTokens;
- private Float presencePenalty;
+ private Double presencePenalty;
private List stopSequences;
- private Float temperature;
+ private Double temperature;
private Integer topK;
- private Float topP;
+ private Double topP;
@Override
public String getModel() {
@@ -47,11 +47,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -65,11 +65,11 @@ public void setMaxTokens(Integer maxTokens) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -83,11 +83,11 @@ public void setStopSequences(List stopSequences) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -101,11 +101,11 @@ public void setTopK(Integer topK) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
@@ -138,7 +138,7 @@ public ChatOptionsBuilder withModel(String model) {
return this;
}
- public ChatOptionsBuilder withFrequencyPenalty(Float frequencyPenalty) {
+ public ChatOptionsBuilder withFrequencyPenalty(Double frequencyPenalty) {
options.setFrequencyPenalty(frequencyPenalty);
return this;
}
@@ -148,7 +148,7 @@ public ChatOptionsBuilder withMaxTokens(Integer maxTokens) {
return this;
}
- public ChatOptionsBuilder withPresencePenalty(Float presencePenalty) {
+ public ChatOptionsBuilder withPresencePenalty(Double presencePenalty) {
options.setPresencePenalty(presencePenalty);
return this;
}
@@ -158,7 +158,7 @@ public ChatOptionsBuilder withStopSequences(List<String> stop)
return this;
}
- public ChatOptionsBuilder withTemperature(Float temperature) {
+ public ChatOptionsBuilder withTemperature(Double temperature) {
options.setTemperature(temperature);
return this;
}
@@ -168,7 +168,7 @@ public ChatOptionsBuilder withTopK(Integer topK) {
return this;
}
- public ChatOptionsBuilder withTopP(Float topP) {
+ public ChatOptionsBuilder withTopP(Double topP) {
options.setTopP(topP);
return this;
}
diff --git a/spring-ai-core/src/main/java/org/springframework/ai/model/function/FunctionCallingOptionsBuilder.java b/spring-ai-core/src/main/java/org/springframework/ai/model/function/FunctionCallingOptionsBuilder.java
index 8368e7d1fc..5c1d7c0520 100644
--- a/spring-ai-core/src/main/java/org/springframework/ai/model/function/FunctionCallingOptionsBuilder.java
+++ b/spring-ai-core/src/main/java/org/springframework/ai/model/function/FunctionCallingOptionsBuilder.java
@@ -29,6 +29,7 @@
* function-calling.
*
* @author Christian Tzolov
+ * @author Thomas Vitale
* @since 0.8.1
*/
public class FunctionCallingOptionsBuilder {
@@ -66,7 +67,7 @@ public FunctionCallingOptionsBuilder withModel(String model) {
return this;
}
- public FunctionCallingOptionsBuilder withFrequencyPenalty(Float frequencyPenalty) {
+ public FunctionCallingOptionsBuilder withFrequencyPenalty(Double frequencyPenalty) {
this.options.setFrequencyPenalty(frequencyPenalty);
return this;
}
@@ -76,7 +77,7 @@ public FunctionCallingOptionsBuilder withMaxTokens(Integer maxTokens) {
return this;
}
- public FunctionCallingOptionsBuilder withPresencePenalty(Float presencePenalty) {
+ public FunctionCallingOptionsBuilder withPresencePenalty(Double presencePenalty) {
this.options.setPresencePenalty(presencePenalty);
return this;
}
@@ -86,7 +87,7 @@ public FunctionCallingOptionsBuilder withStopSequences(List<String> stopSequence
return this;
}
- public FunctionCallingOptionsBuilder withTemperature(Float temperature) {
+ public FunctionCallingOptionsBuilder withTemperature(Double temperature) {
this.options.setTemperature(temperature);
return this;
}
@@ -96,7 +97,7 @@ public FunctionCallingOptionsBuilder withTopK(Integer topK) {
return this;
}
- public FunctionCallingOptionsBuilder withTopP(Float topP) {
+ public FunctionCallingOptionsBuilder withTopP(Double topP) {
this.options.setTopP(topP);
return this;
}
@@ -113,19 +114,19 @@ public static class PortableFunctionCallingOptions implements FunctionCallingOpt
private String model;
- private Float frequencyPenalty;
+ private Double frequencyPenalty;
private Integer maxTokens;
- private Float presencePenalty;
+ private Double presencePenalty;
private List<String> stopSequences;
- private Float temperature;
+ private Double temperature;
private Integer topK;
- private Float topP;
+ private Double topP;
@Override
public List<FunctionCallback> getFunctionCallbacks() {
@@ -157,11 +158,11 @@ public void setModel(String model) {
}
@Override
- public Float getFrequencyPenalty() {
+ public Double getFrequencyPenalty() {
return frequencyPenalty;
}
- public void setFrequencyPenalty(Float frequencyPenalty) {
+ public void setFrequencyPenalty(Double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
}
@@ -175,11 +176,11 @@ public void setMaxTokens(Integer maxTokens) {
}
@Override
- public Float getPresencePenalty() {
+ public Double getPresencePenalty() {
return presencePenalty;
}
- public void setPresencePenalty(Float presencePenalty) {
+ public void setPresencePenalty(Double presencePenalty) {
this.presencePenalty = presencePenalty;
}
@@ -193,11 +194,11 @@ public void setStopSequences(List<String> stopSequences) {
}
@Override
- public Float getTemperature() {
+ public Double getTemperature() {
return temperature;
}
- public void setTemperature(Float temperature) {
+ public void setTemperature(Double temperature) {
this.temperature = temperature;
}
@@ -211,11 +212,11 @@ public void setTopK(Integer topK) {
}
@Override
- public Float getTopP() {
+ public Double getTopP() {
return topP;
}
- public void setTopP(Float topP) {
+ public void setTopP(Double topP) {
this.topP = topP;
}
diff --git a/spring-ai-core/src/test/java/org/springframework/ai/chat/ChatBuilderTests.java b/spring-ai-core/src/test/java/org/springframework/ai/chat/ChatBuilderTests.java
index da9db9e79d..5ed9ccc8f0 100644
--- a/spring-ai-core/src/test/java/org/springframework/ai/chat/ChatBuilderTests.java
+++ b/spring-ai-core/src/test/java/org/springframework/ai/chat/ChatBuilderTests.java
@@ -40,8 +40,8 @@ public class ChatBuilderTests {
@Test
void createNewChatOptionsTest() {
- Float temperature = 1.1f;
- Float topP = 2.2f;
+ Double temperature = 1.1;
+ Double topP = 2.2;
Integer topK = 111;
ChatOptions options = ChatOptionsBuilder.builder()
@@ -57,8 +57,8 @@ void createNewChatOptionsTest() {
@Test
void duplicateChatOptionsTest() {
- Float initTemperature = 1.1f;
- Float initTopP = 2.2f;
+ Double initTemperature = 1.1;
+ Double initTopP = 2.2;
Integer initTopK = 111;
ChatOptions options = ChatOptionsBuilder.builder()
@@ -71,8 +71,8 @@ void duplicateChatOptionsTest() {
@Test
void createFunctionCallingOptionTest() {
- Float temperature = 1.1f;
- Float topP = 2.2f;
+ Double temperature = 1.1;
+ Double topP = 2.2;
Integer topK = 111;
List functionCallbacks = new ArrayList<>();
Set functions = new HashSet<>();
diff --git a/spring-ai-core/src/test/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConventionTests.java b/spring-ai-core/src/test/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConventionTests.java
index 785c191815..dda0cc95b9 100644
--- a/spring-ai-core/src/test/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConventionTests.java
+++ b/spring-ai-core/src/test/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConventionTests.java
@@ -98,13 +98,13 @@ void shouldHaveKeyValuesWhenDefinedAndResponse() {
.provider("superprovider")
.requestOptions(ChatOptionsBuilder.builder()
.withModel("mistral")
- .withFrequencyPenalty(0.8f)
+ .withFrequencyPenalty(0.8)
.withMaxTokens(200)
- .withPresencePenalty(1.0f)
+ .withPresencePenalty(1.0)
.withStopSequences(List.of("addio", "bye"))
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.withTopK(1)
- .withTopP(0.9f)
+ .withTopP(0.9)
.build())
.build();
observationContext.setResponse(new ChatResponse(
diff --git a/spring-ai-core/src/test/java/org/springframework/ai/prompt/PromptTemplateTest.java b/spring-ai-core/src/test/java/org/springframework/ai/prompt/PromptTemplateTest.java
index cbe5e53dbb..835bd59e79 100644
--- a/spring-ai-core/src/test/java/org/springframework/ai/prompt/PromptTemplateTest.java
+++ b/spring-ai-core/src/test/java/org/springframework/ai/prompt/PromptTemplateTest.java
@@ -44,7 +44,7 @@ public class PromptTemplateTest {
public void testCreateWithEmptyModelAndChatOptions() {
String template = "This is a test prompt with no variables";
PromptTemplate promptTemplate = new PromptTemplate(template);
- ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.7f).withTopK(3).build();
+ ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.7).withTopK(3).build();
Prompt prompt = promptTemplate.create(chatOptions);
@@ -60,7 +60,7 @@ public void testCreateWithModelAndChatOptions() {
model.put("name", "Alice");
model.put("age", 30);
PromptTemplate promptTemplate = new PromptTemplate(template, model);
- ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.5f).withMaxTokens(100).build();
+ ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.5).withMaxTokens(100).build();
Prompt prompt = promptTemplate.create(model, chatOptions);
@@ -79,7 +79,7 @@ public void testCreateWithOverriddenModelAndChatOptions() {
Map<String, Object> overriddenModel = new HashMap<>();
overriddenModel.put("color", "red");
- ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.8f).build();
+ ChatOptions chatOptions = ChatOptionsBuilder.builder().withTemperature(0.8).build();
Prompt prompt = promptTemplate.create(overriddenModel, chatOptions);
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc
index 3f3c1c361f..a5fcc2fd00 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc
@@ -296,12 +296,12 @@ AnthropicMessage chatCompletionMessage = new AnthropicMessage(
// Sync request
ResponseEntity<ChatCompletionResponse> response = anthropicApi
.chatCompletionEntity(new ChatCompletionRequest(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(),
- List.of(chatCompletionMessage), null, 100, 0.8f, false));
+ List.of(chatCompletionMessage), null, 100, 0.8, false));
// Streaming request
Flux<StreamResponse> response = anthropicApi
.chatCompletionStream(new ChatCompletionRequest(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getValue(),
- List.of(chatCompletionMessage), null, 100, 0.8f, true));
+ List.of(chatCompletionMessage), null, 100, 0.8, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java[AnthropicApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/azure-openai-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/azure-openai-chat.adoc
index 7b43c4768f..92e7e3e5a7 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/azure-openai-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/azure-openai-chat.adoc
@@ -312,7 +312,7 @@ var openAIClient = new OpenAIClientBuilder()
var openAIChatOptions = AzureOpenAiChatOptions.builder()
.withDeploymentName("gpt-4o")
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic.adoc
index 63e2e3e895..c6bd2bce3a 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic.adoc
@@ -204,9 +204,9 @@ AnthropicChatBedrockApi anthropicApi = new AnthropicChatBedrockApi(
BedrockAnthropicChatModel chatModel = new BedrockAnthropicChatModel(anthropicApi,
AnthropicChatOptions.builder()
- .withTemperature(0.6f)
+ .withTemperature(0.6)
.withTopK(10)
- .withTopP(0.8f)
+ .withTopP(0.8)
.withMaxTokensToSample(100)
.withAnthropicVersion(AnthropicChatBedrockApi.DEFAULT_ANTHROPIC_VERSION)
.build());
@@ -238,7 +238,7 @@ AnthropicChatBedrockApi anthropicChatApi = new AnthropicChatBedrockApi(
AnthropicChatRequest request = AnthropicChatRequest
.builder(String.format(AnthropicChatBedrockApi.PROMPT_TEMPLATE, "Name 3 famous pirates"))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokensToSample(300)
.withTopK(10)
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic3.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic3.adoc
index 2f547788ea..b62a580cf3 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic3.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-anthropic3.adoc
@@ -245,9 +245,9 @@ Anthropic3ChatBedrockApi anthropicApi = new Anthropic3ChatBedrockApi(
BedrockAnthropic3ChatModel chatModel = new BedrockAnthropic3ChatModel(anthropicApi,
AnthropicChatOptions.builder()
- .withTemperature(0.6f)
+ .withTemperature(0.6)
.withTopK(10)
- .withTopP(0.8f)
+ .withTopP(0.8)
.withMaxTokensToSample(100)
.withAnthropicVersion(AnthropicChatBedrockApi.DEFAULT_ANTHROPIC_VERSION)
.build());
@@ -275,7 +275,7 @@ Anthropic3ChatBedrockApi anthropicChatApi = new Anthropic3ChatBedrockApi(
AnthropicChatRequest request = AnthropicChatRequest
.builder(String.format(Anthropic3ChatBedrockApi.PROMPT_TEMPLATE, "Name 3 famous pirates"))
- .withTemperature(0.8f)
+ .withTemperature(0.8)
.withMaxTokensToSample(300)
.withTopK(10)
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-cohere.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-cohere.adoc
index c4345a46c4..2d480fd44e 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-cohere.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-cohere.adoc
@@ -195,11 +195,11 @@ CohereChatBedrockApi api = new CohereChatBedrockApi(CohereChatModel.COHERE_COMMA
BedrockCohereChatModel chatModel = new BedrockCohereChatModel(api,
BedrockCohereChatOptions.builder()
- .withTemperature(0.6f)
+ .withTemperature(0.6)
.withTopK(10)
- .withTopP(0.5f)
+ .withTopP(0.5)
.withMaxTokens(678)
- .build()
+ .build());
ChatResponse response = chatModel.call(
new Prompt("Generate the names of 5 famous pirates."));
@@ -231,8 +231,8 @@ CohereChatBedrockApi cohereChatApi = new CohereChatBedrockApi(
var request = CohereChatRequest
.builder("What is the capital of Bulgaria and what is the size? What it the national anthem?")
.withStream(false)
- .withTemperature(0.5f)
- .withTopP(0.8f)
+ .withTemperature(0.5)
+ .withTopP(0.8)
.withTopK(15)
.withMaxTokens(100)
.withStopSequences(List.of("END"))
@@ -247,8 +247,8 @@ CohereChatResponse response = cohereChatApi.chatCompletion(request);
var request = CohereChatRequest
.builder("What is the capital of Bulgaria and what is the size? What it the national anthem?")
.withStream(true)
- .withTemperature(0.5f)
- .withTopP(0.8f)
+ .withTemperature(0.5)
+ .withTopP(0.8)
.withTopK(15)
.withMaxTokens(100)
.withStopSequences(List.of("END"))
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-jurassic2.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-jurassic2.adoc
index d1d8956ce5..ca29f165e1 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-jurassic2.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-jurassic2.adoc
@@ -183,9 +183,9 @@ Ai21Jurassic2ChatBedrockApi api = new Ai21Jurassic2ChatBedrockApi(Ai21Jurassic2C
BedrockAi21Jurassic2ChatModel chatModel = new BedrockAi21Jurassic2ChatModel(api,
BedrockAi21Jurassic2ChatOptions.builder()
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.withMaxTokens(100)
- .withTopP(0.9f).build());
+ .withTopP(0.9).build());
ChatResponse response = chatModel.call(
new Prompt("Generate the names of 5 famous pirates."));
@@ -209,8 +209,8 @@ Ai21Jurassic2ChatBedrockApi jurassic2ChatApi = new Ai21Jurassic2ChatBedrockApi(
Duration.ofMillis(1000L));
Ai21Jurassic2ChatRequest request = Ai21Jurassic2ChatRequest.builder("Hello, my name is")
- .withTemperature(0.9f)
- .withTopP(0.9f)
+ .withTemperature(0.9)
+ .withTopP(0.9)
.withMaxTokens(20)
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-llama.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-llama.adoc
index d8a0c63476..a51ca34086 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-llama.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-llama.adoc
@@ -193,9 +193,9 @@ LlamaChatBedrockApi api = new LlamaChatBedrockApi(LlamaChatModel.LLAMA2_70B_CHAT
BedrockLlamaChatModel chatModel = new BedrockLlamaChatModel(api,
BedrockLlamaChatOptions.builder()
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.withMaxGenLen(100)
- .withTopP(0.9f).build());
+ .withTopP(0.9).build());
ChatResponse response = chatModel.call(
new Prompt("Generate the names of 5 famous pirates."));
@@ -225,8 +225,8 @@ LlamaChatBedrockApi llamaChatApi = new LlamaChatBedrockApi(
Duration.ofMillis(1000L));
LlamaChatRequest request = LlamaChatRequest.builder("Hello, my name is")
- .withTemperature(0.9f)
- .withTopP(0.9f)
+ .withTemperature(0.9)
+ .withTopP(0.9)
.withMaxGenLen(20)
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-titan.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-titan.adoc
index 45f836b4c4..9f34fe50bd 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-titan.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/bedrock/bedrock-titan.adoc
@@ -192,8 +192,8 @@ TitanChatBedrockApi titanApi = new TitanChatBedrockApi(
BedrockTitanChatModel chatModel = new BedrockTitanChatModel(titanApi,
BedrockTitanChatOptions.builder()
- .withTemperature(0.6f)
- .withTopP(0.8f)
+ .withTemperature(0.6)
+ .withTopP(0.8)
.withMaxTokenCount(100)
.build());
@@ -223,8 +223,8 @@ TitanChatBedrockApi titanBedrockApi = new TitanChatBedrockApi(TitanChatCompletio
Region.EU_CENTRAL_1.id(), Duration.ofMillis(1000L));
TitanChatRequest titanChatRequest = TitanChatRequest.builder("Give me the names of 3 famous pirates?")
- .withTemperature(0.5f)
- .withTopP(0.9f)
+ .withTemperature(0.5)
+ .withTopP(0.9)
.withMaxTokenCount(100)
.withStopSequences(List.of("|"))
.build();
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/minimax-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/minimax-chat.adoc
index 60baca0cc8..c0bfcce0a3 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/minimax-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/minimax-chat.adoc
@@ -122,7 +122,7 @@ ChatResponse response = chatModel.call(
"Generate the names of 5 famous pirates.",
MiniMaxChatOptions.builder()
.withModel(MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue())
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.build()
));
----
@@ -205,7 +205,7 @@ var miniMaxApi = new MiniMaxApi(System.getenv("MINIMAX_API_KEY"));
var chatModel = new MiniMaxChatModel(miniMaxApi, MiniMaxChatOptions.builder()
.withModel(MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue())
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build());
@@ -236,11 +236,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = miniMaxApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue(), 0.7f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue(), 0.7, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = miniMaxApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue(), 0.7f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MiniMaxApi.ChatModel.ABAB_5_5_Chat.getValue(), 0.7, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/api/MiniMaxApi.java[MiniMaxApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/mistralai-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/mistralai-chat.adoc
index 4d246676ce..d92b7e598d 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/mistralai-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/mistralai-chat.adoc
@@ -128,7 +128,7 @@ ChatResponse response = chatModel.call(
"Generate the names of 5 famous pirates.",
MistralAiChatOptions.builder()
.withModel(MistralAiApi.ChatModel.LARGE.getValue())
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.build()
));
----
@@ -226,7 +226,7 @@ var mistralAiApi = new MistralAiApi(System.getenv("MISTRAL_AI_API_KEY"));
var chatModel = new MistralAiChatModel(mistralAiApi, MistralAiChatOptions.builder()
.withModel(MistralAiApi.ChatModel.LARGE.getValue())
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build());
@@ -257,11 +257,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = mistralAiApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MistralAiApi.ChatModel.LARGE.getValue(), 0.8f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MistralAiApi.ChatModel.LARGE.getValue(), 0.8, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = mistralAiApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MistralAiApi.ChatModel.LARGE.getValue(), 0.8f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MistralAiApi.ChatModel.LARGE.getValue(), 0.8, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/api/MistralAiApi.java[MistralAiApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/moonshot-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/moonshot-chat.adoc
index 3b9383f700..d2b68f93e6 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/moonshot-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/moonshot-chat.adoc
@@ -121,7 +121,7 @@ ChatResponse response = chatModel.call(
"Generate the names of 5 famous pirates.",
MoonshotChatOptions.builder()
.withModel(MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue())
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.build()
));
----
@@ -204,7 +204,7 @@ var moonshotApi = new MoonshotApi(System.getenv("MOONSHOT_API_KEY"));
var chatModel = new MoonshotChatModel(moonshotApi, MoonshotChatOptions.builder()
.withModel(MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue())
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build());
@@ -235,11 +235,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = moonshotApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.7f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.7, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = moonshotApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.7f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), MoonshotApi.ChatModel.MOONSHOT_V1_8K.getValue(), 0.7, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/api/MoonshotApi.java[MoonshotApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
index d307c7a269..4a5a23665f 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
@@ -278,7 +278,7 @@ var ollamaApi = new OllamaApi();
var chatModel = new OllamaChatModel(ollamaApi,
OllamaOptions.create()
.withModel(OllamaOptions.DEFAULT_MODEL)
- .withTemperature(0.9f));
+ .withTemperature(0.9));
ChatResponse response = chatModel.call(
new Prompt("Generate the names of 5 famous pirates."));
@@ -318,7 +318,7 @@ var request = ChatRequest.builder("orca-mini")
.withContent("What is the capital of Bulgaria and what is the size? "
+ "What is the national anthem?")
.build()))
- .withOptions(OllamaOptions.create().withTemperature(0.9f))
+ .withOptions(OllamaOptions.create().withTemperature(0.9))
.build();
ChatResponse response = ollamaApi.chat(request);
@@ -329,7 +329,7 @@ var request2 = ChatRequest.builder("orca-mini")
.withMessages(List.of(Message.builder(Role.USER)
.withContent("What is the capital of Bulgaria and what is the size? " + "What is the national anthem?")
.build()))
- .withOptions(OllamaOptions.create().withTemperature(0.9f).toMap())
+ .withOptions(OllamaOptions.create().withTemperature(0.9).toMap())
.build();
Flux<ChatResponse> streamingResponse = ollamaApi.streamingChat(request2);
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
index cf13a6a358..9e9370f793 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
@@ -429,11 +429,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = openAiApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = openAiApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), "gpt-3.5-turbo", 0.8, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java[OpenAiApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/qianfan-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/qianfan-chat.adoc
index a2abbb1326..3e34675969 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/qianfan-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/qianfan-chat.adoc
@@ -124,7 +124,7 @@ ChatResponse response = chatClient.call(
"Generate the names of 5 famous pirates.",
QianFanChatOptions.builder()
.withModel(QianFanApi.ChatModel.ERNIE_Speed_8K.getValue())
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.build()
));
----
@@ -208,7 +208,7 @@ var qianFanApi = new QianFanApi(System.getenv("QIANFAN_API_KEY"), System.getenv(
var chatClient = new QianFanChatModel(qianFanApi, QianFanChatOptions.builder()
.withModel(QianFanApi.ChatModel.ERNIE_Speed_8K.getValue())
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build());
@@ -241,11 +241,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = qianFanApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), systemMessage, QianFanApi.ChatModel.ERNIE_Speed_8K.getValue(), 0.7f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), systemMessage, QianFanApi.ChatModel.ERNIE_Speed_8K.getValue(), 0.7, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = qianFanApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), systemMessage, QianFanApi.ChatModel.ERNIE_Speed_8K.getValue(), 0.7f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), systemMessage, QianFanApi.ChatModel.ERNIE_Speed_8K.getValue(), 0.7, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-qianfan/src/main/java/org/springframework/ai/qianfan/api/QianFanApi.java[QianFanApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/zhipuai-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/zhipuai-chat.adoc
index 7582cae4ad..d946274232 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/zhipuai-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/zhipuai-chat.adoc
@@ -122,7 +122,7 @@ ChatResponse response = chatModel.call(
"Generate the names of 5 famous pirates.",
ZhiPuAiChatOptions.builder()
.withModel(ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue())
- .withTemperature(0.5f)
+ .withTemperature(0.5)
.build()
));
----
@@ -205,7 +205,7 @@ var zhiPuAiApi = new ZhiPuAiApi(System.getenv("ZHIPU_AI_API_KEY"));
var chatModel = new ZhiPuAiChatModel(zhiPuAiApi, ZhiPuAiChatOptions.builder()
.withModel(ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue())
- .withTemperature(0.4f)
+ .withTemperature(0.4)
.withMaxTokens(200)
.build());
@@ -236,11 +236,11 @@ ChatCompletionMessage chatCompletionMessage =
// Sync request
ResponseEntity<ChatCompletion> response = zhiPuAiApi.chatCompletionEntity(
- new ChatCompletionRequest(List.of(chatCompletionMessage), ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue(), 0.7f, false));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue(), 0.7, false));
// Streaming request
Flux<ChatCompletionChunk> streamResponse = zhiPuAiApi.chatCompletionStream(
- new ChatCompletionRequest(List.of(chatCompletionMessage), ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue(), 0.7f, true));
+ new ChatCompletionRequest(List.of(chatCompletionMessage), ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue(), 0.7, true));
----
Follow the https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-zhipuai/src/main/java/org/springframework/ai/zhipuai/api/ZhiPuAiApi.java[ZhiPuAiApi.java]'s JavaDoc for further information.
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/upgrade-notes.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/upgrade-notes.adoc
index 0224c328db..3978b06288 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/upgrade-notes.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/upgrade-notes.adoc
@@ -1,6 +1,10 @@
[[upgrade-notes]]
= Upgrading Notes
+== Upgrading to 1.0.0.RC1
+
+* The type of the portable chat options (`frequencyPenalty`, `presencePenalty`, `temperature`, `topP`) has been changed from `Float` to `Double`.
+
== Upgrading to 1.0.0.M2
* The configuration prefix for the Chroma Vector Store has been changes from `spring.ai.vectorstore.chroma.store` to `spring.ai.vectorstore.chroma` in order to align with the naming conventions of other vector stores.
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/azure/openai/AzureOpenAiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/azure/openai/AzureOpenAiChatProperties.java
index 7e0055802e..7ae5ebc8d6 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/azure/openai/AzureOpenAiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/azure/openai/AzureOpenAiChatProperties.java
@@ -36,7 +36,7 @@ public class AzureOpenAiChatProperties {
@NestedConfigurationProperty
private AzureOpenAiChatOptions options = AzureOpenAiChatOptions.builder()
.withDeploymentName(DEFAULT_DEPLOYMENT_NAME)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public AzureOpenAiChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic/BedrockAnthropicChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic/BedrockAnthropicChatProperties.java
index e9b2636773..daee6365bb 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic/BedrockAnthropicChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic/BedrockAnthropicChatProperties.java
@@ -47,7 +47,7 @@ public class BedrockAnthropicChatProperties {
@NestedConfigurationProperty
private AnthropicChatOptions options = AnthropicChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withMaxTokensToSample(300)
.withTopK(10)
.withStopSequences(List.of("\n\nHuman:"))
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic3/BedrockAnthropic3ChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic3/BedrockAnthropic3ChatProperties.java
index 71086b0d66..96ddc3e06b 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic3/BedrockAnthropic3ChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/anthropic3/BedrockAnthropic3ChatProperties.java
@@ -46,7 +46,7 @@ public class BedrockAnthropic3ChatProperties {
@NestedConfigurationProperty
private Anthropic3ChatOptions options = Anthropic3ChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withMaxTokens(300)
.withTopK(10)
.withAnthropicVersion(Anthropic3ChatBedrockApi.DEFAULT_ANTHROPIC_VERSION)
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/jurrasic2/BedrockAi21Jurassic2ChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/jurrasic2/BedrockAi21Jurassic2ChatProperties.java
index eccd7e0c9e..183c050bcf 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/jurrasic2/BedrockAi21Jurassic2ChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/jurrasic2/BedrockAi21Jurassic2ChatProperties.java
@@ -45,7 +45,7 @@ public class BedrockAi21Jurassic2ChatProperties {
@NestedConfigurationProperty
private BedrockAi21Jurassic2ChatOptions options = BedrockAi21Jurassic2ChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withMaxTokens(500)
.build();
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/llama/BedrockLlamaChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/llama/BedrockLlamaChatProperties.java
index 048b7dde2b..c58742ee42 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/llama/BedrockLlamaChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/llama/BedrockLlamaChatProperties.java
@@ -43,7 +43,7 @@ public class BedrockLlamaChatProperties {
@NestedConfigurationProperty
private BedrockLlamaChatOptions options = BedrockLlamaChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withMaxGenLen(300)
.build();
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/titan/BedrockTitanChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/titan/BedrockTitanChatProperties.java
index b196e9797a..4b6df741ab 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/titan/BedrockTitanChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/bedrock/titan/BedrockTitanChatProperties.java
@@ -42,7 +42,7 @@ public class BedrockTitanChatProperties {
private String model = TitanChatModel.TITAN_TEXT_EXPRESS_V1.id();
@NestedConfigurationProperty
- private BedrockTitanChatOptions options = BedrockTitanChatOptions.builder().withTemperature(0.7f).build();
+ private BedrockTitanChatOptions options = BedrockTitanChatOptions.builder().withTemperature(0.7).build();
public boolean isEnabled() {
return enabled;
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/minimax/MiniMaxChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/minimax/MiniMaxChatProperties.java
index e32748b30c..5ca297949a 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/minimax/MiniMaxChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/minimax/MiniMaxChatProperties.java
@@ -40,7 +40,7 @@ public class MiniMaxChatProperties extends MiniMaxParentProperties {
@NestedConfigurationProperty
private MiniMaxChatOptions options = MiniMaxChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public MiniMaxChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/mistralai/MistralAiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/mistralai/MistralAiChatProperties.java
index b7c24972e3..9e46cc8307 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/mistralai/MistralAiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/mistralai/MistralAiChatProperties.java
@@ -35,7 +35,7 @@ public class MistralAiChatProperties extends MistralAiParentProperties {
private static final Double DEFAULT_TEMPERATURE = 0.7;
- private static final Float DEFAULT_TOP_P = 1.0f;
+ private static final Double DEFAULT_TOP_P = 1.0;
private static final Boolean IS_ENABLED = false;
@@ -51,7 +51,7 @@ public MistralAiChatProperties() {
@NestedConfigurationProperty
private MistralAiChatOptions options = MistralAiChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.withSafePrompt(!IS_ENABLED)
.withTopP(DEFAULT_TOP_P)
.build();
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/moonshot/MoonshotChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/moonshot/MoonshotChatProperties.java
index 843a264247..91918ff00c 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/moonshot/MoonshotChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/moonshot/MoonshotChatProperties.java
@@ -40,7 +40,7 @@ public class MoonshotChatProperties extends MoonshotParentProperties {
@NestedConfigurationProperty
private MoonshotChatOptions options = MoonshotChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public MoonshotChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/openai/OpenAiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/openai/OpenAiChatProperties.java
index f1a301cd93..e2014de6b8 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/openai/OpenAiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/openai/OpenAiChatProperties.java
@@ -40,7 +40,7 @@ public class OpenAiChatProperties extends OpenAiParentProperties {
@NestedConfigurationProperty
private OpenAiChatOptions options = OpenAiChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public OpenAiChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/qianfan/QianFanChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/qianfan/QianFanChatProperties.java
index 31e631a500..cd0edcd3d7 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/qianfan/QianFanChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/qianfan/QianFanChatProperties.java
@@ -40,7 +40,7 @@ public class QianFanChatProperties extends QianFanParentProperties {
@NestedConfigurationProperty
private QianFanChatOptions options = QianFanChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public QianFanChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/gemini/VertexAiGeminiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/gemini/VertexAiGeminiChatProperties.java
index b8a93804d7..4e9572c153 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/gemini/VertexAiGeminiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/gemini/VertexAiGeminiChatProperties.java
@@ -36,7 +36,7 @@ public class VertexAiGeminiChatProperties {
* Vertex AI Gemini API generative options.
*/
private VertexAiGeminiChatOptions options = VertexAiGeminiChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withCandidateCount(1)
.withModel(DEFAULT_MODEL)
.build();
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/palm2/VertexAiPlam2ChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/palm2/VertexAiPlam2ChatProperties.java
index d966f9d7e0..417f9a89b1 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/palm2/VertexAiPlam2ChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/vertexai/palm2/VertexAiPlam2ChatProperties.java
@@ -38,7 +38,7 @@ public class VertexAiPlam2ChatProperties {
* Vertex AI PaLM API generative options.
*/
private VertexAiPaLm2ChatOptions options = VertexAiPaLm2ChatOptions.builder()
- .withTemperature(0.7f)
+ .withTemperature(0.7)
.withTopP(null)
.withCandidateCount(1)
.withTopK(20)
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/watsonxai/WatsonxAiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/watsonxai/WatsonxAiChatProperties.java
index c0db014dd5..3f9dc8fe9d 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/watsonxai/WatsonxAiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/watsonxai/WatsonxAiChatProperties.java
@@ -43,13 +43,13 @@ public class WatsonxAiChatProperties {
@NestedConfigurationProperty
private WatsonxAiChatOptions options = WatsonxAiChatOptions.builder()
.withModel("google/flan-ul2")
- .withTemperature(0.7f)
- .withTopP(1.0f)
+ .withTemperature(0.7)
+ .withTopP(1.0)
.withTopK(50)
.withDecodingMethod("greedy")
.withMaxNewTokens(20)
.withMinNewTokens(0)
- .withRepetitionPenalty(1.0f)
+ .withRepetitionPenalty(1.0)
.withStopSequences(List.of())
.build();
diff --git a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiChatProperties.java b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiChatProperties.java
index db916db120..d1179e9900 100644
--- a/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiChatProperties.java
+++ b/spring-ai-spring-boot-autoconfigure/src/main/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiChatProperties.java
@@ -40,7 +40,7 @@ public class ZhiPuAiChatProperties extends ZhiPuAiParentProperties {
@NestedConfigurationProperty
private ZhiPuAiChatOptions options = ZhiPuAiChatOptions.builder()
.withModel(DEFAULT_CHAT_MODEL)
- .withTemperature(DEFAULT_TEMPERATURE.floatValue())
+ .withTemperature(DEFAULT_TEMPERATURE)
.build();
public ZhiPuAiChatOptions getOptions() {
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/AnthropicPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/AnthropicPropertiesTests.java
index 0cee943b01..ca9cca03f5 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/AnthropicPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/AnthropicPropertiesTests.java
@@ -54,7 +54,7 @@ public void connectionProperties() {
assertThat(connectionProperties.getBetaVersion()).isEqualTo("7777");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
// enabled is true by default
assertThat(chatProperties.isEnabled()).isTrue();
});
@@ -89,8 +89,8 @@ public void chatOptionsTest() {
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getStopSequences()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
assertThat(chatProperties.getOptions().getTopK()).isEqualTo(100);
assertThat(chatProperties.getOptions().getMetadata().userId()).isEqualTo("MyUserId");
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/azure/AzureOpenAiAutoConfigurationPropertyTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/azure/AzureOpenAiAutoConfigurationPropertyTests.java
index 48d1e9f343..581f178c04 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/azure/AzureOpenAiAutoConfigurationPropertyTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/azure/AzureOpenAiAutoConfigurationPropertyTests.java
@@ -83,14 +83,14 @@ public void chatPropertiesTest() {
assertThat(embeddingProperties.getOptions().getDeploymentName()).isEqualTo("text-embedding-ada-002");
assertThat(chatProperties.getOptions().getDeploymentName()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5f);
+ assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5);
assertThat(chatProperties.getOptions().getLogitBias().get("myTokenId")).isEqualTo(-5);
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getN()).isEqualTo(10);
assertThat(chatProperties.getOptions().getPresencePenalty()).isEqualTo(0);
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
assertThat(chatProperties.getOptions().getUser()).isEqualTo("userXYZ");
});
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/minimax/MiniMaxPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/minimax/MiniMaxPropertiesTests.java
index 07b2d4e6a7..f8a2f5e2a1 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/minimax/MiniMaxPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/minimax/MiniMaxPropertiesTests.java
@@ -62,7 +62,7 @@ public void chatProperties() {
assertThat(chatProperties.getBaseUrl()).isNull();
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -91,7 +91,7 @@ public void chatOverrideConnectionProperties() {
assertThat(chatProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL2");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -211,7 +211,7 @@ public void chatOptionsTest() {
assertThat(embeddingProperties.getOptions().getModel()).isEqualTo("embo-01");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5f);
+ assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5);
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getN()).isEqualTo(10);
assertThat(chatProperties.getOptions().getPresencePenalty()).isEqualTo(0);
@@ -219,8 +219,8 @@ public void chatOptionsTest() {
.isEqualTo(new MiniMaxApi.ChatCompletionRequest.ResponseFormat("json"));
assertThat(chatProperties.getOptions().getSeed()).isEqualTo(66);
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
JSONAssert.assertEquals("{\"type\":\"function\",\"function\":{\"name\":\"toolChoiceFunctionName\"}}",
chatProperties.getOptions().getToolChoice(), JSONCompareMode.LENIENT);
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/moonshot/MoonshotPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/moonshot/MoonshotPropertiesTests.java
index 1ee7986b81..213ccd9451 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/moonshot/MoonshotPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/moonshot/MoonshotPropertiesTests.java
@@ -52,7 +52,7 @@ public void chatProperties() {
assertThat(chatProperties.getBaseUrl()).isNull();
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -81,7 +81,7 @@ public void chatOverrideConnectionProperties() {
assertThat(chatProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL2");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -117,13 +117,13 @@ public void chatOptionsTest() {
assertThat(connectionProperties.getApiKey()).isEqualTo("API_KEY");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5f);
+ assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5);
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getN()).isEqualTo(10);
assertThat(chatProperties.getOptions().getPresencePenalty()).isEqualTo(0);
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
assertThat(chatProperties.getOptions().getUser()).isEqualTo("userXYZ");
});
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/ollama/OllamaChatAutoConfigurationTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/ollama/OllamaChatAutoConfigurationTests.java
index fff87017aa..77e7e06a19 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/ollama/OllamaChatAutoConfigurationTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/ollama/OllamaChatAutoConfigurationTests.java
@@ -49,8 +49,8 @@ public void propertiesTest() {
assertThat(chatProperties.getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
assertThat(chatProperties.getOptions().getTopK()).isEqualTo(123);
});
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/openai/OpenAiPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/openai/OpenAiPropertiesTests.java
index 05dbdda4c4..5c38bfee6c 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/openai/OpenAiPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/openai/OpenAiPropertiesTests.java
@@ -65,7 +65,7 @@ public void chatProperties() {
assertThat(chatProperties.getBaseUrl()).isNull();
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -119,7 +119,7 @@ public void chatOverrideConnectionProperties() {
assertThat(chatProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL2");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -408,15 +408,15 @@ public void chatOptionsTest() {
assertThat(embeddingProperties.getOptions().getModel()).isEqualTo("text-embedding-ada-002");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5f);
+ assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5);
assertThat(chatProperties.getOptions().getLogitBias().get("myTokenId")).isEqualTo(-5);
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getN()).isEqualTo(10);
assertThat(chatProperties.getOptions().getPresencePenalty()).isEqualTo(0);
assertThat(chatProperties.getOptions().getSeed()).isEqualTo(66);
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
JSONAssert.assertEquals("{\"type\":\"function\",\"function\":{\"name\":\"toolChoiceFunctionName\"}}",
chatProperties.getOptions().getToolChoice(), JSONCompareMode.LENIENT);
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/qianfan/QianFanPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/qianfan/QianFanPropertiesTests.java
index a389291f41..c5acafd78f 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/qianfan/QianFanPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/qianfan/QianFanPropertiesTests.java
@@ -63,7 +63,7 @@ public void chatProperties() {
assertThat(chatProperties.getBaseUrl()).isNull();
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -96,7 +96,7 @@ public void chatOverrideConnectionProperties() {
assertThat(chatProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL2");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -192,14 +192,14 @@ public void chatOptionsTest() {
assertThat(embeddingProperties.getOptions().getModel()).isEqualTo("bge_large_zh");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5f);
+ assertThat(chatProperties.getOptions().getFrequencyPenalty()).isEqualTo(-1.5);
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getPresencePenalty()).isEqualTo(0);
assertThat(chatProperties.getOptions().getResponseFormat())
.isEqualTo(new QianFanApi.ChatCompletionRequest.ResponseFormat("json"));
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
});
}
diff --git a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiPropertiesTests.java b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiPropertiesTests.java
index 6d7cd9afb1..2aaf629462 100644
--- a/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiPropertiesTests.java
+++ b/spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/zhipuai/ZhiPuAiPropertiesTests.java
@@ -61,7 +61,7 @@ public void chatProperties() {
assertThat(chatProperties.getBaseUrl()).isNull();
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -90,7 +90,7 @@ public void chatOverrideConnectionProperties() {
assertThat(chatProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL2");
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
});
}
@@ -259,8 +259,8 @@ public void chatOptionsTest() {
assertThat(chatProperties.getOptions().getModel()).isEqualTo("MODEL_XYZ");
assertThat(chatProperties.getOptions().getMaxTokens()).isEqualTo(123);
assertThat(chatProperties.getOptions().getStop()).contains("boza", "koza");
- assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55f);
- assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56f);
+ assertThat(chatProperties.getOptions().getTemperature()).isEqualTo(0.55);
+ assertThat(chatProperties.getOptions().getTopP()).isEqualTo(0.56);
assertThat(chatProperties.getOptions().getRequestId()).isEqualTo("RequestId");
assertThat(chatProperties.getOptions().getDoSample()).isEqualTo(Boolean.TRUE);