Remove deprecations from 1.0.0-M6
   - Remove deprecations from models, vector stores and usage
   - Deprecations from FunctionCallback and ObservationContext/Convention will be in a separate PR

Signed-off-by: Ilayaperumal Gopinathan <[email protected]>
ilayaperumalg committed Feb 17, 2025
1 parent fd9f388 commit b87bac3
Showing 72 changed files with 197 additions and 996 deletions.
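
The common thread across the diffs below is the move from deprecated chat-model constructors to the corresponding builders. As a representative before/after sketch (taken from the Anthropic changes that follow; the other models migrate the same way):

// Before: deprecated constructor, removed in this commit
// AnthropicChatModel chatModel = new AnthropicChatModel(api);

// After: builder-based construction
AnthropicChatModel chatModel = AnthropicChatModel.builder()
	.anthropicApi(api)
	.build();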
@@ -410,7 +410,7 @@ private String getApiKey() {

@Bean
public AnthropicChatModel openAiChatModel(AnthropicApi api) {
return new AnthropicChatModel(api);
return AnthropicChatModel.builder().anthropicApi(api).build();
}

}
@@ -40,8 +40,7 @@ private String getApiKey() {

@Bean
public AnthropicChatModel anthropicChatModel(AnthropicApi api) {
AnthropicChatModel anthropicChatModel = new AnthropicChatModel(api);
return anthropicChatModel;
return AnthropicChatModel.builder().anthropicApi(api).build();
}

}
@@ -161,47 +161,6 @@ public class AzureOpenAiChatModel extends AbstractToolCallSupport implements Cha
*/
private final ToolCallingManager toolCallingManager;

@Deprecated
public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder) {
this(openAIClientBuilder,
AzureOpenAiChatOptions.builder()
.deploymentName(DEFAULT_DEPLOYMENT_NAME)
.temperature(DEFAULT_TEMPERATURE)
.build());
}

@Deprecated
public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder, AzureOpenAiChatOptions options) {
this(openAIClientBuilder, options, null);
}

@Deprecated
public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder, AzureOpenAiChatOptions options,
FunctionCallbackResolver functionCallbackResolver) {
this(openAIClientBuilder, options, functionCallbackResolver, List.of());
}

@Deprecated
public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder, AzureOpenAiChatOptions options,
@Nullable FunctionCallbackResolver functionCallbackResolver,
@Nullable List<FunctionCallback> toolFunctionCallbacks) {
this(openAIClientBuilder, options, functionCallbackResolver, toolFunctionCallbacks, ObservationRegistry.NOOP);
}

@Deprecated
public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder, AzureOpenAiChatOptions options,
@Nullable FunctionCallbackResolver functionCallbackResolver,
@Nullable List<FunctionCallback> toolFunctionCallbacks, ObservationRegistry observationRegistry) {
this(openAIClientBuilder, options,
LegacyToolCallingManager.builder()
.functionCallbackResolver(functionCallbackResolver)
.functionCallbacks(toolFunctionCallbacks)
.build(),
observationRegistry);
logger.warn("This constructor is deprecated and will be removed in the next milestone. "
+ "Please use the AzureOpenAiChatModel.Builder or the new constructor accepting ToolCallingManager instead.");
}

public AzureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder, AzureOpenAiChatOptions defaultOptions,
ToolCallingManager toolCallingManager, ObservationRegistry observationRegistry) {
// We do not pass the 'defaultOptions' to the AbstractToolSupport,
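With the deprecated Azure constructors removed, callers are left with the builder or the remaining constructor shown above, which takes a ToolCallingManager. A minimal caller-side sketch under that signature; the deployment name is a placeholder and the ToolCallingManager is assumed to be supplied elsewhere (for example as an injected bean):

@Bean
public AzureOpenAiChatModel azureOpenAiChatModel(OpenAIClientBuilder openAIClientBuilder,
		ToolCallingManager toolCallingManager) {
	// Options the removed convenience constructors used to fill in with defaults.
	AzureOpenAiChatOptions options = AzureOpenAiChatOptions.builder()
		.deploymentName("my-deployment") // placeholder: your Azure OpenAI deployment name
		.temperature(0.7)
		.build();
	return new AzureOpenAiChatModel(openAIClientBuilder, options, toolCallingManager, ObservationRegistry.NOOP);
}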
@@ -61,8 +61,10 @@ public class BedrockConverseUsageAggregationTests {

@BeforeEach
public void beforeEach() {
this.chatModel = new BedrockProxyChatModel(this.bedrockRuntimeClient, this.bedrockRuntimeAsyncClient,
FunctionCallingOptions.builder().build(), null, List.of(), ObservationRegistry.NOOP);
this.chatModel = BedrockProxyChatModel.builder()
.bedrockRuntimeClient(this.bedrockRuntimeClient)
.bedrockRuntimeAsyncClient(this.bedrockRuntimeAsyncClient)
.build();
}

@Test
@@ -35,6 +35,7 @@
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.model.function.FunctionCallingOptions;
import org.springframework.ai.model.tool.ToolCallingChatOptions;
import org.springframework.ai.observation.conventions.AiOperationType;
import org.springframework.ai.observation.conventions.AiProvider;
import org.springframework.beans.factory.annotation.Autowired;
@@ -170,10 +171,10 @@ public BedrockProxyChatModel bedrockConverseChatModel(ObservationRegistry observ
String modelId = "anthropic.claude-3-5-sonnet-20240620-v1:0";

return BedrockProxyChatModel.builder()
.withCredentialsProvider(EnvironmentVariableCredentialsProvider.create())
.withRegion(Region.US_EAST_1)
.withObservationRegistry(observationRegistry)
.withDefaultOptions(FunctionCallingOptions.builder().model(modelId).build())
.credentialsProvider(EnvironmentVariableCredentialsProvider.create())
.region(Region.US_EAST_1)
.observationRegistry(observationRegistry)
.defaultOptions(ToolCallingChatOptions.builder().model(modelId).build())
.build();
}

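The Bedrock changes above fold two renames into one: the builder setters drop their with- prefix (withRegion becomes region, and so on), and the default options move from FunctionCallingOptions to ToolCallingChatOptions. A minimal standalone sketch in the new style, reusing the credentials, region, and model id from the tests above:

BedrockProxyChatModel chatModel = BedrockProxyChatModel.builder()
	.credentialsProvider(EnvironmentVariableCredentialsProvider.create())
	.region(Region.US_EAST_1)
	.defaultOptions(ToolCallingChatOptions.builder()
		.model("anthropic.claude-3-5-sonnet-20240620-v1:0")
		.build())
	.build();

var response = chatModel.call(new Prompt("Tell me a joke?"));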
@@ -43,8 +43,8 @@ public static void main(String[] args) {
var prompt = new Prompt("Tell me a joke?", ChatOptions.builder().model(modelId).build());

var chatModel = BedrockProxyChatModel.builder()
.withCredentialsProvider(EnvironmentVariableCredentialsProvider.create())
.withRegion(Region.US_EAST_1)
.credentialsProvider(EnvironmentVariableCredentialsProvider.create())
.region(Region.US_EAST_1)
.build();

var chatResponse = chatModel.call(prompt);
@@ -58,8 +58,8 @@ public static void main(String[] args) {
.build());

BedrockProxyChatModel chatModel = BedrockProxyChatModel.builder()
.withCredentialsProvider(EnvironmentVariableCredentialsProvider.create())
.withRegion(Region.US_EAST_1)
.credentialsProvider(EnvironmentVariableCredentialsProvider.create())
.region(Region.US_EAST_1)
.build();

var response = chatModel.call(prompt);
@@ -120,48 +120,6 @@ public class MistralAiChatModel extends AbstractToolCallSupport implements ChatM
*/
private ChatModelObservationConvention observationConvention = DEFAULT_OBSERVATION_CONVENTION;

/**
* @deprecated Use {@link MistralAiChatModel.Builder}.
*/
@Deprecated
public MistralAiChatModel(MistralAiApi mistralAiApi) {
this(mistralAiApi,
MistralAiChatOptions.builder()
.temperature(0.7)
.topP(1.0)
.safePrompt(false)
.model(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.build());
}

/**
* @deprecated Use {@link MistralAiChatModel.Builder}.
*/
@Deprecated
public MistralAiChatModel(MistralAiApi mistralAiApi, MistralAiChatOptions options) {
this(mistralAiApi, options, null, RetryUtils.DEFAULT_RETRY_TEMPLATE);
}

/**
* @deprecated Use {@link MistralAiChatModel.Builder}.
*/
@Deprecated
public MistralAiChatModel(MistralAiApi mistralAiApi, MistralAiChatOptions options,
@Nullable FunctionCallbackResolver functionCallbackResolver, @Nullable RetryTemplate retryTemplate) {
this(mistralAiApi, options, functionCallbackResolver, List.of(), retryTemplate);
}

/**
* @deprecated Use {@link MistralAiChatModel.Builder}.
*/
@Deprecated
public MistralAiChatModel(MistralAiApi mistralAiApi, MistralAiChatOptions options,
@Nullable FunctionCallbackResolver functionCallbackResolver,
@Nullable List<FunctionCallback> toolFunctionCallbacks, RetryTemplate retryTemplate) {
this(mistralAiApi, options, functionCallbackResolver, toolFunctionCallbacks, retryTemplate,
ObservationRegistry.NOOP);
}

/**
* @deprecated Use {@link MistralAiChatModel.Builder}.
*/
@@ -589,7 +547,7 @@ public static class Builder {
.temperature(0.7)
.topP(1.0)
.safePrompt(false)
.model(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.model(MistralAiApi.ChatModel.SMALL.getValue())
.build();

private ToolCallingManager toolCallingManager;
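With the deprecated MistralAiChatModel constructors gone (and the builder's default model switched from OPEN_MISTRAL_7B to SMALL), construction goes through the builder, as the test configurations further down also show. A caller-side sketch; the retryTemplate setter is optional:

MistralAiChatModel chatModel = MistralAiChatModel.builder()
	.mistralAiApi(mistralAiApi)
	.defaultOptions(MistralAiChatOptions.builder()
		.model(MistralAiApi.ChatModel.SMALL.getValue())
		.temperature(0.7)
		.build())
	.retryTemplate(RetryTemplate.defaultInstance())
	.build();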
@@ -265,12 +265,6 @@ public enum ChatCompletionFinishReason {
public enum ChatModel implements ChatModelDescription {

// @formatter:off
@Deprecated(forRemoval = true, since = "1.0.0-M6")
OPEN_MISTRAL_7B("open-mistral-7b"),
@Deprecated(forRemoval = true, since = "1.0.0-M6")
OPEN_MIXTRAL_7B("open-mixtral-8x7b"),
@Deprecated(forRemoval = true, since = "1.0.0-M6")
OPEN_MIXTRAL_22B("open-mixtral-8x22b"),
// Premier Models
CODESTRAL("codestral-latest"),
LARGE("mistral-large-latest"),
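The three open-weight enum constants are removed outright, and the tests below switch to SMALL. If code still needs one of the removed models, a hedged workaround, assuming the Mistral endpoint still serves that id, is to pass the raw model string instead of the enum constant:

MistralAiChatOptions options = MistralAiChatOptions.builder()
	.model("open-mistral-7b") // the raw id previously exposed as ChatModel.OPEN_MISTRAL_7B
	.build();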
@@ -1,5 +1,5 @@
/*
* Copyright 2023-2024 the original author or authors.
* Copyright 2023-2025 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -70,7 +70,7 @@ void beforeEach() {
@Test
void observationForChatOperation() {
var options = MistralAiChatOptions.builder()
.model(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.model(MistralAiApi.ChatModel.SMALL.getValue())
.maxTokens(2048)
.stop(List.of("this-is-the-end"))
.temperature(0.7)
@@ -91,7 +91,7 @@ void observationForChatOperation() {
@Test
void observationForStreamingChatOperation() {
var options = MistralAiChatOptions.builder()
.model(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.model(MistralAiApi.ChatModel.SMALL.getValue())
.maxTokens(2048)
.stop(List.of("this-is-the-end"))
.temperature(0.7)
@@ -125,12 +125,12 @@ private void validate(ChatResponseMetadata responseMetadata) {
.doesNotHaveAnyRemainingCurrentObservation()
.hasObservationWithNameEqualTo(DefaultChatModelObservationConvention.DEFAULT_NAME)
.that()
.hasContextualNameEqualTo("chat " + MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.hasContextualNameEqualTo("chat " + MistralAiApi.ChatModel.SMALL.getValue())
.hasLowCardinalityKeyValue(LowCardinalityKeyNames.AI_OPERATION_TYPE.asString(),
AiOperationType.CHAT.value())
.hasLowCardinalityKeyValue(LowCardinalityKeyNames.AI_PROVIDER.asString(), AiProvider.MISTRAL_AI.value())
.hasLowCardinalityKeyValue(LowCardinalityKeyNames.REQUEST_MODEL.asString(),
MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
MistralAiApi.ChatModel.SMALL.getValue())
.hasLowCardinalityKeyValue(LowCardinalityKeyNames.RESPONSE_MODEL.asString(),
StringUtils.hasText(responseMetadata.getModel()) ? responseMetadata.getModel()
: KeyValue.NONE_VALUE)
@@ -181,9 +181,13 @@ public MistralAiApi mistralAiApi() {
@Bean
public MistralAiChatModel openAiChatModel(MistralAiApi mistralAiApi,
TestObservationRegistry observationRegistry) {
return new MistralAiChatModel(mistralAiApi, MistralAiChatOptions.builder().build(),
new DefaultFunctionCallbackResolver(), List.of(), RetryTemplate.defaultInstance(),
observationRegistry);
return MistralAiChatModel.builder()
.mistralAiApi(mistralAiApi)
.defaultOptions(MistralAiChatOptions.builder().build())
.functionCallbackResolver(new DefaultFunctionCallbackResolver())
.retryTemplate(RetryTemplate.defaultInstance())
.observationRegistry(observationRegistry)
.build();
}

}
@@ -77,14 +77,16 @@ public void beforeEach() {
this.retryListener = new TestRetryListener();
this.retryTemplate.registerListener(this.retryListener);

this.chatModel = new MistralAiChatModel(this.mistralAiApi,
MistralAiChatOptions.builder()
.temperature(0.7)
.topP(1.0)
.safePrompt(false)
.model(MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue())
.build(),
null, this.retryTemplate);
this.chatModel = MistralAiChatModel.builder()
.mistralAiApi(this.mistralAiApi)
.defaultOptions(MistralAiChatOptions.builder()
.temperature(0.7)
.topP(1.0)
.safePrompt(false)
.model(MistralAiApi.ChatModel.SMALL.getValue())
.build())
.retryTemplate(this.retryTemplate)
.build();
this.embeddingModel = new MistralAiEmbeddingModel(this.mistralAiApi, MetadataMode.EMBED,
MistralAiEmbeddingOptions.builder().withModel(MistralAiApi.EmbeddingModel.EMBED.getValue()).build(),
this.retryTemplate);
@@ -43,8 +43,10 @@ public EmbeddingModel mistralAiEmbeddingModel(MistralAiApi api) {

@Bean
public MistralAiChatModel mistralAiChatModel(MistralAiApi mistralAiApi) {
return new MistralAiChatModel(mistralAiApi,
MistralAiChatOptions.builder().model(MistralAiApi.ChatModel.OPEN_MIXTRAL_7B.getValue()).build());
return MistralAiChatModel.builder()
.mistralAiApi(mistralAiApi)
.defaultOptions(MistralAiChatOptions.builder().model(MistralAiApi.ChatModel.SMALL.getValue()).build())
.build();
}

}
@@ -47,7 +47,7 @@ public class MistralAiApiIT {
void chatCompletionEntity() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
ResponseEntity<ChatCompletion> response = this.mistralAiApi.chatCompletionEntity(new ChatCompletionRequest(
List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, false));
List.of(chatCompletionMessage), MistralAiApi.ChatModel.SMALL.getValue(), 0.8, false));

assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -64,7 +64,7 @@ void chatCompletionEntityWithSystemMessage() {
""", Role.SYSTEM);

ResponseEntity<ChatCompletion> response = this.mistralAiApi.chatCompletionEntity(new ChatCompletionRequest(
List.of(systemMessage, userMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, false));
List.of(systemMessage, userMessage), MistralAiApi.ChatModel.SMALL.getValue(), 0.8, false));

assertThat(response).isNotNull();
assertThat(response.getBody()).isNotNull();
@@ -74,7 +74,7 @@ void chatCompletionEntityWithSystemMessage() {
void chatCompletionStream() {
ChatCompletionMessage chatCompletionMessage = new ChatCompletionMessage("Hello world", Role.USER);
Flux<ChatCompletionChunk> response = this.mistralAiApi.chatCompletionStream(new ChatCompletionRequest(
List.of(chatCompletionMessage), MistralAiApi.ChatModel.OPEN_MISTRAL_7B.getValue(), 0.8, true));
List.of(chatCompletionMessage), MistralAiApi.ChatModel.SMALL.getValue(), 0.8, true));

assertThat(response).isNotNull();
assertThat(response.collectList().block()).isNotNull();
@@ -28,13 +28,6 @@
import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.model.tool.LegacyToolCallingManager;
import org.springframework.ai.model.tool.ToolCallingChatOptions;
import org.springframework.ai.model.tool.ToolCallingManager;
import org.springframework.ai.model.tool.ToolExecutionResult;
import org.springframework.ai.tool.definition.ToolDefinition;
import org.springframework.ai.util.json.JsonParser;
import org.springframework.lang.Nullable;
import reactor.core.publisher.Flux;

import org.springframework.ai.chat.messages.AssistantMessage;
@@ -59,6 +52,9 @@
import org.springframework.ai.model.function.FunctionCallback;
import org.springframework.ai.model.function.FunctionCallbackResolver;
import org.springframework.ai.model.function.FunctionCallingOptions;
import org.springframework.ai.model.tool.ToolCallingChatOptions;
import org.springframework.ai.model.tool.ToolCallingManager;
import org.springframework.ai.model.tool.ToolExecutionResult;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaApi.ChatRequest;
import org.springframework.ai.ollama.api.OllamaApi.Message.Role;
@@ -69,6 +65,8 @@
import org.springframework.ai.ollama.management.ModelManagementOptions;
import org.springframework.ai.ollama.management.OllamaModelManager;
import org.springframework.ai.ollama.management.PullModelStrategy;
import org.springframework.ai.tool.definition.ToolDefinition;
import org.springframework.ai.util.json.JsonParser;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
@@ -124,18 +122,6 @@ public class OllamaChatModel extends AbstractToolCallSupport implements ChatMode

private ChatModelObservationConvention observationConvention = DEFAULT_OBSERVATION_CONVENTION;

@Deprecated
public OllamaChatModel(OllamaApi ollamaApi, OllamaOptions defaultOptions,
@Nullable FunctionCallbackResolver functionCallbackResolver,
@Nullable List<FunctionCallback> toolFunctionCallbacks, ObservationRegistry observationRegistry,
ModelManagementOptions modelManagementOptions) {
this(ollamaApi, defaultOptions, new LegacyToolCallingManager(functionCallbackResolver, toolFunctionCallbacks),
observationRegistry, modelManagementOptions);

logger.warn("This constructor is deprecated and will be removed in the next milestone. "
+ "Please use the OllamaChatModel.Builder or the new constructor accepting ToolCallingManager instead.");
}

public OllamaChatModel(OllamaApi ollamaApi, OllamaOptions defaultOptions, ToolCallingManager toolCallingManager,
ObservationRegistry observationRegistry, ModelManagementOptions modelManagementOptions) {
// We do not pass the 'defaultOptions' to the AbstractToolSupport,
@@ -599,8 +585,14 @@ public OllamaChatModel build() {
"toolCallingManager must not be set when functionCallbackResolver is set");
List<FunctionCallback> toolCallbacks = this.toolFunctionCallbacks != null ? this.toolFunctionCallbacks
: List.of();
return new OllamaChatModel(this.ollamaApi, this.defaultOptions, this.functionCallbackResolver,
toolCallbacks, this.observationRegistry, this.modelManagementOptions);
return OllamaChatModel.builder()
.ollamaApi(this.ollamaApi)
.defaultOptions(this.defaultOptions)
.functionCallbackResolver(this.functionCallbackResolver)
.toolFunctionCallbacks(toolCallbacks)
.observationRegistry(this.observationRegistry)
.modelManagementOptions(this.modelManagementOptions)
.build();
}

return new OllamaChatModel(this.ollamaApi, this.defaultOptions, DEFAULT_TOOL_CALLING_MANAGER,
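With the deprecated OllamaChatModel constructor removed, construction goes through the builder here as well. A minimal caller-side sketch; the OllamaApi and OllamaOptions arguments are assumed to be built elsewhere, since this diff only shows the builder's setter names:

OllamaChatModel chatModel = OllamaChatModel.builder()
	.ollamaApi(ollamaApi)             // assumed available, e.g. an injected bean
	.defaultOptions(defaultOptions)   // an OllamaOptions instance built elsewhere
	.observationRegistry(ObservationRegistry.NOOP)
	.build();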