Skip to content

Commit

Permalink
feat(anthropic): update to Claude 3.7 Sonnet and refactor API
Browse files Browse the repository at this point in the history
- Add support for Claude 3.7 Sonnet model and make it the default
- Rename function-related APIs to tool-related APIs for consistency:
  - Change functionCallbacks to toolCallbacks
  - Change function to toolNames
- Replace FunctionCallingOptions with ToolCallingChatOptions
- Refactor AnthropicChatModel instantiation to use builder pattern
- Update tests to use latest model versions instead of dated versions

Signed-off-by: Christian Tzolov <[email protected]>
  • Loading branch information
tzolov authored and markpollack committed Feb 27, 2025
1 parent c91163b commit 3dfea48
Show file tree
Hide file tree
Showing 8 changed files with 26 additions and 20 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
*/
public class AnthropicChatModel extends AbstractToolCallSupport implements ChatModel {

public static final String DEFAULT_MODEL_NAME = AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getValue();
public static final String DEFAULT_MODEL_NAME = AnthropicApi.ChatModel.CLAUDE_3_7_SONNET.getValue();

public static final Integer DEFAULT_MAX_TOKENS = 500;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,11 @@ public Flux<ChatCompletionResponse> chatCompletionStream(ChatCompletionRequest c
public enum ChatModel implements ChatModelDescription {

// @formatter:off
/**
* The claude-3-7-sonnet-latest model.
*/
CLAUDE_3_7_SONNET("claude-3-7-sonnet-latest"),

/**
* The claude-3-5-sonnet-20241022 model.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,8 @@ private static void validateChatResponseMetadata(ChatResponse response, String m
}

@ParameterizedTest(name = "{0} : {displayName} ")
@ValueSource(strings = { "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307",
"claude-3-5-sonnet-20241022" })
@ValueSource(strings = { "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest",
"claude-3-opus-latest" })
void roleTest(String modelName) {
UserMessage userMessage = new UserMessage(
"Tell me about 3 famous pirates from the Golden Age of Piracy and why they did.");
Expand Down Expand Up @@ -275,11 +275,11 @@ void functionCallTest() {

var promptOptions = AnthropicChatOptions.builder()
.model(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getName())
.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.description(
"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
.inputType(MockWeatherService.Request.class)
.build()))
.build())
.build();

ChatResponse response = this.chatModel.call(new Prompt(messages, promptOptions));
Expand Down Expand Up @@ -307,11 +307,11 @@ void streamFunctionCallTest() {

var promptOptions = AnthropicChatOptions.builder()
.model(AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getName())
.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.description(
"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
.inputType(MockWeatherService.Request.class)
.build()))
.build())
.build();

Flux<ChatResponse> response = this.chatModel.stream(new Prompt(messages, promptOptions));
Expand All @@ -337,11 +337,11 @@ void streamFunctionCallUsageTest() {

var promptOptions = AnthropicChatOptions.builder()
.model(AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getName())
.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
.description(
"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
.inputType(MockWeatherService.Request.class)
.build()))
.build())
.build();

Flux<ChatResponse> responseFlux = this.chatModel.stream(new Prompt(messages, promptOptions));
Expand Down Expand Up @@ -410,7 +410,7 @@ private String getApiKey() {

@Bean
public AnthropicChatModel openAiChatModel(AnthropicApi api) {
return new AnthropicChatModel(api);
return AnthropicChatModel.builder().anthropicApi(api).build();
}

}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,7 @@ private String getApiKey() {

@Bean
public AnthropicChatModel anthropicChatModel(AnthropicApi api) {
AnthropicChatModel anthropicChatModel = new AnthropicChatModel(api);
return anthropicChatModel;
return AnthropicChatModel.builder().anthropicApi(api).build();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,10 @@ public class ChatCompletionRequestTests {
@Test
public void createRequestWithChatOptions() {

var client = new AnthropicChatModel(new AnthropicApi("TEST"),
AnthropicChatOptions.builder().model("DEFAULT_MODEL").temperature(66.6).build());
var client = AnthropicChatModel.builder()
.anthropicApi(new AnthropicApi("TEST"))
.defaultOptions(AnthropicChatOptions.builder().model("DEFAULT_MODEL").temperature(66.6).build())
.build();

var prompt = client.buildRequestPrompt(new Prompt("Test message content"));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ The prefix `spring.ai.anthropic.chat` is the property prefix that lets you confi
| Property | Description | Default

| spring.ai.anthropic.chat.enabled | Enable Anthropic chat model. | true
| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-5-sonnet-20241022`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-opus-20240229`
| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-7-sonnet-latest`, `claude-3-5-sonnet-latest`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-7-sonnet-latest`
| spring.ai.anthropic.chat.options.temperature | The sampling temperature to use that controls the apparent creativity of generated completions. Higher values will make output more random while lower values will make results more focused and deterministic. It is not recommended to modify temperature and top_p for the same completions request as the interaction of these two settings is difficult to predict. | 0.8
| spring.ai.anthropic.chat.options.max-tokens | The maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. | 500
| spring.ai.anthropic.chat.options.stop-sequence | Custom text sequences that will cause the model to stop generating. Our models will normally stop when they have naturally completed their turn, which will result in a response stop_reason of "end_turn". If you want the model to stop generating when it encounters custom strings of text, you can use the stop_sequences parameter. If the model encounters one of the custom sequences, the response stop_reason value will be "stop_sequence" and the response stop_sequence value will contain the matched stop sequence. | -
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.model.function.FunctionCallingOptions;
import org.springframework.ai.model.tool.ToolCallingChatOptions;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
Expand Down Expand Up @@ -66,14 +66,14 @@ void functionCallTest() {
"What's the weather like in San Francisco, in Paris, France and in Tokyo, Japan? Return the temperature in Celsius.");

ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
AnthropicChatOptions.builder().function("weatherFunction").build()));
AnthropicChatOptions.builder().toolNames("weatherFunction").build()));

logger.info("Response: {}", response);

assertThat(response.getResult().getOutput().getText()).contains("30", "10", "15");

response = chatModel.call(new Prompt(List.of(userMessage),
AnthropicChatOptions.builder().function("weatherFunction3").build()));
AnthropicChatOptions.builder().toolNames("weatherFunction3").build()));

logger.info("Response: {}", response);

Expand All @@ -96,7 +96,7 @@ void functionCallWithPortableFunctionCallingOptions() {
"What's the weather like in San Francisco, in Paris, France and in Tokyo, Japan? Return the temperature in Celsius.");

ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
FunctionCallingOptions.builder().function("weatherFunction").build()));
ToolCallingChatOptions.builder().toolNames("weatherFunction").build()));

logger.info("Response: {}", response);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ void functionCallTest() {
"What's the weather like in San Francisco, in Paris and in Tokyo? Return the temperature in Celsius.");

var promptOptions = AnthropicChatOptions.builder()
.functionCallbacks(
.toolCallbacks(
List.of(FunctionToolCallback.builder("CurrentWeatherService", new MockWeatherService())
.description("Get the weather in location. Return temperature in 36°F or 36°C format.")
.inputType(MockWeatherService.Request.class)
Expand Down

0 comments on commit 3dfea48

Please sign in to comment.