Coverage for src / agent / providers / anthropic / chat_client.py: 34%

61 statements  

« prev     ^ index     » next       coverage.py v7.13.0, created at 2025-12-11 14:30 +0000

1# Copyright 2025-2026 Microsoft Corporation 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14 

15""" 

16Anthropic Claude chat client implementation. 

17 

18This module provides a custom AnthropicChatClient that integrates Anthropic's 

19Claude models with the Microsoft Agent Framework by extending BaseChatClient. 

20""" 

21 

22import logging 

23from collections.abc import AsyncIterator 

24from typing import Any 

25 

26from agent_framework import ( 

27 BaseChatClient, 

28 ChatMessage, 

29 ChatOptions, 

30 ChatResponse, 

31 ChatResponseUpdate, 

32 use_function_invocation, 

33) 

34from anthropic import AsyncAnthropic 

35 

36from .types import ( 

37 extract_usage_metadata, 

38 from_anthropic_message, 

39 to_anthropic_messages, 

40 to_anthropic_tools, 

41) 

42 

43logger = logging.getLogger(__name__) 

44 

45 

@use_function_invocation
class AnthropicChatClient(BaseChatClient):
    """Chat client for Anthropic Claude models.

    This client extends BaseChatClient to provide integration with Anthropic's
    Claude API, supporting both synchronous and streaming responses.

    Args:
        model_id: Claude model name (e.g., "claude-sonnet-4-5-20250929", "claude-haiku-4-5-20251001")
        api_key: Anthropic API key for authentication

    Example:
        >>> client = AnthropicChatClient(
        ...     model_id="claude-haiku-4-5-20251001",
        ...     api_key="your-api-key"
        ... )
    """

    # OpenTelemetry provider name for tracing
    OTEL_PROVIDER_NAME = "anthropic"

    def __init__(
        self,
        model_id: str,
        api_key: str | None = None,
    ):
        """Initialize AnthropicChatClient with authentication credentials.

        Args:
            model_id: Claude model name
            api_key: API key for Anthropic API

        Raises:
            ValueError: If required credentials are missing
        """
        super().__init__()

        self.model_id = model_id

        # Fail fast here: the Anthropic SDK would otherwise defer a missing
        # key to the first API call, which is harder to diagnose.
        if not api_key:
            raise ValueError("Anthropic API key is required")

        self.client = AsyncAnthropic(api_key=api_key)
        # Lazy %-style args: the message is only formatted if INFO is enabled.
        logger.info("Initialized Anthropic client for model: %s", model_id)

    def _prepare_options(self, chat_options: ChatOptions | None = None) -> dict[str, Any]:
        """Prepare generation config from ChatOptions.

        Args:
            chat_options: Optional chat configuration

        Returns:
            Dictionary with Anthropic generation configuration
        """
        config: dict[str, Any] = {
            "max_tokens": 4096,  # Default max tokens (Anthropic requires max_tokens)
        }

        if chat_options:
            # Only forward options the caller actually set; None means
            # "use the API default".
            if chat_options.temperature is not None:
                config["temperature"] = chat_options.temperature

            if chat_options.max_tokens is not None:
                config["max_tokens"] = chat_options.max_tokens

            if chat_options.top_p is not None:
                config["top_p"] = chat_options.top_p

            # Handle tools/functions. NOTE(review): tools may apparently be
            # either a callable returning the tool list or the list itself —
            # confirm against ChatOptions' contract.
            tools = chat_options.tools() if callable(chat_options.tools) else chat_options.tools
            if tools:
                config["tools"] = to_anthropic_tools(tools)  # type: ignore[arg-type]

        return config

    def _build_request_params(
        self,
        messages: list[ChatMessage],
        chat_options: ChatOptions,
    ) -> dict[str, Any]:
        """Build keyword arguments for an Anthropic messages API call.

        Shared by the streaming and non-streaming paths so the request
        assembly logic lives in exactly one place.

        Args:
            messages: List of chat messages to convert
            chat_options: Chat configuration to merge into the request

        Returns:
            Dict suitable for ``client.messages.create(**params)`` or
            ``client.messages.stream(**params)``.
        """
        # Convert messages to Anthropic format (extracts system prompt)
        system_prompt, anthropic_messages = to_anthropic_messages(messages)

        api_params: dict[str, Any] = {
            "model": self.model_id,
            "messages": anthropic_messages,
            **self._prepare_options(chat_options),
        }

        # Only include "system" when a system prompt is actually present.
        if system_prompt:
            api_params["system"] = system_prompt

        return api_params

    def _handle_anthropic_error(self, error: Exception) -> Exception:
        """Log an Anthropic SDK exception and return it for re-raising.

        NOTE(review): the exception is currently returned unmapped; if
        agent-framework defines provider-agnostic exception types, the
        translation should happen here.

        Args:
            error: Exception from Anthropic SDK

        Returns:
            The same exception, after logging
        """
        # logger.exception records the active traceback automatically,
        # replacing the previous manual traceback.format_exc() logging.
        logger.exception("Anthropic API error: %s", error)
        return error

    async def _inner_get_response(  # type: ignore[override]
        self,
        *,
        messages: list[ChatMessage],
        chat_options: ChatOptions,
        **kwargs: Any,
    ) -> ChatResponse:
        """Get non-streaming response from Anthropic API.

        This method is required by BaseChatClient and handles synchronous
        chat completions.

        Args:
            messages: List of chat messages
            chat_options: Optional chat configuration

        Returns:
            ChatResponse with the model's reply

        Raises:
            Exception: If API call fails
        """
        try:
            api_params = self._build_request_params(messages, chat_options)

            # Call Anthropic API
            response = await self.client.messages.create(**api_params)

            # Convert response to ChatResponse
            chat_message = from_anthropic_message(response)

            # Extract usage metadata
            usage = extract_usage_metadata(response)

            return ChatResponse(messages=[chat_message], usage_details=usage or None)  # type: ignore[arg-type]

        except Exception as e:
            raise self._handle_anthropic_error(e)

    async def _inner_get_streaming_response(  # type: ignore[override]
        self,
        *,
        messages: list[ChatMessage],
        chat_options: ChatOptions,
        **kwargs: Any,
    ) -> AsyncIterator[ChatResponseUpdate]:
        """Get streaming response from Anthropic API.

        This method is required by BaseChatClient and handles streaming
        chat completions, yielding response chunks as they arrive.

        Args:
            messages: List of chat messages
            chat_options: Optional chat configuration

        Yields:
            ChatResponseUpdate objects with response chunks

        Raises:
            Exception: If API call fails
        """
        try:
            api_params = self._build_request_params(messages, chat_options)

            # Call Anthropic API with streaming; the context manager ensures
            # the underlying HTTP stream is closed on exit.
            async with self.client.messages.stream(**api_params) as stream:
                async for text in stream.text_stream:
                    if text:
                        yield ChatResponseUpdate(
                            text=text,
                            role="assistant",
                        )

        except Exception as e:
            raise self._handle_anthropic_error(e)