@@ -6,7 +6,7 @@ use std::collections::HashMap;
66use std:: sync:: Arc ;
77
88use crate :: {
9- builder:: LLMBackend ,
9+ builder:: { LLMBackend , SystemContent , SystemPrompt } ,
1010 chat:: {
1111 ChatMessage , ChatProvider , ChatResponse , ChatRole , MessageType , StreamChunk , Tool ,
1212 ToolChoice , Usage ,
@@ -44,7 +44,7 @@ pub struct AnthropicConfig {
4444 /// Request timeout in seconds.
4545 pub timeout_seconds : u64 ,
4646 /// System prompt to guide model behavior.
47- pub system : String ,
47+ pub system : SystemPrompt ,
4848 /// Top-p (nucleus) sampling parameter.
4949 pub top_p : Option < f32 > ,
5050 /// Top-k sampling parameter.
@@ -91,6 +91,14 @@ struct ThinkingConfig {
9191 budget_tokens : u32 ,
9292}
9393
/// System prompt payload as it appears in the request body.
///
/// Serialized with `#[serde(untagged)]`, so it is emitted either as a plain
/// JSON string or as a JSON array of `SystemContent` objects — mirroring the
/// two variants of the owned `SystemPrompt` config type without copying:
/// both variants borrow their data for the lifetime `'a` of the request.
#[derive(Serialize, Debug)]
#[serde(untagged)]
enum RequestSystemPrompt<'a> {
    /// Plain-text prompt, serialized as a single JSON string.
    String(&'a str),
    /// Structured prompt, serialized as an array of content objects.
    Messages(&'a [SystemContent]),
}
101+
94102/// Request payload for Anthropic's messages API endpoint.
95103#[ derive( Serialize , Debug ) ]
96104struct AnthropicCompleteRequest < ' a > {
@@ -101,7 +109,7 @@ struct AnthropicCompleteRequest<'a> {
101109 #[ serde( skip_serializing_if = "Option::is_none" ) ]
102110 temperature : Option < f32 > ,
103111 #[ serde( skip_serializing_if = "Option::is_none" ) ]
104- system : Option < & ' a str > ,
112+ system : Option < RequestSystemPrompt < ' a > > ,
105113 #[ serde( skip_serializing_if = "Option::is_none" ) ]
106114 stream : Option < bool > ,
107115 #[ serde( skip_serializing_if = "Option::is_none" ) ]
@@ -487,6 +495,14 @@ impl Anthropic {
487495 ( anthropic_tools, final_tool_choice)
488496 }
489497
    /// Converts the stored [`SystemPrompt`] configuration into the borrowed
    /// [`RequestSystemPrompt`] form used when serializing a request body.
    ///
    /// No data is copied: each variant borrows from `system`, so the returned
    /// value is only valid while the input reference is alive.
    fn system_to_request(system: &SystemPrompt) -> RequestSystemPrompt<'_> {
        match system {
            // Plain string prompt -> serialized as a JSON string.
            SystemPrompt::String(s) => RequestSystemPrompt::String(s),
            // Structured prompt -> serialized as an array of content objects.
            SystemPrompt::Messages(msgs) => RequestSystemPrompt::Messages(msgs),
        }
    }
505+
490506 /// Creates a new Anthropic client with the specified configuration.
491507 ///
492508 /// # Arguments
@@ -505,7 +521,7 @@ impl Anthropic {
505521 max_tokens : Option < u32 > ,
506522 temperature : Option < f32 > ,
507523 timeout_seconds : Option < u64 > ,
508- system : Option < String > ,
524+ system : Option < SystemPrompt > ,
509525 top_p : Option < f32 > ,
510526 top_k : Option < u32 > ,
511527 tools : Option < Vec < Tool > > ,
@@ -551,36 +567,6 @@ impl Anthropic {
551567 /// * `timeout_seconds` - Request timeout in seconds (defaults to 30)
552568 /// * `system` - System prompt (defaults to "You are a helpful assistant.")
553569 /// * `thinking_budget_tokens` - Budget tokens for thinking (optional)
554- ///
555- /// # Examples
556- ///
557- /// ```rust
558- /// use reqwest::Client;
559- /// use std::time::Duration;
560- ///
561- /// // Create a shared client with custom settings
562- /// let shared_client = Client::builder()
563- /// .timeout(Duration::from_secs(120))
564- /// .build()
565- /// .unwrap();
566- ///
567- /// // Use the shared client for multiple Anthropic instances
568- /// let anthropic = llm::backends::anthropic::Anthropic::with_client(
569- /// shared_client.clone(),
570- /// "your-api-key",
571- /// Some("claude-3-opus-20240229".to_string()),
572- /// Some(1000),
573- /// Some(0.7),
574- /// Some(120),
575- /// Some("You are a helpful assistant.".to_string()),
576- /// None,
577- /// None,
578- /// None,
579- /// None,
580- /// None,
581- /// None,
582- /// );
583- /// ```
584570 #[ allow( clippy:: too_many_arguments) ]
585571 pub fn with_client (
586572 client : Client ,
@@ -589,7 +575,7 @@ impl Anthropic {
589575 max_tokens : Option < u32 > ,
590576 temperature : Option < f32 > ,
591577 timeout_seconds : Option < u64 > ,
592- system : Option < String > ,
578+ system : Option < SystemPrompt > ,
593579 top_p : Option < f32 > ,
594580 top_k : Option < u32 > ,
595581 tools : Option < Vec < Tool > > ,
@@ -603,7 +589,9 @@ impl Anthropic {
603589 model : model. unwrap_or_else ( || "claude-3-sonnet-20240229" . to_string ( ) ) ,
604590 max_tokens : max_tokens. unwrap_or ( 300 ) ,
605591 temperature : temperature. unwrap_or ( 0.7 ) ,
606- system : system. unwrap_or_else ( || "You are a helpful assistant." . to_string ( ) ) ,
592+ system : system. unwrap_or_else ( || {
593+ SystemPrompt :: String ( "You are a helpful assistant." . to_string ( ) )
594+ } ) ,
607595 timeout_seconds : timeout_seconds. unwrap_or ( 30 ) ,
608596 top_p,
609597 top_k,
@@ -636,7 +624,7 @@ impl Anthropic {
636624 self . config . timeout_seconds
637625 }
638626
639- pub fn system ( & self ) -> & str {
    /// Returns a reference to the configured system prompt.
    pub fn system(&self) -> &SystemPrompt {
        &self.config.system
    }
642630
@@ -706,12 +694,14 @@ impl ChatProvider for Anthropic {
706694 None
707695 } ;
708696
697+ let system_prompt = Self :: system_to_request ( & self . config . system ) ;
698+
709699 let req_body = AnthropicCompleteRequest {
710700 messages : anthropic_messages,
711701 model : & self . config . model ,
712702 max_tokens : Some ( self . config . max_tokens ) ,
713703 temperature : Some ( self . config . temperature ) ,
714- system : Some ( & self . config . system ) ,
704+ system : Some ( system_prompt ) ,
715705 stream : Some ( false ) ,
716706 top_p : self . config . top_p ,
717707 top_k : self . config . top_k ,
@@ -833,12 +823,14 @@ impl ChatProvider for Anthropic {
833823 } )
834824 . collect ( ) ;
835825
826+ let system_prompt = Self :: system_to_request ( & self . config . system ) ;
827+
836828 let req_body = AnthropicCompleteRequest {
837829 messages : anthropic_messages,
838830 model : & self . config . model ,
839831 max_tokens : Some ( self . config . max_tokens ) ,
840832 temperature : Some ( self . config . temperature ) ,
841- system : Some ( & self . config . system ) ,
833+ system : Some ( system_prompt ) ,
842834 stream : Some ( true ) ,
843835 top_p : self . config . top_p ,
844836 top_k : self . config . top_k ,
@@ -901,12 +893,14 @@ impl ChatProvider for Anthropic {
901893 & self . config . tool_choice ,
902894 ) ;
903895
896+ let system_prompt = Self :: system_to_request ( & self . config . system ) ;
897+
904898 let req_body = AnthropicCompleteRequest {
905899 messages : anthropic_messages,
906900 model : & self . config . model ,
907901 max_tokens : Some ( self . config . max_tokens ) ,
908902 temperature : Some ( self . config . temperature ) ,
909- system : Some ( & self . config . system ) ,
903+ system : Some ( system_prompt ) ,
910904 stream : Some ( true ) ,
911905 top_p : self . config . top_p ,
912906 top_k : self . config . top_k ,
0 commit comments