Class: Anthropic
Unified language model interface
Extends
ToolCallLLM
<AnthropicAdditionalChatOptions
>
Constructors
new Anthropic()
new Anthropic(init?): Anthropic
Parameters
• init?: Partial<Anthropic>
Returns
Overrides
Defined in
packages/providers/anthropic/dist/index.d.ts:75
Properties
apiKey?
optional apiKey: string
Defined in
packages/providers/anthropic/dist/index.d.ts:71
getModelName()
getModelName: (model) => string
Parameters
• model: string
Returns
string
Defined in
packages/providers/anthropic/dist/index.d.ts:85
maxRetries
maxRetries: number
Defined in
packages/providers/anthropic/dist/index.d.ts:72
maxTokens?
optional maxTokens: number
Defined in
packages/providers/anthropic/dist/index.d.ts:70
model
model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"
Defined in
packages/providers/anthropic/dist/index.d.ts:67
session
session: AnthropicSession
Defined in
packages/providers/anthropic/dist/index.d.ts:74
temperature
temperature: number
Defined in
packages/providers/anthropic/dist/index.d.ts:68
timeout?
optional timeout: number
Defined in
packages/providers/anthropic/dist/index.d.ts:73
topP
topP: number
Defined in
packages/providers/anthropic/dist/index.d.ts:69
Accessors
metadata
get metadata(): object
Returns
object
contextWindow
contextWindow: number
maxTokens
maxTokens: undefined | number
model
model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"
temperature
temperature: number
tokenizer
tokenizer: undefined
topP
topP: number
Overrides
Defined in
packages/providers/anthropic/dist/index.d.ts:77
supportToolCall
get supportToolCall(): boolean
Returns
boolean
Overrides
Defined in
packages/providers/anthropic/dist/index.d.ts:76
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<object, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
Defined in
packages/providers/anthropic/dist/index.d.ts:87
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsNonStreaming<object, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
Defined in
packages/providers/anthropic/dist/index.d.ts:88
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:168
complete(params)
complete(params): Promise<CompletionResponse>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:169
formatMessages()
formatMessages(messages): MessageParam[]
Parameters
• messages: ChatMessage<ToolCallLLMMessageOptions>[]
Returns
MessageParam
[]
Defined in
packages/providers/anthropic/dist/index.d.ts:86
streamChat()
protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Parameters
• messages: ChatMessage
<ToolCallLLMMessageOptions
>[]
• systemPrompt?: null | string
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Defined in
packages/providers/anthropic/dist/index.d.ts:89
toTool()
static
toTool(tool
):Tool
Parameters
• tool: BaseTool<any>
Returns
Tool
Defined in
packages/providers/anthropic/dist/index.d.ts:90