Skip to main content

Class: Anthropic

Anthropic (Claude) implementation of the unified language model interface.

Extends

ToolCallLLM

Constructors

new Anthropic()

new Anthropic(init?): Anthropic

Parameters

init?: Partial<Anthropic>

Returns

Anthropic

Overrides

ToolCallLLM.constructor

Defined in

packages/llamaindex/src/llm/anthropic.ts:120

Properties

apiKey?

optional apiKey: string

Defined in

packages/llamaindex/src/llm/anthropic.ts:115


maxRetries

maxRetries: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:116


maxTokens?

optional maxTokens: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:112


model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/llamaindex/src/llm/anthropic.ts:109


session

session: AnthropicSession

Defined in

packages/llamaindex/src/llm/anthropic.ts:118


temperature

temperature: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:110


timeout?

optional timeout: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:117


topP

topP: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:111

Accessors

metadata

get metadata(): object

Returns

object

contextWindow

contextWindow: number

maxTokens

maxTokens: undefined | number

model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

temperature

temperature: number

tokenizer

tokenizer: undefined = undefined

topP

topP: number

Overrides

ToolCallLLM.metadata

Defined in

packages/llamaindex/src/llm/anthropic.ts:143


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/llamaindex/src/llm/anthropic.ts:139

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/anthropic.ts:290

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/anthropic.ts:296


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:169


formatMessages()

formatMessages(messages): MessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

MessageParam[]

Defined in

packages/llamaindex/src/llm/anthropic.ts:161


getModelName()

getModelName(model): string

Parameters

model: string

Returns

string

Defined in

packages/llamaindex/src/llm/anthropic.ts:154


streamChat()

protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

systemPrompt?: null | string

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in

packages/llamaindex/src/llm/anthropic.ts:412


toTool()

static toTool(tool): Tool

Parameters

tool: BaseTool<any>

Returns

Tool

Defined in

packages/llamaindex/src/llm/anthropic.ts:447