Class: OpenAI

Unified language model interface

Extends

ToolCallLLM<OpenAIAdditionalChatOptions>

Extended by

Constructors

new OpenAI()

new OpenAI(init?): OpenAI

Parameters

init?: Partial<OpenAI> & object

Returns

OpenAI

Overrides

ToolCallLLM.constructor

Defined in

packages/llamaindex/src/llm/openai.ts:187
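
Example

A minimal construction sketch, assuming OpenAI is imported from the llamaindex package; every field of init is optional (Partial<OpenAI> & object), and the model and sampling values here are illustrative:

```ts
import { OpenAI } from "llamaindex";

const llm = new OpenAI({
  model: "gpt-4o-mini", // any ChatModel id, or an arbitrary model string
  temperature: 0.1,
  maxTokens: 512,
  // apiKey omitted here on the assumption that the client falls back to
  // the OPENAI_API_KEY environment variable; pass it explicitly to be safe.
});
```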

Properties

additionalChatOptions?

optional additionalChatOptions: OpenAIAdditionalChatOptions

Defined in

packages/llamaindex/src/llm/openai.ts:176


additionalSessionOptions?

optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Defined in

packages/llamaindex/src/llm/openai.ts:183
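
Example

A hedged sketch of pointing the client at an OpenAI-compatible gateway. additionalSessionOptions accepts the openai package's ClientOptions minus apiKey, timeout, and maxRetries, which are set on the class itself; the baseURL and header values below are hypothetical:

```ts
import { OpenAI } from "llamaindex";

const llm = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  timeout: 60_000, // milliseconds
  maxRetries: 5,
  additionalSessionOptions: {
    baseURL: "https://my-gateway.example.com/v1", // hypothetical proxy endpoint
    defaultHeaders: { "x-request-source": "docs-example" }, // hypothetical header
  },
});
```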


apiKey?

optional apiKey: string = undefined

Defined in

packages/llamaindex/src/llm/openai.ts:179


maxRetries

maxRetries: number

Defined in

packages/llamaindex/src/llm/openai.ts:180


maxTokens?

optional maxTokens: number

Defined in

packages/llamaindex/src/llm/openai.ts:175


model

model: ChatModel | (string & object)

Defined in

packages/llamaindex/src/llm/openai.ts:169


session

session: OpenAISession

Defined in

packages/llamaindex/src/llm/openai.ts:182


temperature

temperature: number

Defined in

packages/llamaindex/src/llm/openai.ts:173


timeout?

optional timeout: number

Defined in

packages/llamaindex/src/llm/openai.ts:181


topP

topP: number

Defined in

packages/llamaindex/src/llm/openai.ts:174

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

ToolCallLLM.metadata

Defined in

packages/llamaindex/src/llm/openai.ts:238


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/llamaindex/src/llm/openai.ts:234
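
Example

A short usage sketch covering both accessors; the exact fields destructured from LLMMetadata (model, temperature, contextWindow) are an assumption in this sketch:

```ts
const llm = new OpenAI({ model: "gpt-4o" });

// supportToolCall gates whether tools can be passed to chat().
if (llm.supportToolCall) {
  // metadata echoes the effective settings back as LLMMetadata.
  const { model, temperature, contextWindow } = llm.metadata; // assumed field names
  console.log(model, temperature, contextWindow);
}
```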

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/openai.ts:315

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/openai.ts:321
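
Example

A sketch exercising both overloads. Selecting the streaming overload via stream: true in the params mirrors the LLMChatParamsStreaming / LLMChatParamsNonStreaming split; the chunk.delta field is assumed from ChatResponseChunk:

```ts
const llm = new OpenAI({ model: "gpt-4o-mini" });

// Non-streaming: resolves to a single ChatResponse.
const res = await llm.chat({
  messages: [{ role: "user", content: "Say hello in one word." }],
});
console.log(res.message.content);

// Streaming: resolves to an AsyncIterable of ChatResponseChunk.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Count to five." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```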


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:169
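
Example

The same streaming/non-streaming split applies to completions; a sketch assuming CompletionResponse exposes its text via a text field:

```ts
// Non-streaming: one CompletionResponse.
const { text } = await llm.complete({ prompt: "The capital of France is" });
console.log(text);

// Streaming: an AsyncIterable of CompletionResponse chunks.
const stream = await llm.complete({
  prompt: "Write a haiku about retrieval.",
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.text);
}
```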


streamChat()

protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

baseRequestParams: ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in

packages/llamaindex/src/llm/openai.ts:397


toOpenAIMessage()

static toOpenAIMessage(messages): ChatCompletionMessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ChatCompletionMessageParam[]

Defined in

packages/llamaindex/src/llm/openai.ts:266
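
Example

A sketch of converting LlamaIndex chat history into the wire format the openai package expects, assuming ChatMessage and ToolCallLLMMessageOptions are exported as types from llamaindex:

```ts
import { OpenAI } from "llamaindex";
import type { ChatMessage, ToolCallLLMMessageOptions } from "llamaindex";

const history: ChatMessage<ToolCallLLMMessageOptions>[] = [
  { role: "system", content: "You are terse." },
  { role: "user", content: "ping" },
];

// Roles are presumably mapped via the static toOpenAIRole helper below.
const wireMessages = OpenAI.toOpenAIMessage(history);
```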


toOpenAIRole()

static toOpenAIRole(messageType): ChatCompletionRole

Parameters

messageType: MessageType

Returns

ChatCompletionRole

Defined in

packages/llamaindex/src/llm/openai.ts:253


toTool()

static toTool(tool): ChatCompletionTool

Parameters

tool: BaseTool<any>

Returns

ChatCompletionTool

Defined in

packages/llamaindex/src/llm/openai.ts:467
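
Example

A sketch converting a hypothetical calculator tool into the { type: "function", ... } shape used by the OpenAI chat completions API; the tool definition below, including its JSON Schema parameters, is illustrative:

```ts
import { OpenAI } from "llamaindex";
import type { BaseTool } from "llamaindex";

// Hypothetical tool; parameters follow JSON Schema conventions.
const addTool: BaseTool = {
  metadata: {
    name: "add",
    description: "Add two numbers and return the sum.",
    parameters: {
      type: "object",
      properties: {
        a: { type: "number", description: "First addend" },
        b: { type: "number", description: "Second addend" },
      },
      required: ["a", "b"],
    },
  },
  call: ({ a, b }: { a: number; b: number }) => String(a + b),
};

const openaiTool = OpenAI.toTool(addTool);
```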