Class: OpenAI
Unified language model interface
Extends
Extended by
Constructors
new OpenAI()
new OpenAI(init?): OpenAI
Parameters
• init?: Omit<Partial<OpenAI>, "session"> & object
Returns
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:316
Properties
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Defined in
packages/providers/openai/dist/index.d.ts:309
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in
packages/providers/openai/dist/index.d.ts:313
apiKey?
optional
apiKey: string
Defined in
packages/providers/openai/dist/index.d.ts:310
lazySession()
lazySession: () => Promise<LLMInstance>
Returns
Promise<LLMInstance>
Defined in
packages/providers/openai/dist/index.d.ts:314
maxRetries
maxRetries: number
Defined in
packages/providers/openai/dist/index.d.ts:311
maxTokens?
optional
maxTokens: number
Defined in
packages/providers/openai/dist/index.d.ts:308
model
model: (string & object) | ChatModel
Defined in
packages/providers/openai/dist/index.d.ts:305
temperature
temperature: number
Defined in
packages/providers/openai/dist/index.d.ts:306
timeout?
optional
timeout: number
Defined in
packages/providers/openai/dist/index.d.ts:312
topP
topP: number
Defined in
packages/providers/openai/dist/index.d.ts:307
Accessors
metadata
Get Signature
get metadata(): LLMMetadata & object
Returns
LLMMetadata & object
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:321
session
Get Signature
get session(): Promise<LLMInstance>
Returns
Promise<LLMInstance>
Defined in
packages/providers/openai/dist/index.d.ts:315
supportToolCall
Get Signature
get supportToolCall(): boolean
Returns
boolean
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:320
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:324
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:325
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:163
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:164
streamChat()
protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Parameters
• baseRequestParams: ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Defined in
packages/providers/openai/dist/index.d.ts:326
toOpenAIMessage()
static toOpenAIMessage(messages): ChatCompletionMessageParam[]
Parameters
• messages: ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
Defined in
packages/providers/openai/dist/index.d.ts:323
toOpenAIRole()
static toOpenAIRole(messageType): ChatCompletionRole
Parameters
• messageType: MessageType
Returns
ChatCompletionRole
Defined in
packages/providers/openai/dist/index.d.ts:322
toTool()
static toTool(tool): ChatCompletionTool
Parameters
• tool: BaseTool<any>
Returns
ChatCompletionTool
Defined in
packages/providers/openai/dist/index.d.ts:327