Skip to main content

Class: OpenAI

Hierarchy

Constructors

constructor

new OpenAI(init?): OpenAI

Parameters

Name | Type
init? | Partial<OpenAI> & { azure?: AzureOpenAIConfig }

Returns

OpenAI

Overrides

BaseLLM<OpenAIAdditionalChatOptions, OpenAIAdditionalMessageOptions>.constructor

Defined in

packages/core/src/llm/open_ai.ts:188

Properties

additionalChatOptions

Optional additionalChatOptions: OpenAIAdditionalChatOptions

Defined in

packages/core/src/llm/open_ai.ts:176


additionalSessionOptions

Optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Defined in

packages/core/src/llm/open_ai.ts:183


apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/open_ai.ts:179


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/open_ai.ts:180


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/open_ai.ts:175


model

model: string

Defined in

packages/core/src/llm/open_ai.ts:172


session

session: OpenAISession

Defined in

packages/core/src/llm/open_ai.ts:182


temperature

temperature: number

Defined in

packages/core/src/llm/open_ai.ts:173


timeout

Optional timeout: number

Defined in

packages/core/src/llm/open_ai.ts:181


topP

topP: number

Defined in

packages/core/src/llm/open_ai.ts:174

Accessors

metadata

get metadata(): LLMMetadata & OpenAIAdditionalMetadata

Returns

LLMMetadata & OpenAIAdditionalMetadata

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/open_ai.ts:241

Methods

chat

chat(params): Promise<AsyncIterable<{ delta: string ; options?: OpenAIAdditionalMessageOptions }>>

Parameters

Name | Type
params | LLMChatParamsStreaming<OpenAIAdditionalChatOptions>

Returns

Promise<AsyncIterable<{ delta: string ; options?: OpenAIAdditionalMessageOptions }>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/open_ai.ts:321

chat(params): Promise<ChatResponse<OpenAIAdditionalMessageOptions>>

Parameters

Name | Type
params | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions>

Returns

Promise<ChatResponse<OpenAIAdditionalMessageOptions>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/open_ai.ts:324


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:27

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:30


streamChat

streamChat(baseRequestParams): AsyncIterable<{ delta: string ; options?: OpenAIAdditionalMessageOptions }>

Parameters

Name | Type
baseRequestParams | ChatCompletionCreateParams

Returns

AsyncIterable<{ delta: string ; options?: OpenAIAdditionalMessageOptions }>

Defined in

packages/core/src/llm/open_ai.ts:378


toOpenAIMessage

toOpenAIMessage(messages): ChatCompletionMessageParam[]

Parameters

Name | Type
messages | { content: MessageContent ; options?: OpenAIAdditionalMessageOptions ; role: MessageType }[]

Returns

ChatCompletionMessageParam[]

Defined in

packages/core/src/llm/open_ai.ts:274


toOpenAIRole

toOpenAIRole(messageType): ChatCompletionRole

Parameters

Name | Type
messageType | MessageType

Returns

ChatCompletionRole

Defined in

packages/core/src/llm/open_ai.ts:257


toTool

toTool(tool): ChatCompletionTool

Parameters

Name | Type
tool | BaseTool

Returns

ChatCompletionTool

Defined in

packages/core/src/llm/open_ai.ts:413