Class: MistralAI
MistralAI LLM implementation
Hierarchy
- BaseLLM
  ↳ MistralAI
Constructors
constructor
• new MistralAI(init?): MistralAI
Parameters
Name | Type |
---|---|
init? | Partial<MistralAI> |
Returns
MistralAI
Overrides
BaseLLM.constructor
Defined in
packages/core/src/llm/mistral.ts:58
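A minimal construction sketch follows. It assumes MistralAI is re-exported from the llamaindex package entry point, like the library's other LLM classes; the exact import path is an assumption:

```typescript
import { MistralAI } from "llamaindex"; // import path is an assumption

// Every field of the init object is optional (Partial<MistralAI>);
// anything omitted falls back to the class defaults.
const llm = new MistralAI({
  model: "mistral-small",
  temperature: 0.1,
  maxTokens: 256,
  apiKey: process.env.MISTRAL_API_KEY, // optional; may also come from the environment
});
```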
Properties
apiKey
• Optional apiKey: string
Defined in
packages/core/src/llm/mistral.ts:52
maxTokens
• Optional maxTokens: number
Defined in
packages/core/src/llm/mistral.ts:51
model
• model: "mistral-tiny" | "mistral-small" | "mistral-medium"
Defined in
packages/core/src/llm/mistral.ts:48
randomSeed
• Optional randomSeed: number
Defined in
packages/core/src/llm/mistral.ts:54
safeMode
• safeMode: boolean
Defined in
packages/core/src/llm/mistral.ts:53
session
• Private session: MistralAISession
Defined in
packages/core/src/llm/mistral.ts:56
temperature
• temperature: number
Defined in
packages/core/src/llm/mistral.ts:49
topP
• topP: number
Defined in
packages/core/src/llm/mistral.ts:50
Accessors
metadata
• get metadata(): Object
Returns
Object
Name | Type |
---|---|
contextWindow | number |
maxTokens | undefined \| number |
model | "mistral-tiny" \| "mistral-small" \| "mistral-medium" |
temperature | number |
tokenizer | undefined |
topP | number |
Overrides
BaseLLM.metadata
Defined in
packages/core/src/llm/mistral.ts:69
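The getter bundles the instance's generation settings into a plain object, so callers can inspect the configuration without reading individual fields. A short usage sketch, reusing the llm instance from the constructor example:

```typescript
// All three fields appear in the metadata return type documented above.
const { model, contextWindow, maxTokens } = llm.metadata;
console.log(`model=${model}, contextWindow=${contextWindow}, maxTokens=${maxTokens}`);
```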
Methods
buildParams
▸ buildParams(messages): any
Parameters
Name | Type |
---|---|
messages | { content: MessageContent; options?: Record<string, unknown>; role: MessageType }[] |
Returns
any
Defined in
packages/core/src/llm/mistral.ts:80
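buildParams appears to translate the library's message objects into the request payload for the Mistral API; since it returns any, the exact output shape is untyped. A rough sketch (the expected output fields are assumptions):

```typescript
const body = llm.buildParams([
  { role: "user", content: "Hello, Mistral!" },
]);
// Presumably carries fields such as model, temperature, topP, and messages,
// mirroring the instance configuration; returns `any`, so inspect before use.
console.log(body);
```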
chat
▸ chat(params): Promise<AsyncIterable<{ delta: string; options?: Record<string, unknown> }>>
Parameters
Name | Type |
---|---|
params | LLMChatParamsStreaming<Record<string, unknown>> |
Returns
Promise<AsyncIterable<{ delta: string; options?: Record<string, unknown> }>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/mistral.ts:92
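This overload handles streaming: when the params satisfy LLMChatParamsStreaming (i.e. include stream: true), the promise resolves to an async iterable of delta chunks. A usage sketch, matching the documented return type:

```typescript
const stream = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about the sea." }],
  stream: true, // selects the streaming overload
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta); // each chunk carries an incremental delta
}
```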
▸ chat(params): Promise<ChatResponse<Record<string, unknown>>>
Parameters
Name | Type |
---|---|
params | LLMChatParamsNonStreaming<Record<string, unknown>> |
Returns
Promise<ChatResponse<Record<string, unknown>>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/mistral.ts:95
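The non-streaming overload resolves to a single ChatResponse. Assuming ChatResponse exposes the assistant reply on a message field, as it does elsewhere in the library, usage might look like:

```typescript
const response = await llm.chat({
  messages: [{ role: "user", content: "What is the capital of France?" }],
});
console.log(response.message.content); // `message` field is an assumption
```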
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsStreaming |
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:27
▸ complete(params): Promise<CompletionResponse>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsNonStreaming |
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:30
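complete is the prompt-in, text-out convenience inherited from BaseLLM. A sketch of both forms, assuming CompletionResponse exposes the generated text on a text field:

```typescript
// Non-streaming: resolves to a single CompletionResponse.
const completion = await llm.complete({ prompt: "Once upon a time" });
console.log(completion.text); // `text` field is an assumption

// Streaming: stream: true selects the AsyncIterable overload.
const chunks = await llm.complete({ prompt: "Once upon a time", stream: true });
for await (const chunk of chunks) {
  process.stdout.write(chunk.text);
}
```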
streamChat
▸ streamChat(«destructured»): AsyncIterable<{ delta: string; options?: Record<string, unknown> }>
Parameters
Name | Type |
---|---|
«destructured» | LLMChatParamsStreaming<Record<string, unknown>> |
Returns
AsyncIterable<{ delta: string; options?: Record<string, unknown> }>
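streamChat returns the async iterable directly, with no Promise wrapper, and its parameter type matches the streaming chat overload, so it is presumably the generator that chat delegates to when stream: true is set. Direct use would look like the following, though calling chat is the conventional entry point:

```typescript
for await (const chunk of llm.streamChat({
  messages: [{ role: "user", content: "Stream a short reply." }],
  stream: true,
})) {
  process.stdout.write(chunk.delta);
}
```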