Class: LlamaDeuce
Llama 2 LLM implementation, backed by a ReplicateSession (see the replicateSession property).
Hierarchy
- BaseLLM

  ↳ LlamaDeuce
Constructors
constructor
• new LlamaDeuce(init?): LlamaDeuce

Parameters

Name | Type |
---|---|
init? | Partial<LlamaDeuce> |

Returns

LlamaDeuce
Overrides
BaseLLM.constructor
Defined in
packages/core/src/llm/LLM.ts:83
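A minimal construction sketch. It assumes LlamaDeuce and DeuceChatStrategy are exported from the llamaindex package, and that DeuceChatStrategy.META is a valid enum member (suggested by mapMessagesToPromptMeta below); check both against your installed version.

```typescript
import { LlamaDeuce, DeuceChatStrategy } from "llamaindex";

// Every init field is optional (Partial<LlamaDeuce>); omitted
// fields fall back to the class defaults.
const llm = new LlamaDeuce({
  model: "Llama-2-70b-chat-4bit",
  chatStrategy: DeuceChatStrategy.META, // assumed enum member
  temperature: 0.1,
  topP: 1,
});
```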
Properties
chatStrategy
• chatStrategy: DeuceChatStrategy
Defined in
packages/core/src/llm/LLM.ts:77
maxTokens
• Optional maxTokens: number
Defined in
packages/core/src/llm/LLM.ts:80
model
• model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit"
Defined in
packages/core/src/llm/LLM.ts:76
replicateSession
• replicateSession: ReplicateSession
Defined in
packages/core/src/llm/LLM.ts:81
temperature
• temperature: number
Defined in
packages/core/src/llm/LLM.ts:78
topP
• topP: number
Defined in
packages/core/src/llm/LLM.ts:79
Accessors
metadata
• get metadata(): Object
Returns
Object
Name | Type |
---|---|
contextWindow | number |
maxTokens | undefined \| number |
model | "Llama-2-70b-chat-old" \| "Llama-2-70b-chat-4bit" \| "Llama-2-13b-chat-old" \| "Llama-2-13b-chat-4bit" \| "Llama-2-7b-chat-old" \| "Llama-2-7b-chat-4bit" |
temperature | number |
tokenizer | undefined |
topP | number |
Overrides
BaseLLM.metadata
Defined in
packages/core/src/llm/LLM.ts:99
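A sketch of reading the metadata getter, reusing the llm instance from the constructor example; the field names follow the table above.

```typescript
const { model, contextWindow, maxTokens, temperature, topP } = llm.metadata;

// contextWindow is derived from the selected model; tokenizer is
// always undefined for this class, per the table above.
console.log(model, contextWindow, maxTokens, temperature, topP);
```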
Methods
chat
▸ chat(params): Promise<AsyncIterable<{ delta: string; options?: Record<string, unknown> }>>

Parameters

Name | Type |
---|---|
params | LLMChatParamsStreaming<Record<string, unknown>> |

Returns

Promise<AsyncIterable<{ delta: string; options?: Record<string, unknown> }>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/LLM.ts:236
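A streaming sketch, reusing llm from the constructor example and assuming the LLMChatParamsStreaming shape used across LlamaIndex.TS ({ messages, stream: true }); each yielded chunk carries an incremental delta, per the return type above.

```typescript
const stream = await llm.chat({
  messages: [{ role: "user", content: "Say hello." }],
  stream: true,
});

for await (const chunk of stream) {
  process.stdout.write(chunk.delta); // incremental text
}
```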
▸ chat(params): Promise<ChatResponse<Record<string, unknown>>>

Parameters

Name | Type |
---|---|
params | LLMChatParamsNonStreaming<Record<string, unknown>> |

Returns

Promise<ChatResponse<Record<string, unknown>>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/LLM.ts:239
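A non-streaming sketch under the same assumptions; ChatResponse is assumed to expose the reply as message.content, as other LlamaIndex.TS LLMs do.

```typescript
const response = await llm.chat({
  messages: [
    { role: "system", content: "You are a terse assistant." },
    { role: "user", content: "What is Llama 2?" },
  ],
});

console.log(response.message.content); // assumed ChatResponse shape
```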
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name | Type |
---|---|
params | LLMCompletionParamsStreaming |

Returns

Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:27
▸ complete(params): Promise<CompletionResponse>

Parameters

Name | Type |
---|---|
params | LLMCompletionParamsNonStreaming |

Returns

Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:30
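A sketch covering both complete overloads inherited from BaseLLM. It assumes the completion params take a prompt string and that CompletionResponse exposes a text field, as elsewhere in LlamaIndex.TS.

```typescript
// Non-streaming: resolves to a single CompletionResponse.
const completion = await llm.complete({ prompt: "Llama 2 is" });
console.log(completion.text); // assumed CompletionResponse shape

// Streaming: each chunk is a CompletionResponse carrying new text.
const stream = await llm.complete({ prompt: "Llama 2 is", stream: true });
for await (const chunk of stream) {
  process.stdout.write(chunk.text);
}
```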
mapMessageTypeA16Z
▸ mapMessageTypeA16Z(messageType): string
Parameters
Name | Type |
---|---|
messageType | MessageType |
Returns
string
Defined in
packages/core/src/llm/LLM.ts:146
mapMessagesToPrompt
▸ mapMessagesToPrompt(messages): Object
Parameters
Name | Type |
---|---|
messages | { content: MessageContent ; options?: Record<string, unknown> ; role: MessageType }[] |
Returns
Object
Name | Type |
---|---|
prompt | string |
systemPrompt | undefined \| MessageContent |
Defined in
packages/core/src/llm/LLM.ts:110
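A sketch of the top-level prompt mapping, which formats the messages according to the instance's chatStrategy; the destructured names follow the return table above.

```typescript
const { prompt, systemPrompt } = llm.mapMessagesToPrompt([
  { role: "system", content: "You are helpful." },
  { role: "user", content: "Hi!" },
]);

console.log(systemPrompt); // extracted system message, if any
console.log(prompt); // strategy-formatted prompt string
```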
mapMessagesToPromptA16Z
▸ mapMessagesToPromptA16Z(messages): Object
Parameters
Name | Type |
---|---|
messages | { content: MessageContent ; options?: Record<string, unknown> ; role: MessageType }[] |
Returns
Object
Name | Type |
---|---|
prompt | string |
systemPrompt | undefined |
Defined in
packages/core/src/llm/LLM.ts:132
mapMessagesToPromptMeta
▸ mapMessagesToPromptMeta(messages, opts?): Object

Parameters

Name | Type |
---|---|
messages | { content: MessageContent ; options?: Record<string, unknown> ; role: MessageType }[] |
opts? | Object |
opts.replicate4Bit? | boolean |
opts.withBos? | boolean |
opts.withNewlines? | boolean |
Returns
Object
Name | Type |
---|---|
prompt | string |
systemPrompt | undefined \| MessageContent |
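A sketch of the Meta-style mapping with its option flags; the flag semantics (BOS token, 4-bit Replicate formatting, newline separators) are inferred from the names in the opts table and should be verified against the source.

```typescript
const { prompt, systemPrompt } = llm.mapMessagesToPromptMeta(
  [{ role: "user", content: "Hi!" }],
  { withBos: true, replicate4Bit: false, withNewlines: false },
);

console.log(prompt);
```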