Class: ReplicateLLM
Replicate LLM implementation.
Extends
Constructors
new ReplicateLLM()
new ReplicateLLM(init?): ReplicateLLM
Parameters
• init?: Partial<ReplicateLLM> & object
Returns
Overrides
Defined in
packages/providers/replicate/dist/index.d.ts:63
Properties
chatStrategy
chatStrategy: ReplicateChatStrategy
Defined in
packages/providers/replicate/dist/index.d.ts:58
maxTokens?
optional maxTokens: number
Defined in
packages/providers/replicate/dist/index.d.ts:61
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
Defined in
packages/providers/replicate/dist/index.d.ts:57
replicateSession
replicateSession: ReplicateSession
Defined in
packages/providers/replicate/dist/index.d.ts:62
temperature
temperature: number
Defined in
packages/providers/replicate/dist/index.d.ts:59
topP
topP: number
Defined in
packages/providers/replicate/dist/index.d.ts:60
Accessors
metadata
Get Signature
get metadata(): object
Returns
object
contextWindow
contextWindow: number
maxTokens
maxTokens: undefined | number
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
temperature
temperature: number
tokenizer
tokenizer: undefined
topP
topP: number
Overrides
Defined in
packages/providers/replicate/dist/index.d.ts:66
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk, any, any>>
Overrides
Defined in
packages/providers/replicate/dist/index.d.ts:95
chat(params)
chat(params): Promise<ChatResponse<object>>
Parameters
• params: LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
Overrides
Defined in
packages/providers/replicate/dist/index.d.ts:96
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:163
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:164
mapMessagesToPrompt()
mapMessagesToPrompt(messages): object
Parameters
• messages: ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
Defined in
packages/providers/replicate/dist/index.d.ts:74
mapMessagesToPromptA16Z()
mapMessagesToPromptA16Z(messages): object
Parameters
• messages: ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined
Defined in
packages/providers/replicate/dist/index.d.ts:82
mapMessagesToPromptLlama3()
mapMessagesToPromptLlama3(messages): object
Parameters
• messages: ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined
Defined in
packages/providers/replicate/dist/index.d.ts:78
mapMessagesToPromptMeta()
mapMessagesToPromptMeta(messages, opts?): object
Parameters
• messages: ChatMessage[]
• opts?
• opts.replicate4Bit?: boolean
• opts.withBos?: boolean
• opts.withNewlines?: boolean
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
Defined in
packages/providers/replicate/dist/index.d.ts:87
mapMessageTypeA16Z()
mapMessageTypeA16Z(messageType): string
Parameters
• messageType: MessageType
Returns
string
Defined in
packages/providers/replicate/dist/index.d.ts:86