Class: MistralAI
MistralAI LLM implementation
Extends
BaseLLM
Constructors
new MistralAI()
new MistralAI(init?): MistralAI
Parameters
• init?: Partial<MistralAI>
Returns
MistralAI
Overrides
BaseLLM.constructor
Source
packages/llamaindex/src/llm/mistral.ts:58
Properties
apiKey?
optional
apiKey: string
Source
packages/llamaindex/src/llm/mistral.ts:52
maxTokens?
optional
maxTokens: number
Source
packages/llamaindex/src/llm/mistral.ts:51
model
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
Source
packages/llamaindex/src/llm/mistral.ts:48
randomSeed?
optional
randomSeed: number
Source
packages/llamaindex/src/llm/mistral.ts:54
safeMode
safeMode: boolean
Source
packages/llamaindex/src/llm/mistral.ts:53
session
private
session: MistralAISession
Source
packages/llamaindex/src/llm/mistral.ts:56
temperature
temperature: number
Source
packages/llamaindex/src/llm/mistral.ts:49
topP
topP: number
Source
packages/llamaindex/src/llm/mistral.ts:50
Accessors
metadata
get metadata(): object
Returns
object
contextWindow
contextWindow: number
maxTokens
maxTokens: undefined | number
model
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
temperature
temperature: number
tokenizer
tokenizer: undefined = undefined
topP
topP: number
Source
packages/llamaindex/src/llm/mistral.ts:69
Methods
buildParams()
private
buildParams(messages): any
Parameters
• messages: ChatMessage[]
Returns
any
Source
packages/llamaindex/src/llm/mistral.ts:80
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
• params: LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Overrides
BaseLLM.chat
Source
packages/llamaindex/src/llm/mistral.ts:92
chat(params)
chat(params): Promise<ChatResponse<object>>
Parameters
• params: LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
Overrides
BaseLLM.chat
Source
packages/llamaindex/src/llm/mistral.ts:95
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Source
packages/llamaindex/src/llm/base.ts:22
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Source
packages/llamaindex/src/llm/base.ts:25
streamChat()
protected
streamChat(__namedParameters): AsyncIterable<ChatResponseChunk>
Parameters
• __namedParameters: LLMChatParamsStreaming<object, object>
Returns
AsyncIterable<ChatResponseChunk>