Class: MistralAI
MistralAI LLM implementation
Extends
Constructors
new MistralAI()
new MistralAI(init?): MistralAI
Parameters
• init?: Partial<MistralAI>
Returns
Overrides
Defined in
packages/llamaindex/src/llm/mistral.ts:58
Properties
apiKey?
optional apiKey: string
Defined in
packages/llamaindex/src/llm/mistral.ts:52
maxTokens?
optional maxTokens: number
Defined in
packages/llamaindex/src/llm/mistral.ts:51
model
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
Defined in
packages/llamaindex/src/llm/mistral.ts:48
randomSeed?
optional randomSeed: number
Defined in
packages/llamaindex/src/llm/mistral.ts:54
safeMode
safeMode: boolean
Defined in
packages/llamaindex/src/llm/mistral.ts:53
temperature
temperature: number
Defined in
packages/llamaindex/src/llm/mistral.ts:49
topP
topP: number
Defined in
packages/llamaindex/src/llm/mistral.ts:50
Accessors
metadata
get metadata(): object
Returns
object
contextWindow
contextWindow: number
maxTokens
maxTokens: undefined | number
model
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
temperature
temperature: number
tokenizer
tokenizer: undefined = undefined
topP
topP: number
Overrides
Defined in
packages/llamaindex/src/llm/mistral.ts:69