Skip to main content

Class: MistralAI

MistralAI LLM implementation

Hierarchy

  • BaseLLM

    MistralAI

Constructors

constructor

new MistralAI(init?): MistralAI

Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial<MistralAI>` |

Returns

MistralAI

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/mistral.ts:63

Properties

apiKey

Optional apiKey: string

Defined in

packages/core/src/llm/mistral.ts:56


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/mistral.ts:57


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/mistral.ts:55


model

model: "mistral-tiny" | "mistral-small" | "mistral-medium"

Defined in

packages/core/src/llm/mistral.ts:52


randomSeed

Optional randomSeed: number

Defined in

packages/core/src/llm/mistral.ts:59


safeMode

safeMode: boolean

Defined in

packages/core/src/llm/mistral.ts:58


session

Private session: MistralAISession

Defined in

packages/core/src/llm/mistral.ts:61


temperature

temperature: number

Defined in

packages/core/src/llm/mistral.ts:53


topP

topP: number

Defined in

packages/core/src/llm/mistral.ts:54

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined \| number` |
| `model` | `"mistral-tiny" \| "mistral-small" \| "mistral-medium"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/mistral.ts:75

Methods

buildParams

buildParams(messages): any

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

any

Defined in

packages/core/src/llm/mistral.ts:90


chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming` |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:102

chat(params): Promise<ChatResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming` |

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:105


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:18

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:21


streamChat

streamChat(«destructured»): AsyncIterable<ChatResponseChunk>

Parameters

| Name | Type |
| :--- | :--- |
| `«destructured»` | `LLMChatParamsStreaming` |

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/mistral.ts:123


tokens

tokens(messages): number

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

number

Overrides

BaseLLM.tokens

Defined in

packages/core/src/llm/mistral.ts:86