Class: MistralAI

MistralAI LLM implementation

Implements

LLM

Constructors

constructor

new MistralAI(init?)

Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial<MistralAI>` |

Defined in

packages/core/src/llm/mistral.ts:59
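
A minimal construction sketch. The llamaindex import path and the MISTRAL_API_KEY environment variable name are assumptions; since init is Partial<MistralAI>, every field is optional, and unset fields fall back to the defaults listed under Properties.

```ts
import { MistralAI } from "llamaindex"; // assumed package export

const llm = new MistralAI({
  model: "mistral-small",
  temperature: 0.1,
  maxTokens: 256,
  apiKey: process.env.MISTRAL_API_KEY, // assumed env var name
});
```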

Properties

apiKey

Optional apiKey: string

Defined in

packages/core/src/llm/mistral.ts:52


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/mistral.ts:53


hasStreaming

hasStreaming: boolean = true

Implementation of

LLM.hasStreaming

Defined in

packages/core/src/llm/mistral.ts:45


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/mistral.ts:51


model

model: "mistral-tiny" | "mistral-small" | "mistral-medium"

Defined in

packages/core/src/llm/mistral.ts:48


randomSeed

Optional randomSeed: number

Defined in

packages/core/src/llm/mistral.ts:55


safeMode

safeMode: boolean

Defined in

packages/core/src/llm/mistral.ts:54


session

Private session: MistralAISession

Defined in

packages/core/src/llm/mistral.ts:57


temperature

temperature: number

Defined in

packages/core/src/llm/mistral.ts:49


topP

topP: number

Defined in

packages/core/src/llm/mistral.ts:50

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined` \| `number` |
| `model` | `"mistral-tiny"` \| `"mistral-small"` \| `"mistral-medium"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Implementation of

LLM.metadata

Defined in

packages/core/src/llm/mistral.ts:70
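
A short sketch of reading the metadata accessor, for example to budget a prompt against the model's context window; the property names follow the Returns table above, and llm is the instance from the constructor example.

```ts
const { model, contextWindow, maxTokens } = llm.metadata;
console.log(`${model}: contextWindow=${contextWindow}, maxTokens=${maxTokens ?? "unset"}`);
```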

Methods

buildParams

Private buildParams(messages): any

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

any

Defined in

packages/core/src/llm/mistral.ts:85


chat

chat<T, R>(messages, parentEvent?, streaming?): Promise<R>

Get a chat response from the LLM

Type parameters

| Name | Type |
| :--- | :--- |
| `T` | extends `undefined` \| `boolean` = `undefined` |
| `R` | `T` extends `true` ? `AsyncGenerator<string, void, unknown>` : `ChatResponse` |

Parameters

| Name | Type | Description |
| :--- | :--- | :--- |
| `messages` | `ChatMessage[]` | The return type of chat() and complete() is determined by the streaming parameter: when true, an async generator of string deltas; otherwise a ChatResponse. |
| `parentEvent?` | `Event` | - |
| `streaming?` | `T` | - |

Returns

Promise<R>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/mistral.ts:97
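
A non-streaming usage sketch, assuming llm is a constructed MistralAI instance and that ChatMessage is exported from the same package. With streaming omitted, T is undefined and R resolves to ChatResponse, so the reply text is on response.message.content.

```ts
import type { ChatMessage } from "llamaindex"; // assumed export

const messages: ChatMessage[] = [
  { role: "system", content: "You are a terse assistant." },
  { role: "user", content: "Name the largest planet." },
];

const response = await llm.chat(messages);
console.log(response.message.content);
```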


complete

complete<T, R>(prompt, parentEvent?, streaming?): Promise<R>

Get a prompt completion from the LLM

Type parameters

| Name | Type |
| :--- | :--- |
| `T` | extends `undefined` \| `boolean` = `undefined` |
| `R` | `T` extends `true` ? `AsyncGenerator<string, void, unknown>` : `ChatResponse` |

Parameters

| Name | Type | Description |
| :--- | :--- | :--- |
| `prompt` | `string` | The prompt to complete. |
| `parentEvent?` | `Event` | - |
| `streaming?` | `T` | - |

Returns

Promise<R>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/mistral.ts:117
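
A non-streaming usage sketch; per the type parameters above, the call resolves to a ChatResponse, so the completion text is read the same way as a chat reply.

```ts
const completion = await llm.complete("Write a haiku about the sea.");
console.log(completion.message.content);
```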


streamChat

Protected streamChat(messages, parentEvent?): AsyncGenerator<string, void, unknown>

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |
| `parentEvent?` | `Event` |

Returns

AsyncGenerator<string, void, unknown>

Defined in

packages/core/src/llm/mistral.ts:128
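
streamChat is protected, so callers reach streaming through the public chat method with streaming set to true, which makes R resolve to the async generator type. A sketch, reusing llm and messages from the earlier examples:

```ts
const stream = await llm.chat(messages, undefined, true);
for await (const delta of stream) {
  process.stdout.write(delta); // each yield is a string fragment of the reply
}
```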


streamComplete

Protected streamComplete(query, parentEvent?): AsyncGenerator<string, void, unknown>

Parameters

| Name | Type |
| :--- | :--- |
| `query` | `string` |
| `parentEvent?` | `Event` |

Returns

AsyncGenerator<string, void, unknown>

Defined in

packages/core/src/llm/mistral.ts:172
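
Likewise, streamComplete is protected; streaming completions are reached through the public complete method with streaming set to true. A sketch under the same assumptions:

```ts
const stream = await llm.complete("Tell me a story about the sea.", undefined, true);
for await (const delta of stream) {
  process.stdout.write(delta);
}
```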


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

number

Implementation of

LLM.tokens

Defined in

packages/core/src/llm/mistral.ts:81