Skip to main content

Class: Ollama

Unified language model interface

Hierarchy

Implements

Constructors

constructor

new Ollama(init)

Parameters

Name | Type
init | Partial<Ollama> & { model: string }

Overrides

BaseEmbedding.constructor

Defined in

packages/core/src/llm/ollama.ts:24

Properties

additionalChatOptions

Optional additionalChatOptions: Record<string, unknown>

Defined in

packages/core/src/llm/ollama.ts:21


baseURL

baseURL: string = "http://127.0.0.1:11434"

Defined in

packages/core/src/llm/ollama.ts:16


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/ollama.ts:22


contextWindow

contextWindow: number = 4096

Defined in

packages/core/src/llm/ollama.ts:19


hasStreaming

Readonly hasStreaming: true

Implementation of

LLM.hasStreaming

Defined in

packages/core/src/llm/ollama.ts:12


model

model: string

Defined in

packages/core/src/llm/ollama.ts:15


requestTimeout

requestTimeout: number

Defined in

packages/core/src/llm/ollama.ts:20


temperature

temperature: number = 0.7

Defined in

packages/core/src/llm/ollama.ts:17


topP

topP: number = 0.9

Defined in

packages/core/src/llm/ollama.ts:18

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Implementation of

LLM.metadata

Defined in

packages/core/src/llm/ollama.ts:35

Methods

chat

chat<T, R>(messages, parentEvent?, streaming?): Promise<R>

Get a chat response from the LLM

Type parameters

Name | Type
T | extends undefined | boolean = undefined
R | T extends true ? AsyncGenerator<string, void, unknown> : ChatResponse

Parameters

Name | Type | Description
messages | ChatMessage[] | The return types of chat() and complete() are set by the "streaming" parameter being set to True.
parentEvent? | Event | -
streaming? | T | -

Returns

Promise<R>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:46


complete

complete<T, R>(prompt, parentEvent?, streaming?): Promise<R>

Get a prompt completion from the LLM

Type parameters

Name | Type
T | extends undefined | boolean = undefined
R | T extends true ? AsyncGenerator<string, void, unknown> : ChatResponse

Parameters

Name | Type | Description
prompt | MessageContent | the prompt to complete
parentEvent? | Event | -
streaming? | T | -

Returns

Promise<R>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:122


getEmbedding

Private getEmbedding(prompt): Promise<number[]>

Parameters

Name | Type
prompt | string

Returns

Promise<number[]>

Defined in

packages/core/src/llm/ollama.ts:170


getQueryEmbedding

getQueryEmbedding(query): Promise<number[]>

Parameters

Name | Type
query | string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getQueryEmbedding

Defined in

packages/core/src/llm/ollama.ts:197


getTextEmbedding

getTextEmbedding(text): Promise<number[]>

Parameters

Name | Type
text | string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getTextEmbedding

Defined in

packages/core/src/llm/ollama.ts:193


similarity

similarity(embedding1, embedding2, mode?): number

Parameters

Name | Type | Default value
embedding1 | number[] | undefined
embedding2 | number[] | undefined
mode | SimilarityType | SimilarityType.DEFAULT

Returns

number

Inherited from

BaseEmbedding.similarity

Defined in

packages/core/src/embeddings/types.ts:14


streamChat

Private streamChat(stream, accessor, parentEvent?): AsyncGenerator<string, void, unknown>

Parameters

Name | Type
stream | ReadableStream<Uint8Array>
accessor | (data: any) => string
parentEvent? | Event

Returns

AsyncGenerator<string, void, unknown>

Defined in

packages/core/src/llm/ollama.ts:94


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

Name | Type
messages | ChatMessage[]

Returns

number

Implementation of

LLM.tokens

Defined in

packages/core/src/llm/ollama.ts:166