Skip to main content

Class: OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

Type parameters

| Name | Type |
| :--- | :--- |
| `CONTEXT_WINDOW_SIZE` | extends `number \| undefined` |

Hierarchy

Implements

Accessors

contextWindowSize

get contextWindowSize(): CONTEXT_WINDOW_SIZE

Returns

CONTEXT_WINDOW_SIZE

Implementation of

TextStreamingBaseModel.contextWindowSize

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:101


modelInformation

get modelInformation(): ModelInformation

Returns

ModelInformation

Implementation of

TextStreamingBaseModel.modelInformation

Inherited from

AbstractModel.modelInformation

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:17


modelName

get modelName(): string

Returns

string

Overrides

AbstractModel.modelName

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:94


settingsForEvent

get settingsForEvent(): Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>

Returns settings that should be recorded in observability events. Security-related settings (e.g. API keys) should not be included here.

Returns

Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>

Implementation of

TextStreamingBaseModel.settingsForEvent

Overrides

AbstractModel.settingsForEvent

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:164

Constructors

constructor

new OllamaCompletionModel<CONTEXT_WINDOW_SIZE>(settings): OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

Type parameters

| Name | Type |
| :--- | :--- |
| `CONTEXT_WINDOW_SIZE` | extends `undefined \| number` |

Parameters

| Name | Type |
| :--- | :--- |
| `settings` | `OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>` |

Returns

OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

Overrides

AbstractModel<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>.constructor

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:89

Methods

asObjectGenerationModel

asObjectGenerationModel<INPUT_PROMPT, OllamaCompletionPrompt>(promptTemplate): ObjectFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, TextGenerationModelSettings>> | ObjectFromTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, TextStreamingModel<OllamaCompletionPrompt, TextGenerationModelSettings>>

Type parameters

Name
INPUT_PROMPT
OllamaCompletionPrompt

Parameters

| Name | Type |
| :--- | :--- |
| `promptTemplate` | `ObjectFromTextPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt> \| FlexibleObjectFromTextPromptTemplate<INPUT_PROMPT, unknown>` |

Returns

ObjectFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, TextGenerationModelSettings>> | ObjectFromTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, TextStreamingModel<OllamaCompletionPrompt, TextGenerationModelSettings>>

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:242


asToolCallGenerationModel

asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate): TextGenerationToolCallModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Type parameters

Name
INPUT_PROMPT

Parameters

| Name | Type |
| :--- | :--- |
| `promptTemplate` | `ToolCallPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>` |

Returns

TextGenerationToolCallModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:258


asToolCallsOrTextGenerationModel

asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Type parameters

Name
INPUT_PROMPT

Parameters

| Name | Type |
| :--- | :--- |
| `promptTemplate` | `ToolCallsPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>` |

Returns

TextGenerationToolCallsModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:267


callAPI

callAPI<RESPONSE>(prompt, callOptions, options): Promise<RESPONSE>

Type parameters

Name
RESPONSE

Parameters

| Name | Type |
| :--- | :--- |
| `prompt` | `OllamaCompletionPrompt` |
| `callOptions` | `FunctionCallOptions` |
| `options` | `Object` |
| `options.responseFormat` | `OllamaCompletionResponseFormatType<RESPONSE>` |

Returns

Promise<RESPONSE>

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:105


doGenerateTexts

doGenerateTexts(prompt, options): Promise<{ rawResponse: { context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; response: string ; total_duration: number } ; textGenerationResults: { finishReason: "unknown" ; text: string = rawResponse.response }[] }>

Parameters

| Name | Type |
| :--- | :--- |
| `prompt` | `OllamaCompletionPrompt` |
| `options` | `FunctionCallOptions` |

Returns

Promise<{ rawResponse: { context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; response: string ; total_duration: number } ; textGenerationResults: { finishReason: "unknown" ; text: string = rawResponse.response }[] }>

Implementation of

TextStreamingBaseModel.doGenerateTexts

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:198


doStreamText

doStreamText(prompt, options): Promise<AsyncIterable<Delta<{ created_at: string ; done: false ; model: string ; response: string } | { context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; sample_count?: number ; sample_duration?: number ; total_duration: number }>>>

Parameters

| Name | Type |
| :--- | :--- |
| `prompt` | `OllamaCompletionPrompt` |
| `options` | `FunctionCallOptions` |

Returns

Promise<AsyncIterable<Delta<{ created_at: string ; done: false ; model: string ; response: string } | { context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; sample_count?: number ; sample_duration?: number ; total_duration: number }>>>

Implementation of

TextStreamingBaseModel.doStreamText

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:230


extractTextDelta

extractTextDelta(delta): undefined | string

Parameters

| Name | Type |
| :--- | :--- |
| `delta` | `unknown` |

Returns

undefined | string

Implementation of

TextStreamingBaseModel.extractTextDelta

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:237


processTextGenerationResponse

processTextGenerationResponse(rawResponse): Object

Parameters

| Name | Type |
| :--- | :--- |
| `rawResponse` | `Object` |
| `rawResponse.context?` | `number[]` |
| `rawResponse.created_at` | `string` |
| `rawResponse.done` | `true` |
| `rawResponse.eval_count` | `number` |
| `rawResponse.eval_duration` | `number` |
| `rawResponse.load_duration?` | `number` |
| `rawResponse.model` | `string` |
| `rawResponse.prompt_eval_count?` | `number` |
| `rawResponse.prompt_eval_duration?` | `number` |
| `rawResponse.response` | `string` |
| `rawResponse.total_duration` | `number` |

Returns

Object

| Name | Type |
| :--- | :--- |
| `rawResponse` | `{ context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; response: string ; total_duration: number }` |
| `rawResponse.context?` | `number[]` |
| `rawResponse.created_at` | `string` |
| `rawResponse.done` | `true` |
| `rawResponse.eval_count` | `number` |
| `rawResponse.eval_duration` | `number` |
| `rawResponse.load_duration?` | `number` |
| `rawResponse.model` | `string` |
| `rawResponse.prompt_eval_count?` | `number` |
| `rawResponse.prompt_eval_duration?` | `number` |
| `rawResponse.response` | `string` |
| `rawResponse.total_duration` | `number` |
| `textGenerationResults` | `{ finishReason: "unknown" ; text: string = rawResponse.response }[]` |

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:218


restoreGeneratedTexts

restoreGeneratedTexts(rawResponse): Object

Parameters

| Name | Type |
| :--- | :--- |
| `rawResponse` | `unknown` |

Returns

Object

| Name | Type |
| :--- | :--- |
| `rawResponse` | `{ context?: number[] ; created_at: string ; done: true ; eval_count: number ; eval_duration: number ; load_duration?: number ; model: string ; prompt_eval_count?: number ; prompt_eval_duration?: number ; response: string ; total_duration: number }` |
| `rawResponse.context?` | `number[]` |
| `rawResponse.created_at` | `string` |
| `rawResponse.done` | `true` |
| `rawResponse.eval_count` | `number` |
| `rawResponse.eval_duration` | `number` |
| `rawResponse.load_duration?` | `number` |
| `rawResponse.model` | `string` |
| `rawResponse.prompt_eval_count?` | `number` |
| `rawResponse.prompt_eval_duration?` | `number` |
| `rawResponse.response` | `string` |
| `rawResponse.total_duration` | `number` |
| `textGenerationResults` | `{ finishReason: "unknown" ; text: string = rawResponse.response }[]` |

Implementation of

TextStreamingBaseModel.restoreGeneratedTexts

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:209


withChatPrompt

withChatPrompt(): PromptTemplateTextStreamingModel<ChatPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Returns this model with a chat prompt template.

Returns

PromptTemplateTextStreamingModel<ChatPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Implementation of

TextStreamingBaseModel.withChatPrompt

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:305


withInstructionPrompt

withInstructionPrompt(): PromptTemplateTextStreamingModel<InstructionPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Returns this model with an instruction prompt template.

Returns

PromptTemplateTextStreamingModel<InstructionPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Implementation of

TextStreamingBaseModel.withInstructionPrompt

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:296


withJsonOutput

withJsonOutput(): OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

When possible, limit the output generation to the specified JSON schema, or super sets of it (e.g. JSON in general).

Returns

OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

Implementation of

TextStreamingBaseModel.withJsonOutput

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:283


withPromptTemplate

withPromptTemplate<INPUT_PROMPT>(promptTemplate): PromptTemplateTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Type parameters

Name
INPUT_PROMPT

Parameters

| Name | Type |
| :--- | :--- |
| `promptTemplate` | `TextGenerationPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>` |

Returns

PromptTemplateTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Implementation of

TextStreamingBaseModel.withPromptTemplate

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:314


withSettings

withSettings(additionalSettings): OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

The withSettings method creates a new model with the same configuration as the original model, but with the specified settings changed.

Parameters

| Name | Type |
| :--- | :--- |
| `additionalSettings` | `Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>` |

Returns

OllamaCompletionModel<CONTEXT_WINDOW_SIZE>

Example

```ts
const model = new OpenAICompletionModel({
  model: "gpt-3.5-turbo-instruct",
  maxGenerationTokens: 500,
});

const modelWithMoreTokens = model.withSettings({
  maxGenerationTokens: 1000,
});
```

Implementation of

TextStreamingBaseModel.withSettings

Overrides

AbstractModel.withSettings

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:336


withTextPrompt

withTextPrompt(): PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Returns this model with a text prompt template.

Returns

PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, OllamaCompletionModel<CONTEXT_WINDOW_SIZE>>

Implementation of

TextStreamingBaseModel.withTextPrompt

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:287

Properties

countPromptTokens

Readonly countPromptTokens: undefined = undefined

Optional. Implement if you have a tokenizer and want to count the number of tokens in a prompt.

Implementation of

TextStreamingBaseModel.countPromptTokens

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:99


provider

Readonly provider: "ollama"

Overrides

AbstractModel.provider

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:93


settings

Readonly settings: OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>

Implementation of

TextStreamingBaseModel.settings

Inherited from

AbstractModel.settings

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:7


tokenizer

Readonly tokenizer: undefined = undefined

Implementation of

TextStreamingBaseModel.tokenizer

Defined in

packages/modelfusion/src/model-provider/ollama/OllamaCompletionModel.ts:98