
Class: AbstractOpenAIChatModel<SETTINGS>

Abstract text generation model that calls an API compatible with the OpenAI chat API.

See

https://platform.openai.com/docs/api-reference/chat/create
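
Because the class is abstract, it is not constructed directly; concrete OpenAI-compatible chat models extend it and inherit the accessors, text generation methods, and tool-call methods documented below. A minimal orientation sketch, assuming the types are exported from the modelfusion package root (the model instance is a stand-in, not something this page shows how to construct):

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
} from "modelfusion"; // assumed export location of these types

// Stand-in for a concrete subclass instance (e.g. an OpenAI chat model).
declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;

// Inherited accessors and properties: model metadata and the readonly settings
// that were passed to the constructor.
console.log(model.modelInformation);
console.log(model.settings);
```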

Type parameters

SETTINGS extends AbstractOpenAIChatSettings

Hierarchy

AbstractModel<SETTINGS>

  ↳ AbstractOpenAIChatModel

Accessors

modelInformation

get modelInformation(): ModelInformation

Returns

ModelInformation

Inherited from

AbstractModel.modelInformation

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:17


settingsForEvent

get settingsForEvent(): Partial<SETTINGS>

Returns

Partial<SETTINGS>

Inherited from

AbstractModel.settingsForEvent

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:24

Constructors

constructor

new AbstractOpenAIChatModel<SETTINGS>(settings): AbstractOpenAIChatModel<SETTINGS>

Type parameters

SETTINGS extends AbstractOpenAIChatSettings

Parameters

settings: SETTINGS

Returns

AbstractOpenAIChatModel<SETTINGS>

Overrides

AbstractModel<SETTINGS>.constructor

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:108

Methods

callAPI

callAPI<RESULT>(messages, callOptions, options): Promise<RESULT>

Type parameters

RESULT

Parameters

messages: ChatPrompt
callOptions: FunctionCallOptions
options: Object
options.functionCall?: "auto" | { name: string } | "none"
options.functions?: { description?: string ; name: string ; parameters: unknown }[]
options.responseFormat: OpenAIChatResponseFormatType<RESULT>
options.toolChoice?: "auto" | "none" | { function: { name: string } ; type: "function" }
options.tools?: { function: { description?: string ; name: string ; parameters: unknown } ; type: "function" }[]

Returns

Promise<RESULT>

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:112
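
A hedged sketch of calling callAPI directly with the documented options. The responseFormat value controls how the HTTP response is parsed and is declared as a stand-in here; the getWeather tool entry is a hypothetical example matching the shape shown in the parameter list above:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
  ChatPrompt,
  FunctionCallOptions,
  OpenAIChatResponseFormatType,
} from "modelfusion"; // assumed export location of these types

// Stand-ins for the model instance and the call inputs.
declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
declare const messages: ChatPrompt;
declare const callOptions: FunctionCallOptions;
// RESULT is inferred from the response format; a concrete parsing format
// provided elsewhere in the library would typically be used here.
declare const responseFormat: OpenAIChatResponseFormatType<{ id: string }>;

async function callWithTools() {
  const result = await model.callAPI(messages, callOptions, {
    responseFormat,
    toolChoice: "auto",
    tools: [
      {
        type: "function",
        function: {
          name: "getWeather", // hypothetical tool
          description: "Look up the weather for a city",
          parameters: {
            type: "object",
            properties: { city: { type: "string" } },
            required: ["city"],
          },
        },
      },
    ],
  });
  return result; // typed as the RESULT of the chosen response format
}
```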


doGenerateTexts

doGenerateTexts(prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Parameters

prompt: ChatPrompt
options: FunctionCallOptions

Returns

Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:188
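
A short usage sketch under the same assumptions as above (stand-in instance and inputs), showing the three parts of the result:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
  ChatPrompt,
  FunctionCallOptions,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
declare const prompt: ChatPrompt;
declare const options: FunctionCallOptions;

async function generateTexts() {
  const { rawResponse, textGenerationResults, usage } =
    await model.doGenerateTexts(prompt, options);

  // One entry per returned choice, with the text and a normalized finish reason.
  for (const { text, finishReason } of textGenerationResults) {
    console.log(finishReason, text);
  }

  // The raw OpenAI-style response and token counts remain available.
  console.log(rawResponse.id, usage.promptTokens, usage.completionTokens);
}
```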


doGenerateToolCall

doGenerateToolCall(tool, prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; toolCall: null | { args: unknown ; id: string } ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Parameters

tool: ToolDefinition<string, unknown>
prompt: ChatPrompt
options: FunctionCallOptions

Returns

Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; toolCall: null | { args: unknown ; id: string } ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:264
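
A sketch of requesting a call to a single tool. The tool definition is a stand-in because its construction is not covered on this page, and the returned args are unknown until validated:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
  ChatPrompt,
  FunctionCallOptions,
  ToolDefinition,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
declare const prompt: ChatPrompt;
declare const options: FunctionCallOptions;
declare const weatherTool: ToolDefinition<string, unknown>; // stand-in tool

async function generateSingleToolCall() {
  const { toolCall, usage } = await model.doGenerateToolCall(
    weatherTool,
    prompt,
    options
  );

  if (toolCall != null) {
    // args is typed as unknown; validate it against the tool's schema before use.
    console.log(toolCall.id, toolCall.args);
  }
  console.log(usage.totalTokens);
}
```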


doGenerateToolCalls

doGenerateToolCalls(tools, prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; text: null | string ; toolCalls: null | { args: unknown ; id: string = toolCall.id; name: string = toolCall.function.name }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Parameters

tools: ToolDefinition<string, unknown>[]
prompt: ChatPrompt
options: FunctionCallOptions

Returns

Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; text: null | string ; toolCalls: null | { args: unknown ; id: string = toolCall.id; name: string = toolCall.function.name }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:302
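
A sketch of offering several tools at once. The result can contain assistant text, tool calls, or neither (both fields are nullable), so each is checked; the tool definitions and their names are stand-ins:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
  ChatPrompt,
  FunctionCallOptions,
  ToolDefinition,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
declare const prompt: ChatPrompt;
declare const options: FunctionCallOptions;
// Stand-in tool definitions (names are hypothetical).
declare const weatherTool: ToolDefinition<string, unknown>;
declare const calendarTool: ToolDefinition<string, unknown>;

async function generateToolCalls() {
  const { text, toolCalls, usage } = await model.doGenerateToolCalls(
    [weatherTool, calendarTool],
    prompt,
    options
  );

  if (text != null) {
    console.log("assistant text:", text);
  }
  for (const call of toolCalls ?? []) {
    console.log(call.name, call.id, call.args);
  }
  console.log(usage.totalTokens);
}
```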


doStreamText

doStreamText(prompt, options): Promise<AsyncIterable<Delta<{ choices: { delta: { content?: null | string ; function_call?: { arguments?: string ; name?: string } ; role?: "user" | "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } ; finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index: number }[] ; created: number ; id: string ; model?: string ; object: string ; system_fingerprint?: null | string }>>>

Parameters

prompt: ChatPrompt
options: FunctionCallOptions

Returns

Promise<AsyncIterable<Delta<{ choices: { delta: { content?: null | string ; function_call?: { arguments?: string ; name?: string } ; role?: "user" | "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } ; finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index: number }[] ; created: number ; id: string ; model?: string ; object: string ; system_fingerprint?: null | string }>>>

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:237
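
A streaming sketch that pairs doStreamText with extractTextDelta (documented below) to accumulate text as it arrives; the stand-ins are the same as in the earlier examples:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
  ChatPrompt,
  FunctionCallOptions,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
declare const prompt: ChatPrompt;
declare const options: FunctionCallOptions;

async function streamText() {
  const deltaStream = await model.doStreamText(prompt, options);

  let fullText = "";
  for await (const delta of deltaStream) {
    // extractTextDelta returns undefined for chunks that carry no new text.
    const textDelta = model.extractTextDelta(delta);
    if (textDelta !== undefined) {
      fullText += textDelta;
    }
  }
  return fullText;
}
```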


extractTextDelta

extractTextDelta(delta): undefined | string

Parameters

delta: unknown

Returns

undefined | string

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:243


extractUsage

extractUsage(response): Object

Parameters

response: Object
response.choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[]
response.created: number
response.id: string
response.model: string
response.object: "chat.completion"
response.system_fingerprint?: null | string
response.usage: Object
response.usage.completion_tokens: number
response.usage.prompt_tokens: number
response.usage.total_tokens: number

Returns

Object

completionTokens: number
promptTokens: number
totalTokens: number

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:335
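
A small sketch showing that extractUsage maps the snake_case usage fields of a parsed chat completion onto camelCase token counts; the response value is a stand-in typed via the method's own parameter type:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
// Reuse the documented parameter type instead of re-spelling the response shape.
declare const response: Parameters<
  AbstractOpenAIChatModel<AbstractOpenAIChatSettings>["extractUsage"]
>[0];

const usage = model.extractUsage(response);
// usage.totalTokens corresponds to response.usage.total_tokens, and so on.
console.log(usage.promptTokens, usage.completionTokens, usage.totalTokens);
```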


processTextGenerationResponse

processTextGenerationResponse(rawResponse): Object

Parameters

rawResponse: Object
rawResponse.choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[]
rawResponse.created: number
rawResponse.id: string
rawResponse.model: string
rawResponse.object: "chat.completion"
rawResponse.system_fingerprint?: null | string
rawResponse.usage: Object
rawResponse.usage.completion_tokens: number
rawResponse.usage.prompt_tokens: number
rawResponse.usage.total_tokens: number

Returns

Object

rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } }
rawResponse.choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[]
rawResponse.created: number
rawResponse.id: string
rawResponse.model: string
rawResponse.object: "chat.completion"
rawResponse.system_fingerprint?: null | string
rawResponse.usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number }
rawResponse.usage.completion_tokens: number
rawResponse.usage.prompt_tokens: number
rawResponse.usage.total_tokens: number
textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[]
usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens }
usage.completionTokens: number
usage.promptTokens: number
usage.totalTokens: number

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:208


restoreGeneratedTexts

restoreGeneratedTexts(rawResponse): Object

Parameters

rawResponse: unknown

Returns

Object

rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } }
rawResponse.choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[]
rawResponse.created: number
rawResponse.id: string
rawResponse.model: string
rawResponse.object: "chat.completion"
rawResponse.system_fingerprint?: null | string
rawResponse.usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number }
rawResponse.usage.completion_tokens: number
rawResponse.usage.prompt_tokens: number
rawResponse.usage.total_tokens: number
textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[]
usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens }
usage.completionTokens: number
usage.promptTokens: number
usage.totalTokens: number

Defined in

packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:199
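
Because the input is typed as unknown, restoreGeneratedTexts is suited to turning a previously persisted raw response (for example, one loaded from a cache) back into the same result structure that doGenerateTexts returns. A sketch under the same stand-in assumptions as the earlier examples:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;
// A previously captured raw chat completion, e.g. read from storage; it is
// unknown to the type system until the method processes it.
declare const storedRawResponse: unknown;

const restored = model.restoreGeneratedTexts(storedRawResponse);
console.log(restored.textGenerationResults[0]?.text);
console.log(restored.usage.totalTokens);
```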


withSettings

withSettings(additionalSettings): this

Parameters

additionalSettings: Partial<SETTINGS>

Returns

this

Inherited from

AbstractModel.withSettings

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:26
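
A sketch of deriving a variant of the model; as the additionalSettings name suggests, the partial settings are applied on top of the existing ones and a model of the same class is returned. The temperature key is an assumption about AbstractOpenAIChatSettings and may differ in your version:

```ts
import type {
  AbstractOpenAIChatModel,
  AbstractOpenAIChatSettings,
} from "modelfusion"; // assumed export location of these types

declare const model: AbstractOpenAIChatModel<AbstractOpenAIChatSettings>;

// `temperature` is assumed to be one of the OpenAI-compatible settings keys.
const deterministicModel = model.withSettings({ temperature: 0 });

console.log(deterministicModel.settings);
```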

Properties

modelName

Readonly Abstract modelName: null | string

Inherited from

AbstractModel.modelName

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:14


provider

Readonly Abstract provider: string

Inherited from

AbstractModel.provider

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:13


settings

Readonly settings: SETTINGS

Inherited from

AbstractModel.settings

Defined in

packages/modelfusion/src/model-function/AbstractModel.ts:7