Class: AbstractOpenAIChatModel<SETTINGS>
Abstract text generation model that calls an API compatible with the OpenAI chat API.
See
https://platform.openai.com/docs/api-reference/chat/create
Type parameters
Name | Type |
---|---|
SETTINGS | extends AbstractOpenAIChatSettings |
Hierarchy
- AbstractModel<SETTINGS>
  ↳ AbstractOpenAIChatModel
Accessors
modelInformation
• get modelInformation(): ModelInformation
Returns
ModelInformation
Inherited from
AbstractModel.modelInformation
Defined in
packages/modelfusion/src/model-function/AbstractModel.ts:17
settingsForEvent
• get settingsForEvent(): Partial<SETTINGS>
Returns
Partial<SETTINGS>
Inherited from
AbstractModel.settingsForEvent
Defined in
packages/modelfusion/src/model-function/AbstractModel.ts:24
Constructors
constructor
• new AbstractOpenAIChatModel<SETTINGS>(settings): AbstractOpenAIChatModel<SETTINGS>
Type parameters
Name | Type |
---|---|
SETTINGS | extends AbstractOpenAIChatSettings |
Parameters
Name | Type |
---|---|
settings | SETTINGS |
Returns
AbstractOpenAIChatModel<SETTINGS>
Overrides
AbstractModel<SETTINGS>.constructor
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:108
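Since the class is abstract, it is only instantiated through concrete subclasses. The sketch below shows a hypothetical minimal subclass: the import path, the settings interface, and the `"openai"` provider string are assumptions, and a real subclass may need to implement additional abstract members that are not part of this excerpt.

```ts
import {
  AbstractOpenAIChatModel,
  type AbstractOpenAIChatSettings,
} from "modelfusion"; // assumed export location

// Hypothetical settings type; the `model` property is an assumption.
interface MyChatSettings extends AbstractOpenAIChatSettings {
  model: string;
}

class MyChatModel extends AbstractOpenAIChatModel<MyChatSettings> {
  // Implements the abstract `provider` and `modelName` members
  // documented under "Properties" below.
  readonly provider = "openai";

  constructor(settings: MyChatSettings) {
    super(settings); // stored as the readonly `settings` property
  }

  get modelName() {
    return this.settings.model;
  }
}
```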
Methods
callAPI
▸ callAPI<RESULT>(messages, callOptions, options): Promise<RESULT>
Type parameters
Name |
---|
RESULT |
Parameters
Name | Type |
---|---|
messages | ChatPrompt |
callOptions | FunctionCallOptions |
options | Object |
options.functionCall? | "auto" | { name : string } | "none" |
options.functions? | { description? : string ; name : string ; parameters : unknown }[] |
options.responseFormat | OpenAIChatResponseFormatType <RESULT > |
options.toolChoice? | "auto" | "none" | { function : { name : string } ; type : "function" } |
options.tools? | { function : { description? : string ; name : string ; parameters : unknown } ; type : "function" }[] |
Returns
Promise<RESULT>
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:112
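A minimal sketch of invoking callAPI directly, assuming `model` is an instance of a concrete subclass and that `chatPrompt`, `callOptions`, and `textResponseFormat` are pre-existing values of the documented ChatPrompt, FunctionCallOptions, and OpenAIChatResponseFormatType<RESULT> types (none of which are defined in this excerpt). The tool definition is hypothetical.

```ts
// Sketch only: chatPrompt, callOptions, and textResponseFormat are assumed
// to already exist with the documented parameter types.
const response = await model.callAPI(chatPrompt, callOptions, {
  responseFormat: textResponseFormat,
  // Optional tool wiring, matching the documented `options.tools` /
  // `options.toolChoice` shapes:
  tools: [
    {
      type: "function",
      function: {
        name: "getWeather", // hypothetical tool name
        description: "Look up the current weather for a city",
        parameters: { type: "object", properties: {}, required: [] },
      },
    },
  ],
  toolChoice: "auto",
});
```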
doGenerateTexts
▸ doGenerateTexts(prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Parameters
Name | Type |
---|---|
prompt | ChatPrompt |
options | FunctionCallOptions |
Returns
Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; textGenerationResults: { finishReason: TextGenerationFinishReason ; text: string }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:188
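A sketch of reading the documented result shape, assuming `model`, `chatPrompt`, and `callOptions` as above:

```ts
const { rawResponse, textGenerationResults, usage } =
  await model.doGenerateTexts(chatPrompt, callOptions);

for (const result of textGenerationResults) {
  // One entry per returned choice.
  console.log(result.finishReason, result.text);
}

// Token usage is normalized to camelCase from the raw snake_case fields:
console.log(usage.promptTokens, usage.completionTokens, usage.totalTokens);
console.log(rawResponse.usage.prompt_tokens); // same value, raw field name
```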
doGenerateToolCall
▸ doGenerateToolCall(tool, prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; toolCall: null | { args: unknown ; id: string } ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Parameters
Name | Type |
---|---|
tool | ToolDefinition <string , unknown > |
prompt | ChatPrompt |
options | FunctionCallOptions |
Returns
Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; toolCall: null | { args: unknown ; id: string } ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:264
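A sketch of requesting a single tool call, assuming `weatherTool` is a ToolDefinition<string, unknown> (its construction is outside this excerpt) and `model`, `chatPrompt`, and `callOptions` as above:

```ts
const { toolCall, usage } = await model.doGenerateToolCall(
  weatherTool,
  chatPrompt,
  callOptions
);

if (toolCall != null) {
  // `args` is typed as unknown, so validate it before executing the tool.
  console.log(toolCall.id, toolCall.args);
} else {
  console.log("The model answered without calling the tool.");
}
console.log(usage.totalTokens);
```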
doGenerateToolCalls
▸ doGenerateToolCalls(tools, prompt, options): Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; text: null | string ; toolCalls: null | { args: unknown ; id: string = toolCall.id; name: string = toolCall.function.name }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Parameters
Name | Type |
---|---|
tools | ToolDefinition <string , unknown >[] |
prompt | ChatPrompt |
options | FunctionCallOptions |
Returns
Promise<{ rawResponse: { choices: { finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index?: number ; logprobs?: any ; message: { content: null | string ; function_call?: { arguments: string ; name: string } ; role: "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } }[] ; created: number ; id: string ; model: string ; object: "chat.completion" ; system_fingerprint?: null | string ; usage: { completion_tokens: number ; prompt_tokens: number ; total_tokens: number } } ; text: null | string ; toolCalls: null | { args: unknown ; id: string = toolCall.id; name: string = toolCall.function.name }[] ; usage: { completionTokens: number = response.usage.completion_tokens; promptTokens: number = response.usage.prompt_tokens; totalTokens: number = response.usage.total_tokens } }>
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:302
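A sketch of offering several tools at once; both `text` and `toolCalls` are nullable in the documented result, so a response may contain either or both. The tool definitions are assumed as above:

```ts
const { text, toolCalls } = await model.doGenerateToolCalls(
  [weatherTool, calculatorTool], // hypothetical ToolDefinition values
  chatPrompt,
  callOptions
);

if (toolCalls != null) {
  for (const call of toolCalls) {
    console.log(call.id, call.name, call.args);
  }
}
if (text != null) {
  console.log(text);
}
```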
doStreamText
▸ doStreamText(prompt, options): Promise<AsyncIterable<Delta<{ choices: { delta: { content?: null | string ; function_call?: { arguments?: string ; name?: string } ; role?: "user" | "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } ; finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index: number }[] ; created: number ; id: string ; model?: string ; object: string ; system_fingerprint?: null | string }>>>
Parameters
Name | Type |
---|---|
prompt | ChatPrompt |
options | FunctionCallOptions |
Returns
Promise<AsyncIterable<Delta<{ choices: { delta: { content?: null | string ; function_call?: { arguments?: string ; name?: string } ; role?: "user" | "assistant" ; tool_calls?: { function: { arguments: string ; name: string } ; id: string ; type: "function" }[] } ; finish_reason?: null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index: number }[] ; created: number ; id: string ; model?: string ; object: string ; system_fingerprint?: null | string }>>>
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:237
extractTextDelta
▸ extractTextDelta(delta): undefined | string
Parameters
Name | Type |
---|---|
delta | unknown |
Returns
undefined | string
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:243
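A sketch that combines doStreamText and extractTextDelta to accumulate streamed text. It assumes that each value yielded by the stream can be passed to extractTextDelta (which accepts `unknown` and returns the next text chunk or `undefined`), with `model`, `chatPrompt`, and `callOptions` as above:

```ts
const stream = await model.doStreamText(chatPrompt, callOptions);

let fullText = "";
for await (const delta of stream) {
  const chunk = model.extractTextDelta(delta);
  if (chunk !== undefined) {
    fullText += chunk;
    process.stdout.write(chunk); // print incrementally as chunks arrive
  }
}
console.log("\n---\n" + fullText);
```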
extractUsage
▸ extractUsage(response): Object
Parameters
Name | Type |
---|---|
response | Object |
response.choices | { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] |
response.created | number |
response.id | string |
response.model | string |
response.object | "chat.completion" |
response.system_fingerprint? | null | string |
response.usage | Object |
response.usage.completion_tokens | number |
response.usage.prompt_tokens | number |
response.usage.total_tokens | number |
Returns
Object
Name | Type |
---|---|
completionTokens | number |
promptTokens | number |
totalTokens | number |
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:335
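A short sketch: extractUsage maps the snake_case `usage` block of a raw chat completion response to the camelCase shape used in the method results above. `rawResponse` is assumed to match the documented parameter type (for example, the `rawResponse` returned by doGenerateTexts):

```ts
const { completionTokens, promptTokens, totalTokens } =
  model.extractUsage(rawResponse);

console.log(
  `prompt=${promptTokens} completion=${completionTokens} total=${totalTokens}`
);
```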
processTextGenerationResponse
▸ processTextGenerationResponse(rawResponse): Object
Parameters
Name | Type |
---|---|
rawResponse | Object |
rawResponse.choices | { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] |
rawResponse.created | number |
rawResponse.id | string |
rawResponse.model | string |
rawResponse.object | "chat.completion" |
rawResponse.system_fingerprint? | null | string |
rawResponse.usage | Object |
rawResponse.usage.completion_tokens | number |
rawResponse.usage.prompt_tokens | number |
rawResponse.usage.total_tokens | number |
Returns
Object
Name | Type |
---|---|
rawResponse | { choices : { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] ; created : number ; id : string ; model : string ; object : "chat.completion" ; system_fingerprint? : null | string ; usage : { completion_tokens : number ; prompt_tokens : number ; total_tokens : number } } |
rawResponse.choices | { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] |
rawResponse.created | number |
rawResponse.id | string |
rawResponse.model | string |
rawResponse.object | "chat.completion" |
rawResponse.system_fingerprint? | null | string |
rawResponse.usage | { completion_tokens : number ; prompt_tokens : number ; total_tokens : number } |
rawResponse.usage.completion_tokens | number |
rawResponse.usage.prompt_tokens | number |
rawResponse.usage.total_tokens | number |
textGenerationResults | { finishReason : TextGenerationFinishReason ; text : string }[] |
usage | { completionTokens : number = response.usage.completion_tokens; promptTokens : number = response.usage.prompt_tokens; totalTokens : number = response.usage.total_tokens } |
usage.completionTokens | number |
usage.promptTokens | number |
usage.totalTokens | number |
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:208
restoreGeneratedTexts
▸ restoreGeneratedTexts(rawResponse): Object
Parameters
Name | Type |
---|---|
rawResponse | unknown |
Returns
Object
Name | Type |
---|---|
rawResponse | { choices : { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] ; created : number ; id : string ; model : string ; object : "chat.completion" ; system_fingerprint? : null | string ; usage : { completion_tokens : number ; prompt_tokens : number ; total_tokens : number } } |
rawResponse.choices | { finish_reason? : null | "length" | "stop" | "function_call" | "tool_calls" | "content_filter" ; index? : number ; logprobs? : any ; message : { content : null | string ; function_call? : { arguments : string ; name : string } ; role : "assistant" ; tool_calls? : { function : { arguments : string ; name : string } ; id : string ; type : "function" }[] } }[] |
rawResponse.created | number |
rawResponse.id | string |
rawResponse.model | string |
rawResponse.object | "chat.completion" |
rawResponse.system_fingerprint? | null | string |
rawResponse.usage | { completion_tokens : number ; prompt_tokens : number ; total_tokens : number } |
rawResponse.usage.completion_tokens | number |
rawResponse.usage.prompt_tokens | number |
rawResponse.usage.total_tokens | number |
textGenerationResults | { finishReason : TextGenerationFinishReason ; text : string }[] |
usage | { completionTokens : number = response.usage.completion_tokens; promptTokens : number = response.usage.prompt_tokens; totalTokens : number = response.usage.total_tokens } |
usage.completionTokens | number |
usage.promptTokens | number |
usage.totalTokens | number |
Defined in
packages/modelfusion/src/model-provider/openai/AbstractOpenAIChatModel.ts:199
withSettings
▸ withSettings(additionalSettings): this
Parameters
Name | Type |
---|---|
additionalSettings | Partial <SETTINGS > |
Returns
this
Inherited from
AbstractModel.withSettings
Defined in
packages/modelfusion/src/model-function/AbstractModel.ts:26
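A sketch of deriving a reconfigured model: withSettings merges the given partial settings and returns a model of the same type. The `temperature` key is an assumption about the concrete settings type, not part of this excerpt:

```ts
// Derived model with the override applied; the remaining settings
// come from `model`.
const preciseModel = model.withSettings({ temperature: 0.1 });
```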
Properties
modelName
• Readonly Abstract modelName: null | string
Inherited from
AbstractModel.modelName
Defined in
packages/modelfusion/src/model-function/AbstractModel.ts:14
provider
• Readonly Abstract provider: string
Inherited from
AbstractModel.provider
Defined in
packages/modelfusion/src/model-function/AbstractModel.ts:13
settings
• Readonly settings: SETTINGS
Inherited from
AbstractModel.settings