Class: abstract ToolCallLLM<AdditionalChatOptions>
Extends
BaseLLM<AdditionalChatOptions, ToolCallLLMMessageOptions>
Extended by
Type parameters
• AdditionalChatOptions extends object = object
Constructors
new ToolCallLLM()
new ToolCallLLM<AdditionalChatOptions>(): ToolCallLLM<AdditionalChatOptions>
Returns
ToolCallLLM<AdditionalChatOptions>
Inherited from
BaseLLM<AdditionalChatOptions, ToolCallLLMMessageOptions>.constructor
Properties
metadata
abstract
metadata: LLMMetadata
Inherited from
BaseLLM.metadata
Source
packages/llamaindex/src/llm/base.ts:20
supportToolCall
abstract
supportToolCall: boolean
Source
packages/llamaindex/src/llm/base.ts:70
Methods
chat()
chat(params)
abstract chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
• params: LLMChatParamsStreaming<AdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Inherited from
BaseLLM.chat
Source
packages/llamaindex/src/llm/base.ts:53
chat(params)
abstract chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming<AdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Inherited from
BaseLLM.chat
Source
packages/llamaindex/src/llm/base.ts:59
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Source
packages/llamaindex/src/llm/base.ts:22
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete