Skip to main content

Class: AnthropicAgent

Extends

AgentRunner<Anthropic>

Constructors

new AnthropicAgent()

new AnthropicAgent(params): AnthropicAgent

Parameters

params: AnthropicAgentParams

Returns

AnthropicAgent

Overrides

AgentRunner<Anthropic>.constructor

Source

packages/llamaindex/src/agent/anthropic.ts:34

Properties

#chatHistory

private #chatHistory: ChatMessage<ToolCallLLMMessageOptions>[]

Inherited from

AgentRunner.#chatHistory

Source

packages/llamaindex/src/agent/base.ts:206


#llm

private readonly #llm: Anthropic

Inherited from

AgentRunner.#llm

Source

packages/llamaindex/src/agent/base.ts:201


#runner

private readonly #runner: AgentWorker<Anthropic, object, ToolCallLLMMessageOptions>

Inherited from

AgentRunner.#runner

Source

packages/llamaindex/src/agent/base.ts:207


#systemPrompt

private readonly #systemPrompt: null | MessageContent = null

Inherited from

AgentRunner.#systemPrompt

Source

packages/llamaindex/src/agent/base.ts:205


#tools

private readonly #tools: BaseToolWithCall[] | ((query) => Promise<BaseToolWithCall[]>)

Inherited from

AgentRunner.#tools

Source

packages/llamaindex/src/agent/base.ts:202


#verbose

private readonly #verbose: boolean

Inherited from

AgentRunner.#verbose

Source

packages/llamaindex/src/agent/base.ts:208


createStore()

createStore: () => object = AgentRunner.defaultCreateStore

Returns

object

Overrides

AgentRunner.createStore

Source

packages/llamaindex/src/agent/anthropic.ts:52


taskHandler

static taskHandler: TaskHandler<Anthropic>

Source

packages/llamaindex/src/agent/anthropic.ts:65

Accessors

chatHistory

get chatHistory(): ChatMessage<AdditionalMessageOptions>[]

Returns

ChatMessage<AdditionalMessageOptions>[]

Source

packages/llamaindex/src/agent/base.ts:235


llm

get llm(): AI

Returns

AI

Source

packages/llamaindex/src/agent/base.ts:231


verbose

get verbose(): boolean

Returns

boolean

Source

packages/llamaindex/src/agent/base.ts:239

Methods

chat()

chat(params)

chat(params): Promise<EngineResponse>

Parameters

params: ChatEngineParamsNonStreaming

Returns

Promise<EngineResponse>

Overrides

AgentRunner.chat

Source

packages/llamaindex/src/agent/anthropic.ts:54

chat(params)

chat(params): Promise<never>

Parameters

params: ChatEngineParamsStreaming

Returns

Promise<never>

Overrides

AgentRunner.chat

Source

packages/llamaindex/src/agent/anthropic.ts:55


createTask()

createTask(message, stream, verbose, chatHistory?): ReadableStream<TaskStepOutput<Anthropic, object, ToolCallLLMMessageOptions>>

Parameters

message: MessageContent

stream: boolean = false

verbose: undefined | boolean = undefined

chatHistory?: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ReadableStream<TaskStepOutput<Anthropic, object, ToolCallLLMMessageOptions>>

Inherited from

AgentRunner.createTask

Source

packages/llamaindex/src/agent/base.ts:266


getTools()

getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Parameters

query: MessageContent

Returns

BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Inherited from

AgentRunner.getTools

Source

packages/llamaindex/src/agent/base.ts:247


reset()

reset(): void

Returns

void

Inherited from

AgentRunner.reset

Source

packages/llamaindex/src/agent/base.ts:243


defaultCreateStore()

static defaultCreateStore(): object

Returns

object

Inherited from

AgentRunner.defaultCreateStore

Source

packages/llamaindex/src/agent/base.ts:213


shouldContinue()

static shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean

Type parameters

AI extends LLM<object, object>

Store extends object = object

AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never

Parameters

task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>

Returns

boolean

Inherited from

AgentRunner.shouldContinue

Source

packages/llamaindex/src/agent/base.ts:253