Class: EngineResponse
Implements
Constructors
new EngineResponse()
private
new EngineResponse(chatResponse, stream, sourceNodes?): EngineResponse
Parameters
• chatResponse: ChatResponse<object>
• stream: boolean
• sourceNodes?: NodeWithScore<Metadata>[]
Returns
Source
packages/llamaindex/src/EngineResponse.ts:19
Properties
#stream
private
#stream: boolean
Source
packages/llamaindex/src/EngineResponse.ts:17
message
message: ChatMessage
Implementation of
Source
packages/llamaindex/src/EngineResponse.ts:14
metadata
metadata: Record<string, unknown> = {}
Source
packages/llamaindex/src/EngineResponse.ts:12
raw
raw: null | object
Raw response from the LLM
If the LLM responds with an iterable of chunks, this will be an array of those chunks.
Implementation of
Source
packages/llamaindex/src/EngineResponse.ts:15
sourceNodes?
optional
sourceNodes: NodeWithScore<Metadata>[]
Source
packages/llamaindex/src/EngineResponse.ts:10
Accessors
delta
get
delta(): string
Returns
string
Source
packages/llamaindex/src/EngineResponse.ts:78
response
get
response(): string
Returns
string
Source
packages/llamaindex/src/EngineResponse.ts:74
Methods
toString()
toString(): string
Returns
string
Source
packages/llamaindex/src/EngineResponse.ts:87
fromChatResponse()
static
fromChatResponse(chatResponse, sourceNodes?): EngineResponse
Parameters
• chatResponse: ChatResponse<object>
• sourceNodes?: NodeWithScore<Metadata>[]
Returns
Source
packages/llamaindex/src/EngineResponse.ts:55
fromChatResponseChunk()
static
fromChatResponseChunk(chunk, sourceNodes?): EngineResponse
Parameters
• chunk: ChatResponseChunk
• sourceNodes?: NodeWithScore<Metadata>[]
Returns
Source
packages/llamaindex/src/EngineResponse.ts:62