Class: Portkey
Extends
BaseLLM
Constructors
new Portkey()
new Portkey(init?): Portkey
Parameters
• init?: Partial<Portkey>
Returns
Overrides
BaseLLM.constructor
Source
packages/llamaindex/src/llm/portkey.ts:78
Properties
apiKey?
optional
apiKey: string = undefined
Source
packages/llamaindex/src/llm/portkey.ts:72
baseURL?
optional
baseURL: string = undefined
Source
packages/llamaindex/src/llm/portkey.ts:73
llms?
optional
llms: null | [LLMOptions] = undefined
Source
packages/llamaindex/src/llm/portkey.ts:75
mode?
optional
mode: string = undefined
Source
packages/llamaindex/src/llm/portkey.ts:74
session
session: PortkeySession
Source
packages/llamaindex/src/llm/portkey.ts:76
Accessors
metadata
get
metadata(): LLMMetadata
Returns
Source
packages/llamaindex/src/llm/portkey.ts:92
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
• params: LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Overrides
BaseLLM.chat
Source
packages/llamaindex/src/llm/portkey.ts:96
chat(params)
chat(params): Promise<ChatResponse<object>>
Parameters
• params: LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
Overrides
BaseLLM.chat
Source
packages/llamaindex/src/llm/portkey.ts:99
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Source
packages/llamaindex/src/llm/base.ts:22
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Source
packages/llamaindex/src/llm/base.ts:25