Class: abstract
AgentRunner<AI, Store, AdditionalMessageOptions>
Runner will manage the task execution and provide a high-level API for the user
Extended by
Type parameters
• AI extends LLM
• Store extends object = object
• AdditionalMessageOptions extends object = AI extends LLM<object, infer AdditionalMessageOptions> ? AdditionalMessageOptions : never
Implements
Constructors
new AgentRunner()
protected
new AgentRunner<AI, Store, AdditionalMessageOptions>(params): AgentRunner<AI, Store, AdditionalMessageOptions>
Parameters
• params: AgentRunnerParams<AI, Store, AdditionalMessageOptions>
Returns
AgentRunner<AI, Store, AdditionalMessageOptions>
Source
packages/llamaindex/src/agent/base.ts:255
Properties
#chatHistory
private
#chatHistory: ChatMessage<AdditionalMessageOptions>[]
Source
packages/llamaindex/src/agent/base.ts:217
#llm
private
readonly
#llm:AI
Source
packages/llamaindex/src/agent/base.ts:212
#runner
private
readonly
#runner: AgentWorker<AI, Store, AdditionalMessageOptions>
Source
packages/llamaindex/src/agent/base.ts:218
#systemPrompt
private
readonly
#systemPrompt: null | MessageContent = null
Source
packages/llamaindex/src/agent/base.ts:216
#tools
private
readonly
#tools: BaseToolWithCall[] | ((query) => Promise<BaseToolWithCall[]>)
Source
packages/llamaindex/src/agent/base.ts:213
#verbose
private
readonly
#verbose:boolean
Source
packages/llamaindex/src/agent/base.ts:219
defaultTaskHandler
static
defaultTaskHandler: TaskHandler<LLM<object, object>>
Source
packages/llamaindex/src/agent/base.ts:228
Accessors
chatHistory
get
chatHistory(): ChatMessage<AdditionalMessageOptions>[]
Returns
ChatMessage<AdditionalMessageOptions>[]
Source
packages/llamaindex/src/agent/base.ts:273
llm
get
llm():AI
Returns
AI
Source
packages/llamaindex/src/agent/base.ts:269
verbose
get
verbose():boolean
Returns
boolean
Source
packages/llamaindex/src/agent/base.ts:277
Methods
chat()
chat(params)
chat(params): Promise<EngineResponse>
Send message along with the class's current chat history to the LLM.
Parameters
• params: ChatEngineParamsNonStreaming
Returns
Promise<EngineResponse>
Implementation of
Source
packages/llamaindex/src/agent/base.ts:344
chat(params)
chat(params): Promise<ReadableStream<EngineResponse>>
Parameters
• params: ChatEngineParamsStreaming
Returns
Promise<ReadableStream<EngineResponse>>
Implementation of
Source
packages/llamaindex/src/agent/base.ts:345
createStore()
abstract
createStore():Store
Returns
Store
Source
packages/llamaindex/src/agent/base.ts:222
createTask()
createTask(message, stream, verbose, chatHistory?): ReadableStream<TaskStepOutput<AI, Store, AdditionalMessageOptions>>
Parameters
• message: MessageContent
• stream: boolean = false
• verbose: undefined | boolean = undefined
• chatHistory?: ChatMessage<AdditionalMessageOptions>[]
Returns
ReadableStream<TaskStepOutput<AI, Store, AdditionalMessageOptions>>
Source
packages/llamaindex/src/agent/base.ts:304
getTools()
getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>
Parameters
• query: MessageContent
Returns
BaseToolWithCall[] | Promise<BaseToolWithCall[]>
Source
packages/llamaindex/src/agent/base.ts:285
reset()
reset(): void
Resets the chat history so that it's empty.
Returns
void
Implementation of
Source
packages/llamaindex/src/agent/base.ts:281
defaultCreateStore()
static
defaultCreateStore():object
Returns
object
Source
packages/llamaindex/src/agent/base.ts:224
shouldContinue()
static
shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean
Type parameters
• AI extends LLM<object, object>
• Store extends object = object
• AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never
Parameters
• task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>
Returns
boolean