@@ -167,7 +167,7 @@ class UserPromptNode(AgentNode[DepsT, NodeRunEndT]):
     system_prompt_functions: list[_system_prompt.SystemPromptRunner[DepsT]]
     system_prompt_dynamic_functions: dict[str, _system_prompt.SystemPromptRunner[DepsT]]

-    async def run(
+    async def run(  # noqa: C901
         self, ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]]
     ) -> ModelRequestNode[DepsT, NodeRunEndT] | CallToolsNode[DepsT, NodeRunEndT]:
         try:
@@ -186,15 +186,6 @@ async def run(
         # Use the `capture_run_messages` list as the message history so that new messages are added to it
         ctx.state.message_history = messages

-        run_context = build_run_context(ctx)
-
-        parts: list[_messages.ModelRequestPart] = []
-        if messages:
-            # Reevaluate any dynamic system prompt parts
-            await self._reevaluate_dynamic_prompts(messages, run_context)
-        else:
-            parts.extend(await self._sys_parts(run_context))
-
         if (tool_call_results := ctx.deps.tool_call_results) is not None:
             if messages and (last_message := messages[-1]) and isinstance(last_message, _messages.ModelRequest):
                 # If tool call results were provided, that means the previous run ended on deferred tool calls.
@@ -209,21 +200,52 @@ async def run(
             if not messages:
                 raise exceptions.UserError('Tool call results were provided, but the message history is empty.')

+        next_message: _messages.ModelRequest | None = None
+
         if messages and (last_message := messages[-1]):
             if isinstance(last_message, _messages.ModelRequest) and self.user_prompt is None:
                 # Drop last message from history and reuse its parts
                 messages.pop()
-                parts.extend(last_message.parts)
+                next_message = _messages.ModelRequest(parts=last_message.parts)
+
+                # Extract `UserPromptPart` content from the popped message and add to `ctx.deps.prompt`
+                user_prompt_parts = [part for part in last_message.parts if isinstance(part, _messages.UserPromptPart)]
+                if user_prompt_parts:
+                    if len(user_prompt_parts) == 1:
+                        ctx.deps.prompt = user_prompt_parts[0].content
+                    else:
+                        combined_content: list[_messages.UserContent] = []
+                        for part in user_prompt_parts:
+                            if isinstance(part.content, str):
+                                combined_content.append(part.content)
+                            else:
+                                combined_content.extend(part.content)
+                        ctx.deps.prompt = combined_content
             elif isinstance(last_message, _messages.ModelResponse):
                 call_tools_node = await self._handle_message_history_model_response(ctx, last_message)
                 if call_tools_node is not None:
                     return call_tools_node

-        if self.user_prompt is not None:
-            parts.append(_messages.UserPromptPart(self.user_prompt))
+        # Build the run context after `ctx.deps.prompt` has been updated
+        run_context = build_run_context(ctx)
+
+        parts: list[_messages.ModelRequestPart] = []
+        if messages:
+            await self._reevaluate_dynamic_prompts(messages, run_context)
+
+        if next_message:
+            await self._reevaluate_dynamic_prompts([next_message], run_context)
+        else:
+            parts: list[_messages.ModelRequestPart] = []
+            if not messages:
+                parts.extend(await self._sys_parts(run_context))
+
+            if self.user_prompt is not None:
+                parts.append(_messages.UserPromptPart(self.user_prompt))
+
+            next_message = _messages.ModelRequest(parts=parts)

-        instructions = await ctx.deps.get_instructions(run_context)
-        next_message = _messages.ModelRequest(parts, instructions=instructions)
+        next_message.instructions = await ctx.deps.get_instructions(run_context)

         return ModelRequestNode[DepsT, NodeRunEndT](request=next_message)
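
The prompt-combining behaviour added in this hunk can be illustrated in isolation. Below is a minimal sketch of that logic only: `UserPromptPart` is a simplified stand-in dataclass rather than the real `pydantic_ai.messages` class, and `combine_user_prompt_parts` is a hypothetical helper name, not part of the library. It mirrors how the new code promotes `UserPromptPart` content from the popped `ModelRequest` into `ctx.deps.prompt`: a single part keeps its content as-is, while several parts are flattened into one content list.

```python
# Sketch of the UserPromptPart-combining logic from the diff above.
# `UserPromptPart` here is a simplified stand-in, not the real pydantic_ai class.
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class UserPromptPart:
    # In the real library, content is a string or a sequence of `UserContent` items;
    # a plain `str | list` stand-in is enough to show the flattening behaviour.
    content: str | list[object]


def combine_user_prompt_parts(parts: list[UserPromptPart]) -> str | list[object] | None:
    """Return one part's content unchanged, or flatten several parts into a single list."""
    user_prompt_parts = [part for part in parts if isinstance(part, UserPromptPart)]
    if not user_prompt_parts:
        return None
    if len(user_prompt_parts) == 1:
        return user_prompt_parts[0].content
    combined: list[object] = []
    for part in user_prompt_parts:
        if isinstance(part.content, str):
            combined.append(part.content)
        else:
            combined.extend(part.content)
    return combined


if __name__ == '__main__':
    single = [UserPromptPart('hello')]
    several = [UserPromptPart('hello'), UserPromptPart(['image-url', 'more text'])]
    assert combine_user_prompt_parts(single) == 'hello'
    assert combine_user_prompt_parts(several) == ['hello', 'image-url', 'more text']
```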