@@ -1,5 +1,7 @@
import logging
from datetime import datetime , timedelta
from time import perf_counter
from uuid import uuid4
from fastapi import HTTPException
from sqlalchemy . orm import Session
@@ -56,16 +58,25 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
async def handle_message ( self , message : str , user_id : int | None = None ) - > str :
""" Processa mensagem, executa tool quando necessario e retorna resposta final. """
self . _turn_trace = {
" request_id " : str ( uuid4 ( ) ) ,
" conversation_id " : f " user: { user_id } " if user_id is not None else " anonymous " ,
" user_id " : user_id ,
}
self . _log_turn_event ( " turn_received " , message = message )
async def finish ( response : str , queue_notice : str | None = None ) - > str :
composed = self . _compose_order_aware_response (
response = response ,
user_id = user_id ,
queue_notice = queue_notice ,
)
return await self . _maybe_auto_advance_next_order (
final_response = await self . _maybe_auto_advance_next_order (
base_response = composed ,
user_id = user_id ,
)
self . _log_turn_event ( " turn_completed " , response = final_response )
return final_response
self . _upsert_user_context ( user_id = user_id )
# Faz uma leitura inicial do turno para ajudar a policy
@@ -239,7 +250,8 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
tools = self . registry . get_tools ( )
llm_result = await self . llm . generate_response (
llm_result = await self . _call_llm_with_trace (
operation = " router " ,
message = self . _build_router_prompt ( user_message = routing_message , user_id = user_id ) ,
tools = tools ,
)
@@ -251,7 +263,8 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
and self . _is_low_value_response ( first_pass_text )
)
if should_force_tool :
llm_result = await self . llm . generate_response (
llm_result = await self . _call_llm_with_trace (
operation = " force_tool " ,
message = self . _build_force_tool_prompt ( user_message = routing_message , user_id = user_id ) ,
tools = tools ,
)
@@ -261,7 +274,7 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
arguments = llm_result [ " tool_call " ] [ " arguments " ]
try :
tool_result = await self . tool_executor. execut e(
tool_result = await self . _execute_tool_with_trac e(
tool_name ,
arguments ,
user_id = user_id ,
@@ -295,24 +308,13 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
queue_notice = queue_notice ,
)
final_response = await self . llm . generate_response (
message = self . _build_result_prompt (
return await finish (
await self . _render_tool_response_with_fallback (
user_message = routing_message ,
user_id = user_id ,
tool_name = tool_name ,
tool_result = tool_result ,
) ,
tools = [ ] ,
)
text = ( final_response . get ( " response " ) or " " ) . strip ( )
if self . _is_low_value_response ( text ) :
return await finish (
self . _fallback_format_tool_result ( tool_name , tool_result ) ,
queue_notice = queue_notice ,
)
return await finish (
text or self . _fallback_format_tool_result ( tool_name , tool_result ) ,
queue_notice = queue_notice ,
)
@@ -352,7 +354,7 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
) :
return None
try :
tool_result = await self . tool_executor. execut e(
tool_result = await self . _execute_tool_with_trac e(
planned_tool_name ,
decision_tool_arguments or { } ,
user_id = user_id ,
@@ -365,7 +367,8 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
)
tools = self . registry . get_tools ( )
llm_result = await self . llm . generate_response (
llm_result = await self . _call_llm_with_trace (
operation = " orchestration_router " ,
message = self . _build_router_prompt ( user_message = message , user_id = user_id ) ,
tools = tools ,
)
@@ -405,7 +408,8 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
if not should_force_tool :
return None
llm_result = await self . llm . generate_response (
llm_result = await self . _call_llm_with_trace (
operation = " orchestration_force_tool " ,
message = self . _build_force_tool_prompt ( user_message = message , user_id = user_id ) ,
tools = tools ,
)
@@ -420,7 +424,7 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
return None
try :
tool_result = await self . tool_executor. execut e(
tool_result = await self . _execute_tool_with_trac e(
forced_tool_name ,
forced_tool_call . get ( " arguments " ) or { } ,
user_id = user_id ,
@@ -450,7 +454,7 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
arguments = decision . get ( " tool_arguments " ) if isinstance ( decision . get ( " tool_arguments " ) , dict ) else { }
try :
tool_result = await self . tool_executor. execut e(
tool_result = await self . _execute_tool_with_trac e(
tool_name ,
arguments ,
user_id = user_id ,
@@ -485,23 +489,13 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
queue_notice = queue_notice ,
)
final_response = await self . llm . generate_response (
message = self . _build_result_prompt (
return await finish (
await self . _render_tool_response_with_fallback (
user_message = message ,
user_id = user_id ,
tool_name = tool_name ,
tool_result = tool_result ,
) ,
tools = [ ] ,
)
text = ( final_response . get ( " response " ) or " " ) . strip ( )
if self . _is_low_value_response ( text ) :
return await finish (
self . _fallback_format_tool_result ( tool_name , tool_result ) ,
queue_notice = queue_notice ,
)
return await finish (
text or self . _fallback_format_tool_result ( tool_name , tool_result ) ,
queue_notice = queue_notice ,
)
@@ -1241,6 +1235,108 @@ class OrquestradorService(ReviewFlowMixin, OrderFlowMixin):
conversation_context = conversation_context ,
)
def _log_turn_event(self, event: str, **payload) -> None:
    """Emit one structured INFO log line for a turn-level event.

    The line carries the current turn's ``request_id`` and
    ``conversation_id`` (read from ``self._turn_trace`` when it is set)
    merged with any caller-supplied *payload* keys.
    """
    # _turn_trace may not exist yet (events logged before handle_message
    # initializes it), so fall back to an empty mapping.
    turn_trace = getattr(self, "_turn_trace", None) or {}
    entry = {
        "request_id": turn_trace.get("request_id"),
        "conversation_id": turn_trace.get("conversation_id"),
    }
    # Caller payload may deliberately override the trace identifiers.
    entry.update(payload)
    logger.info("turn_event=%s payload=%s", event, entry)
async def _call_llm_with_trace ( self , operation : str , message : str , tools ) :
started_at = perf_counter ( )
try :
result = await self . llm . generate_response ( message = message , tools = tools )
elapsed_ms = round ( ( perf_counter ( ) - started_at ) * 1000 , 2 )
self . _log_turn_event (
" llm_completed " ,
operation = operation ,
elapsed_ms = elapsed_ms ,
tool_call = bool ( result . get ( " tool_call " ) ) ,
)
return result
except Exception :
elapsed_ms = round ( ( perf_counter ( ) - started_at ) * 1000 , 2 )
self . _log_turn_event (
" llm_failed " ,
operation = operation ,
elapsed_ms = elapsed_ms ,
)
raise
async def _execute_tool_with_trace ( self , tool_name : str , arguments : dict , user_id : int | None ) :
started_at = perf_counter ( )
try :
result = await self . tool_executor . execute ( tool_name , arguments , user_id = user_id )
elapsed_ms = round ( ( perf_counter ( ) - started_at ) * 1000 , 2 )
self . _log_turn_event (
" tool_completed " ,
tool_name = tool_name ,
elapsed_ms = elapsed_ms ,
arguments = arguments ,
result = result ,
)
return result
except HTTPException as exc :
elapsed_ms = round ( ( perf_counter ( ) - started_at ) * 1000 , 2 )
self . _log_turn_event (
" tool_failed " ,
tool_name = tool_name ,
elapsed_ms = elapsed_ms ,
arguments = arguments ,
error = self . tool_executor . coerce_http_error ( exc ) ,
)
raise
async def _render_tool_response_with_fallback (
self ,
user_message : str ,
user_id : int | None ,
tool_name : str ,
tool_result ,
) - > str :
fallback_response = self . _fallback_format_tool_result ( tool_name , tool_result )
if self . _should_use_deterministic_response ( tool_name ) :
self . _log_turn_event (
" tool_response_fallback " ,
tool_name = tool_name ,
reason = " deterministic_tool " ,
)
return fallback_response
try :
final_response = await self . _call_llm_with_trace (
operation = " tool_result_response " ,
message = self . _build_result_prompt (
user_message = user_message ,
user_id = user_id ,
tool_name = tool_name ,
tool_result = tool_result ,
) ,
tools = [ ] ,
)
except Exception :
self . _log_turn_event (
" tool_response_fallback " ,
tool_name = tool_name ,
reason = " llm_failure " ,
)
return fallback_response
text = ( final_response . get ( " response " ) or " " ) . strip ( )
if self . _is_low_value_response ( text ) :
self . _log_turn_event (
" tool_response_fallback " ,
tool_name = tool_name ,
reason = " low_value_response " ,
)
return fallback_response
return text or fallback_response
def _http_exception_detail ( self , exc : HTTPException ) - > str :
return self . tool_executor . http_exception_detail ( exc )