@@ -53,10 +53,24 @@ def __init__(self, llm_session: LLMSession, verbose: bool = False) -> None:
         self.processor = LTPQueryEngine(self.llm_session)
         self.smart_help = SmartHelp(self.help_msg, self.llm_session)
 
+    def _update_component_sessions(self, llm_session: LLMSession) -> None:
+        """Update llm_session for all components to ensure thread-safe operation.
+
+        This is necessary when the same CoPilotTurn instance is used across
+        multiple threads with different LLMSession instances.
+        """
+        self.llm_session = llm_session
+        self.classifier.llm_session = llm_session
+        self.contextualizer.llm_session = llm_session
+        self.processor.llm_session = llm_session
+        self.smart_help.llm_session = llm_session
+
     # entry function, processes the list of messages and returns a dictionary with the results
     def process_turn(self, messages_list: list, skip_summary: bool = False, debugging: bool = False) -> dict:
         """Process the list of messages and return a dictionary with the results."""
-
+        # Ensure all components use the current thread's LLM session
+        self._update_component_sessions(self.llm_session)
+
         # Set thread-local session for push_frontend functions to use correct callback
         set_thread_llm_session(self.llm_session)
 
@@ -82,7 +96,6 @@ def process_turn(self, messages_list: list, skip_summary: bool = False, debugging: bool = False) -> dict:
 
         # version f3, f4, resolves objective 8 (Lucia Training Platform)
         push_frontend_event('<span class="text-gray-400 italic">⏳ Copilot is processing your inquiry...</span><br/>', replace=False)
-        self.smart_help.llm_session = self.llm_session  # ensure processor uses the current llm_session
         if self._version in ['f3', 'f4']:
             # If classification failed, treat as unsupported.
             if obj is None or con is None:
@@ -112,9 +125,8 @@ def process_turn(self, messages_list: list, skip_summary: bool = False, debugging: bool = False) -> dict:
 
     def query_ltp(self, question: str, con: str, skip_summary: bool) -> tuple[str, dict]:
         """Query about Lucia Training Platform."""
-        self.processor.llm_session = self.llm_session  # ensure processor uses the current llm_session
         # Mapping concern codes to handler functions
-        # Updated to pass llm_session to prevent singleton blocking
+        # Pass llm_session to methods to ensure correct session usage
        handlers = {
            self.CONCERN_METRICS: lambda: self.processor.query_metrics(question, self.help_msg, skip_summary),
            self.CONCERN_METADATA: lambda: self.processor.query_metadata(question, self.help_msg, skip_summary),
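
For reference, a minimal, self-contained sketch of the per-thread session pattern this change supports. `_update_component_sessions` and `process_turn` mirror the diff above; `FakeSession`, `FakeComponent`, `FakeTurn`, and the threading wiring are illustrative stand-ins, not code from this repository.

```python
# Illustrative sketch only: stand-ins for LLMSession / CoPilotTurn showing why
# _update_component_sessions is called at the top of process_turn when a single
# turn object is shared across threads that each carry their own session.
import threading


class FakeSession:
    def __init__(self, name: str) -> None:
        self.name = name


class FakeComponent:
    def __init__(self, session: FakeSession) -> None:
        self.llm_session = session


class FakeTurn:
    def __init__(self, session: FakeSession) -> None:
        self.llm_session = session
        self.processor = FakeComponent(session)

    def _update_component_sessions(self, session: FakeSession) -> None:
        # Mirror of the real method: push the current session into every component.
        self.llm_session = session
        self.processor.llm_session = session

    def process_turn(self, messages: list) -> dict:
        # First propagate the session that the caller set just before this call.
        self._update_component_sessions(self.llm_session)
        return {"session": self.processor.llm_session.name, "messages": len(messages)}


def worker(turn: FakeTurn, name: str) -> None:
    # Each thread installs its own session on the shared turn before processing.
    turn.llm_session = FakeSession(name)
    print(turn.process_turn(["hello"]))


turn = FakeTurn(FakeSession("default"))
threads = [threading.Thread(target=worker, args=(turn, f"session-{i}")) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```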