@@ -1778,8 +1778,6 @@ async def _process_grounding_metadata(
         grounding_metadata_list: List[types.GroundingMetadata],
         text: str,
         __event_emitter__: Callable,
-        *,
-        emit_replace: bool = True,
     ):
         """Process and emit grounding metadata events."""
         grounding_chunks = []
@@ -1847,17 +1845,8 @@ async def _process_grounding_metadata(
             cited_chunks.append(text_bytes[last_byte_index:].decode(ENCODING))
 
         replaced_text = "".join(cited_chunks)
-        if emit_replace:
-            await __event_emitter__(
-                {
-                    "type": "replace",
-                    "data": {"content": replaced_text},
-                }
-            )
 
-        # Return the transformed text when requested by caller
-        if not emit_replace:
-            return replaced_text if replaced_text is not None else text
+        return replaced_text if replaced_text is not None else text
 
18621851 async def _handle_streaming_response (
18631852 self ,
@@ -1995,12 +1984,10 @@ async def emit_chat_event(event_type: str, data: Dict[str, Any]) -> None:
         # After processing all chunks, handle grounding data
         final_answer_text = "".join(answer_chunks)
         if grounding_metadata_list and __event_emitter__:
-            # Don't emit replace here; we'll compose final content below
             cited = await self._process_grounding_metadata(
                 grounding_metadata_list,
                 final_answer_text,
                 __event_emitter__,
-                emit_replace=False,
             )
             final_answer_text = cited or final_answer_text
 
@@ -2378,7 +2365,6 @@ async def get_response():
                 grounding_metadata_list,
                 final_answer,
                 __event_emitter__,
-                emit_replace=False,
             )
             final_answer = cited or final_answer
23842370
0 commit comments