Fix Google GenAI streaming handler to use async generator

- Keep stream_generator as async function (not sync)
- Wrap Google's synchronous iterator in async generator
- Properly structure if/else for streaming vs non-streaming paths
- Fix 'client has been closed' error in streaming responses

This fixes the issue where streaming requests through autoselect
were failing with 'Cannot send a request, as a client has been closed'
error.
parent 63268f97
......@@ -160,23 +160,14 @@ class GoogleProviderHandler(BaseProviderHandler):
)
logging.info(f"GoogleProviderHandler: Streaming response received")
self.record_success()
return response
else:
# Generate content using the google-genai client
response = self.client.models.generate_content(
model=model,
contents=content,
config=config
)
# Handle streaming response
if stream:
logging.info(f"GoogleProviderHandler: Processing streaming response")
# Create a generator that yields OpenAI-compatible chunks
# Create an async generator that yields OpenAI-compatible chunks
# Google's generate_content_stream() returns a synchronous iterator
# We need to wrap it in an async generator
async def stream_generator():
try:
chunk_id = 0
# Iterate over the sync iterator
for chunk in response:
logging.info(f"GoogleProviderHandler: Processing stream chunk")
......@@ -210,7 +201,7 @@ class GoogleProviderHandler(BaseProviderHandler):
chunk_id += 1
logging.info(f"Yielding OpenAI chunk: {openai_chunk}")
# Yield the complete chunk object as a single line
                        # Yield the complete chunk object as a single line
yield openai_chunk
except Exception as e:
......@@ -218,8 +209,15 @@ class GoogleProviderHandler(BaseProviderHandler):
raise
return stream_generator()
else:
# Non-streaming request
# Generate content using the google-genai client
response = self.client.models.generate_content(
model=model,
contents=content,
config=config
)
# Non-streaming response
logging.info(f"GoogleProviderHandler: Response received: {response}")
self.record_success()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment