Fix streaming response error in OpenAIProviderHandler

- Fixed AttributeError when stream=True is passed to OpenAI client
- Changed return type to Union[Dict, object] to support streaming
- Added conditional check to return Stream object for streaming requests
- Bumped version to 0.2.7
parent d7861544
@@ -43,7 +43,7 @@ from .providers import (
 )
 from .handlers import RequestHandler, RotationHandler, AutoselectHandler
-__version__ = "0.2.6"
+__version__ = "0.2.7"
 __all__ = [
     # Config
     "config",
...
@@ -204,7 +204,7 @@ class OpenAIProviderHandler(BaseProviderHandler):
         self.client = OpenAI(base_url=config.providers[provider_id].endpoint, api_key=api_key)
     async def handle_request(self, model: str, messages: List[Dict], max_tokens: Optional[int] = None,
-                             temperature: Optional[float] = 1.0, stream: Optional[bool] = False) -> Dict:
+                             temperature: Optional[float] = 1.0, stream: Optional[bool] = False) -> Union[Dict, object]:
         if self.is_rate_limited():
             raise Exception("Provider rate limited")
@@ -225,6 +225,10 @@ class OpenAIProviderHandler(BaseProviderHandler):
             )
             logging.info(f"OpenAIProviderHandler: Response received: {response}")
             self.record_success()
+            # Return Stream object directly for streaming, otherwise dump to dict
+            if stream:
+                return response
             return response.model_dump()
         except Exception as e:
             import logging
...
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "aisbf"
-version = "0.2.6"
+version = "0.2.7"
 description = "AISBF - AI Service Broker Framework || AI Should Be Free - A modular proxy server for managing multiple AI provider integrations"
 readme = "README.md"
 license = "GPL-3.0-or-later"
...
@@ -49,7 +49,7 @@ class InstallCommand(_install):
 setup(
     name="aisbf",
-    version="0.2.6",
+    version="0.2.7",
     author="AISBF Contributors",
     author_email="stefy@nexlab.net",
     description="AISBF - AI Service Broker Framework || AI Should Be Free - A modular proxy server for managing multiple AI provider integrations",
...
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.