# NexraGeminiPro.py
from __future__ import annotations

import json

from aiohttp import ClientSession

from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..helper import format_prompt
from ...typing import AsyncResult, Messages
  7. class NexraGeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
  8. label = "Nexra Gemini PRO"
  9. url = "https://nexra.aryahcr.cc/documentation/gemini-pro/en"
  10. api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
  11. working = False
  12. supports_stream = True
  13. default_model = 'gemini-pro'
  14. models = [default_model]
  15. @classmethod
  16. def get_model(cls, model: str) -> str:
  17. return cls.default_model
  18. @classmethod
  19. async def create_async_generator(
  20. cls,
  21. model: str,
  22. messages: Messages,
  23. proxy: str = None,
  24. stream: bool = False,
  25. markdown: bool = False,
  26. **kwargs
  27. ) -> AsyncResult:
  28. model = cls.get_model(model)
  29. headers = {
  30. "Content-Type": "application/json"
  31. }
  32. data = {
  33. "messages": [
  34. {
  35. "role": "user",
  36. "content": format_prompt(messages)
  37. }
  38. ],
  39. "markdown": markdown,
  40. "stream": stream,
  41. "model": model
  42. }
  43. async with ClientSession(headers=headers) as session:
  44. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  45. response.raise_for_status()
  46. buffer = ""
  47. async for chunk in response.content.iter_any():
  48. if chunk.strip(): # Check if chunk is not empty
  49. buffer += chunk.decode()
  50. while '\x1e' in buffer:
  51. part, buffer = buffer.split('\x1e', 1)
  52. if part.strip():
  53. try:
  54. response_json = json.loads(part)
  55. message = response_json.get("message", "")
  56. if message:
  57. yield message
  58. except json.JSONDecodeError as e:
  59. print(f"JSONDecodeError: {e}")