AiMathGPT.py

from __future__ import annotations

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt


class AiMathGPT(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://aimathgpt.forit.ai"
    api_endpoint = "https://aimathgpt.forit.ai/api/ai"

    working = True
    supports_stream = False
    supports_system_message = True
    supports_message_history = True

    default_model = 'llama3'
    models = ['llama3']

    # Allow callers to request the model under its upstream name.
    model_aliases = {"llama-3.1-70b": "llama3"}

    @classmethod
    def get_model(cls, model: str) -> str:
        # Resolve the requested model: exact match first, then aliases,
        # falling back to the default model for anything unknown.
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases[model]
        else:
            return cls.default_model
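
    # Example (illustrative, not part of the upstream file) of how the
    # resolution above behaves:
    #   AiMathGPT.get_model("llama3")        -> "llama3"  (exact match)
    #   AiMathGPT.get_model("llama-3.1-70b") -> "llama3"  (alias)
    #   AiMathGPT.get_model("gpt-4")         -> "llama3"  (default fallback)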

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)

        # Browser-like headers so the request matches what the site's own
        # frontend sends.
        headers = {
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.9',
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'origin': cls.url,
            'pragma': 'no-cache',
            'priority': 'u=1, i',
            'referer': f'{cls.url}/',
            'sec-ch-ua': '"Chromium";v="129", "Not=A?Brand";v="8"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Linux"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36'
        }

        async with ClientSession(headers=headers) as session:
            # Chat-style payload: the full message history is flattened into a
            # single user turn via format_prompt; the system slot is left empty.
            data = {
                "messages": [
                    {
                        "role": "system",
                        "content": ""
                    },
                    {
                        "role": "user",
                        "content": format_prompt(messages)
                    }
                ],
                "model": model
            }
            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
                response.raise_for_status()
                # The endpoint does not stream: read the full JSON body and
                # yield the answer text as a single chunk.
                response_data = await response.json()
                filtered_response = response_data['result']['response']
                yield filtered_response
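

# --- Usage sketch (not part of the upstream file) ---
# A minimal demo under two assumptions: this module lives inside the g4f
# provider package, so the relative imports above resolve (e.g. run it as
# `python -m g4f.Provider.AiMathGPT`), and the endpoint still answers without
# authentication. The prompt and model name are illustrative only.
if __name__ == "__main__":
    import asyncio

    async def demo() -> None:
        messages = [{"role": "user", "content": "Solve 2x + 3 = 11 for x."}]
        # The alias "llama-3.1-70b" resolves to "llama3" via get_model.
        async for chunk in AiMathGPT.create_async_generator("llama-3.1-70b", messages):
            print(chunk)

    asyncio.run(demo())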