voice-ass6/llm-service/providers/openrouter.py

# llm/providers/openrouter.py
import httpx
import asyncio
from loguru import logger


class OpenRouterProvider:
    def __init__(self, api_key: str, model: str, system_prompt: str = ""):
        self.api_key = api_key
        self.model = model
        self.base_url = "https://openrouter.ai/api/v1/chat/completions"
        self.system_prompt = system_prompt

    async def chat(self, messages: list[dict]) -> str:
        # Prepend the system prompt (if configured) to the conversation history.
        full_messages = []
        if self.system_prompt:
            full_messages.append({"role": "system", "content": self.system_prompt})
        full_messages.extend(messages)

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "HTTP-Referer": "http://localhost",
            "X-Title": "llm-service",
        }
        payload = {
            "model": self.model,
            "messages": full_messages,
        }
        async with httpx.AsyncClient(timeout=30) as client:
            try:
                # The request itself is inside the try block so that network
                # errors and timeouts are handled, not only HTTP error statuses.
                response = await client.post(self.base_url, headers=headers, json=payload)
                response.raise_for_status()
                data = response.json()
                logger.info(f"OPENROUTER response: {data}")
                return data["choices"][0]["message"]["content"]
            except Exception as e:
                # loguru formats with str.format-style "{}", not printf-style "%s".
                logger.exception("LLM error: {}", e)
                # Fallback text returned to the caller ("response generation error").
                return "[ошибка генерации ответа]"