init
commit 3ffc8bdbc2
15 changed files with 1221 additions and 0 deletions
llm-service/providers/openrouter.py (Normal file, 41 lines)
@@ -0,0 +1,41 @@
# llm/providers/openrouter.py
import httpx
from loguru import logger


class OpenRouterProvider:
    """Minimal async client for the OpenRouter chat completions API."""

    def __init__(self, api_key: str, model: str, system_prompt: str = ""):
        self.api_key = api_key
        self.model = model
        self.base_url = "https://openrouter.ai/api/v1/chat/completions"
        self.system_prompt = system_prompt

    async def chat(self, messages: list[dict]) -> str:
        # Prepend the system prompt (if configured) to the conversation history.
        full_messages = []
        if self.system_prompt:
            full_messages.append({"role": "system", "content": self.system_prompt})
        full_messages.extend(messages)

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            # Optional attribution headers recognized by OpenRouter.
            "HTTP-Referer": "http://localhost",
            "X-Title": "llm-service",
        }

        payload = {
            "model": self.model,
            "messages": full_messages,
        }

        async with httpx.AsyncClient(timeout=30) as client:
            response = await client.post(self.base_url, headers=headers, json=payload)

        try:
            response.raise_for_status()
            data = response.json()
            logger.info(f"OPENROUTER response: {data}")
            return data["choices"][0]["message"]["content"]
        except Exception as e:
            # loguru formats messages with {} placeholders, not %-style.
            logger.exception("LLM error: {}", e)
            return "[error generating response]"
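For reference, a minimal usage sketch of the provider above. This is not part of the commit: the import path, the OPENROUTER_API_KEY environment variable name, and the model slug are illustrative assumptions.

# example_usage.py (hypothetical driver, not part of this commit)
import asyncio
import os

from providers.openrouter import OpenRouterProvider  # import path assumed from the repo layout


async def main() -> None:
    provider = OpenRouterProvider(
        api_key=os.environ["OPENROUTER_API_KEY"],  # assumed environment variable name
        model="openai/gpt-4o-mini",                # any OpenRouter model slug works here
        system_prompt="You are a helpful assistant.",
    )
    reply = await provider.chat([{"role": "user", "content": "Hello!"}])
    print(reply)


if __name__ == "__main__":
    asyncio.run(main())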