# pipV1/services/openrouter_client.py
# Author: Amit — commit 8a87574: "Add OpenRouter as final LLM fallback (free tier)"
"""
OpenRouter client for Pip's fallback responses.
OpenRouter provides access to multiple LLM providers with a unified API.
Uses OpenAI-compatible API.
"""
import asyncio
import json
import os
from typing import AsyncGenerator, Optional

from openai import AsyncOpenAI
class OpenRouterClient:
    """OpenRouter-powered fallback LLM client for Pip.

    OpenRouter fronts many model providers behind an OpenAI-compatible API,
    so the official ``AsyncOpenAI`` client is simply pointed at the
    OpenRouter base URL.  When no API key is available the client degrades
    gracefully: every public method returns a safe canned fallback instead
    of raising.
    """

    # Free-tier models on OpenRouter (the ":free" suffix selects the $0 variant).
    DEFAULT_MODEL = "meta-llama/llama-3.1-8b-instruct:free"
    FALLBACK_MODEL = "meta-llama/llama-3.2-3b-instruct:free"

    # App-attribution headers OpenRouter asks clients to send; shared by
    # every request instead of being repeated per call site.
    _EXTRA_HEADERS = {
        "HTTP-Referer": "https://huggingface.co/spaces/MCP-1st-Birthday/pipV1",
        "X-Title": "Pip Emotional Companion",
    }

    def __init__(self, api_key: Optional[str] = None):
        """Create the client.

        Args:
            api_key: OpenRouter API key.  Falls back to the
                ``OPENROUTER_API_KEY`` environment variable; if neither is
                set, the client is marked unavailable and all methods
                return canned fallbacks.
        """
        api_key = api_key or os.getenv("OPENROUTER_API_KEY")
        self.available = bool(api_key)
        if self.available:
            self.client = AsyncOpenAI(
                api_key=api_key,
                base_url="https://openrouter.ai/api/v1"
            )
        else:
            self.client = None
            print("⚠️ OpenRouter: No API key found - service disabled")
        self.model = self.DEFAULT_MODEL

    async def _chat(self, messages: list, max_tokens: int, stream: bool = False):
        """Issue one chat-completion request with the shared attribution headers."""
        return await self.client.chat.completions.create(
            model=self.model,
            max_tokens=max_tokens,
            stream=stream,
            messages=messages,
            extra_headers=self._EXTRA_HEADERS,
        )

    async def quick_acknowledge(self, user_input: str, system_prompt: str) -> str:
        """Generate a short (<=50 token) acknowledgment of the user's message.

        Returns a canned phrase if the service is unavailable or errors.
        """
        fallback = "I hear you..."
        if not self.available or not self.client:
            return fallback
        try:
            response = await self._chat(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input},
                ],
                max_tokens=50,
            )
            # message.content can legitimately be None; never return None.
            return response.choices[0].message.content or fallback
        except Exception as e:
            print(f"OpenRouter quick_acknowledge error: {e}")
            return fallback

    async def analyze_emotion_fast(self, user_input: str, system_prompt: str) -> dict:
        """Quick emotion analysis; returns a dict parsed from the model's JSON.

        Falls back to a neutral reading when the service is unavailable,
        the request fails, or the model output is not valid JSON.
        """
        default_response = {
            "primary_emotions": ["neutral"],
            "intensity": 5,
            "pip_expression": "neutral",
            "intervention_needed": False
        }
        if not self.available or not self.client:
            return default_response
        try:
            response = await self._chat(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input},
                ],
                max_tokens=256,
            )
            # Guard against a None content, then strip the markdown code
            # fence models often wrap JSON in.
            content = response.choices[0].message.content or ""
            if "```json" in content:
                content = content.split("```json")[1].split("```")[0]
            elif "```" in content:
                content = content.split("```")[1].split("```")[0]
            return json.loads(content.strip())
        except Exception as e:
            print(f"OpenRouter analyze_emotion error: {e}")
            return default_response

    async def generate_response_stream(
        self,
        user_input: str,
        emotion_state: dict,
        system_prompt: str
    ) -> AsyncGenerator[str, None]:
        """Stream a conversational response, yielding text deltas as they arrive.

        Yields a single canned sentence if the service is unavailable or
        the request fails.
        """
        if not self.available or not self.client:
            yield "I'm here with you. Sometimes words take a moment to find..."
            return
        context = f"""
User's emotions: {emotion_state.get('primary_emotions', [])}
Intensity: {emotion_state.get('intensity', 5)}/10
User said: {user_input}
"""
        try:
            stream = await self._chat(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context},
                ],
                max_tokens=512,
                stream=True,
            )
            async for chunk in stream:
                # Some stream events (e.g. usage frames) carry no choices
                # or an empty delta — skip those.
                if chunk.choices and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            print(f"OpenRouter generate_response_stream error: {e}")
            yield "I'm here with you. Let me gather my thoughts..."

    async def enhance_prompt(
        self,
        user_input: str,
        emotion_state: dict,
        mode: str,
        system_prompt: str
    ) -> str:
        """Transform the user's emotional context into a detailed image prompt.

        Returns a generic calming-scene prompt built from the first detected
        emotion when the service is unavailable or errors.
        """
        emotions = emotion_state.get('primary_emotions', ['peaceful'])
        fallback = f"A beautiful, calming scene representing {emotions[0] if emotions else 'peace'}, soft colors, dreamy atmosphere"
        if not self.available or not self.client:
            return fallback
        context = f"""
User said: "{user_input}"
Detected emotions: {emotion_state.get('primary_emotions', [])}
Emotional intensity: {emotion_state.get('intensity', 5)}/10
Current mode: {mode}
Generate a vivid, specific image prompt based on THIS user's context.
"""
        try:
            response = await self._chat(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context},
                ],
                max_tokens=300,
            )
            # message.content can be None; fall back rather than return None.
            return response.choices[0].message.content or fallback
        except Exception as e:
            print(f"OpenRouter enhance_prompt error: {e}")
            return fallback

    async def generate_text(self, prompt: str) -> str:
        """Generate free-form text for a single user prompt (no system message).

        Returns "" when the service is unavailable or the request fails.
        """
        if not self.available or not self.client:
            return ""
        try:
            response = await self._chat(
                [{"role": "user", "content": prompt}],
                max_tokens=512,
            )
            return response.choices[0].message.content or ""
        except Exception as e:
            print(f"OpenRouter generate_text error: {e}")
            return ""