Amit committed on
Commit
8a87574
·
1 Parent(s): a9a6884

Add OpenRouter as final LLM fallback (free tier)

Browse files
Files changed (2) hide show
  1. pip_brain.py +71 -18
  2. services/openrouter_client.py +199 -0
pip_brain.py CHANGED
@@ -14,6 +14,7 @@ import os
14
  from services.gemini_client import GeminiClient
15
  from services.anthropic_client import AnthropicClient
16
  from services.sambanova_client import SambanovaClient
 
17
  from pip_prompts import (
18
  EMOTION_ANALYZER_PROMPT,
19
  ACTION_DECIDER_PROMPT,
@@ -97,6 +98,9 @@ class PipBrain:
97
  # Fast LLM for acknowledgments
98
  self.sambanova = SambanovaClient()
99
 
 
 
 
100
  # Other services
101
  self.artist = PipArtist()
102
  self.voice = PipVoice()
@@ -112,12 +116,14 @@ class PipBrain:
112
  self._gemini_available = self.gemini.available
113
  self._claude_available = self.claude is not None and self.claude.available
114
  self._sambanova_available = self.sambanova.available
 
115
 
116
  # Log what's available
117
  print(f"🧠 PipBrain initialized:")
118
  print(f" - Gemini: {'✅' if self._gemini_available else '❌'}")
119
  print(f" - Claude: {'✅' if self._claude_available else '❌'}")
120
  print(f" - SambaNova: {'✅' if self._sambanova_available else '❌'}")
 
121
 
122
  def set_mode(self, session_id: str, mode: str):
123
  """Set the interaction mode for a session."""
@@ -296,7 +302,7 @@ class PipBrain:
296
  # =========================================================================
297
 
298
  async def _quick_acknowledge_with_fallback(self, user_input: str) -> str:
299
- """Quick acknowledgment with Gemini -> SambaNova fallback."""
300
  # Try Gemini first
301
  if self._gemini_available:
302
  try:
@@ -307,14 +313,25 @@ class PipBrain:
307
  print(f"Gemini quick ack failed: {e}")
308
 
309
  # Fallback to SambaNova
310
- try:
311
- return await self.sambanova.quick_acknowledge(user_input, QUICK_ACK_PROMPT)
312
- except Exception as e:
313
- print(f"SambaNova quick ack failed: {e}")
314
- return "I hear you..."
 
 
 
 
 
 
 
 
 
 
 
315
 
316
  async def _analyze_emotion_with_fallback(self, user_input: str) -> dict:
317
- """Emotion analysis with Gemini -> Claude fallback."""
318
  default_emotion = {
319
  "primary_emotions": ["neutral"],
320
  "secondary_emotions": [],
@@ -331,7 +348,6 @@ class PipBrain:
331
  return result
332
  except Exception as e:
333
  print(f"Gemini emotion analysis failed: {e}")
334
- self._gemini_available = False # Temporarily disable
335
 
336
  # Fallback to Claude
337
  if self._claude_available and self.claude:
@@ -342,6 +358,24 @@ class PipBrain:
342
  except Exception as e:
343
  print(f"Claude emotion analysis failed: {e}")
344
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
345
  return default_emotion
346
 
347
  async def _decide_action_with_fallback(self, emotion_state: dict) -> dict:
@@ -379,7 +413,7 @@ class PipBrain:
379
  action: dict,
380
  history: list
381
  ) -> AsyncGenerator[str, None]:
382
- """Generate response with Gemini -> Claude -> SambaNova fallback."""
383
 
384
  # Try Gemini first
385
  if self._gemini_available:
@@ -409,15 +443,34 @@ class PipBrain:
409
  except Exception as e:
410
  print(f"Claude response generation failed: {e}")
411
 
412
- # Final fallback to SambaNova
413
- try:
414
- async for chunk in self.sambanova.generate_response_stream(
415
- user_input, emotion_state, CONVERSATION_PROMPT
416
- ):
417
- yield chunk
418
- except Exception as e:
419
- print(f"All LLMs failed: {e}")
420
- yield "I'm here with you. Tell me more about what's on your mind."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
421
 
422
  async def _generate_intervention_with_fallback(
423
  self,
 
14
  from services.gemini_client import GeminiClient
15
  from services.anthropic_client import AnthropicClient
16
  from services.sambanova_client import SambanovaClient
17
+ from services.openrouter_client import OpenRouterClient
18
  from pip_prompts import (
19
  EMOTION_ANALYZER_PROMPT,
20
  ACTION_DECIDER_PROMPT,
 
98
  # Fast LLM for acknowledgments
99
  self.sambanova = SambanovaClient()
100
 
101
+ # Final fallback: OpenRouter (free tier available)
102
+ self.openrouter = OpenRouterClient()
103
+
104
  # Other services
105
  self.artist = PipArtist()
106
  self.voice = PipVoice()
 
116
  self._gemini_available = self.gemini.available
117
  self._claude_available = self.claude is not None and self.claude.available
118
  self._sambanova_available = self.sambanova.available
119
+ self._openrouter_available = self.openrouter.available
120
 
121
  # Log what's available
122
  print(f"🧠 PipBrain initialized:")
123
  print(f" - Gemini: {'✅' if self._gemini_available else '❌'}")
124
  print(f" - Claude: {'✅' if self._claude_available else '❌'}")
125
  print(f" - SambaNova: {'✅' if self._sambanova_available else '❌'}")
126
+ print(f" - OpenRouter: {'✅' if self._openrouter_available else '❌'}")
127
 
128
  def set_mode(self, session_id: str, mode: str):
129
  """Set the interaction mode for a session."""
 
302
  # =========================================================================
303
 
304
  async def _quick_acknowledge_with_fallback(self, user_input: str) -> str:
305
+ """Quick acknowledgment with Gemini -> SambaNova -> OpenRouter fallback."""
306
  # Try Gemini first
307
  if self._gemini_available:
308
  try:
 
313
  print(f"Gemini quick ack failed: {e}")
314
 
315
  # Fallback to SambaNova
316
+ if self._sambanova_available:
317
+ try:
318
+ result = await self.sambanova.quick_acknowledge(user_input, QUICK_ACK_PROMPT)
319
+ if result:
320
+ return result
321
+ except Exception as e:
322
+ print(f"SambaNova quick ack failed: {e}")
323
+
324
+ # Fallback to OpenRouter
325
+ if self._openrouter_available:
326
+ try:
327
+ return await self.openrouter.quick_acknowledge(user_input, QUICK_ACK_PROMPT)
328
+ except Exception as e:
329
+ print(f"OpenRouter quick ack failed: {e}")
330
+
331
+ return "I hear you..."
332
 
333
  async def _analyze_emotion_with_fallback(self, user_input: str) -> dict:
334
+ """Emotion analysis with Gemini -> Claude -> SambaNova -> OpenRouter fallback."""
335
  default_emotion = {
336
  "primary_emotions": ["neutral"],
337
  "secondary_emotions": [],
 
348
  return result
349
  except Exception as e:
350
  print(f"Gemini emotion analysis failed: {e}")
 
351
 
352
  # Fallback to Claude
353
  if self._claude_available and self.claude:
 
358
  except Exception as e:
359
  print(f"Claude emotion analysis failed: {e}")
360
 
361
+ # Fallback to SambaNova (fast analysis)
362
+ if self._sambanova_available:
363
+ try:
364
+ result = await self.sambanova.analyze_emotion_fast(user_input, EMOTION_ANALYZER_QUICK_PROMPT)
365
+ if result:
366
+ return result
367
+ except Exception as e:
368
+ print(f"SambaNova emotion analysis failed: {e}")
369
+
370
+ # Fallback to OpenRouter
371
+ if self._openrouter_available:
372
+ try:
373
+ result = await self.openrouter.analyze_emotion_fast(user_input, EMOTION_ANALYZER_QUICK_PROMPT)
374
+ if result:
375
+ return result
376
+ except Exception as e:
377
+ print(f"OpenRouter emotion analysis failed: {e}")
378
+
379
  return default_emotion
380
 
381
  async def _decide_action_with_fallback(self, emotion_state: dict) -> dict:
 
413
  action: dict,
414
  history: list
415
  ) -> AsyncGenerator[str, None]:
416
+ """Generate response with Gemini -> Claude -> SambaNova -> OpenRouter fallback."""
417
 
418
  # Try Gemini first
419
  if self._gemini_available:
 
443
  except Exception as e:
444
  print(f"Claude response generation failed: {e}")
445
 
446
+ # Fallback to SambaNova
447
+ if self._sambanova_available:
448
+ try:
449
+ yielded = False
450
+ async for chunk in self.sambanova.generate_response_stream(
451
+ user_input, emotion_state, CONVERSATION_PROMPT
452
+ ):
453
+ yielded = True
454
+ yield chunk
455
+ if yielded:
456
+ return
457
+ except Exception as e:
458
+ print(f"SambaNova response generation failed: {e}")
459
+
460
+ # Final fallback to OpenRouter (free tier)
461
+ if self._openrouter_available:
462
+ try:
463
+ async for chunk in self.openrouter.generate_response_stream(
464
+ user_input, emotion_state, CONVERSATION_PROMPT
465
+ ):
466
+ yield chunk
467
+ return
468
+ except Exception as e:
469
+ print(f"OpenRouter response generation failed: {e}")
470
+
471
+ # All LLMs failed - yield a caring fallback
472
+ print("⚠️ All LLMs failed!")
473
+ yield "I'm here with you. Sometimes I need a moment to gather my thoughts, but I'm listening. Please share what's on your mind."
474
 
475
  async def _generate_intervention_with_fallback(
476
  self,
services/openrouter_client.py ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
OpenRouter client for Pip's fallback responses.

OpenRouter provides access to multiple LLM providers through a single
OpenAI-compatible API, so this module reuses the ``openai`` SDK pointed
at the OpenRouter base URL.
"""

import json
import os
from typing import AsyncGenerator, Optional

from openai import AsyncOpenAI


class OpenRouterClient:
    """OpenRouter-powered final LLM fallback for Pip.

    Every public method degrades gracefully: when no API key is configured,
    or a request fails or returns empty content, a safe default is returned
    instead of raising, so callers can chain this client last in a fallback
    sequence without extra guarding.
    """

    # Free-tier models on OpenRouter. DEFAULT_MODEL is used for all requests;
    # FALLBACK_MODEL is kept as a documented alternative.
    DEFAULT_MODEL = "meta-llama/llama-3.1-8b-instruct:free"
    FALLBACK_MODEL = "meta-llama/llama-3.2-3b-instruct:free"

    # Attribution headers recommended by OpenRouter; sent with every request
    # (previously copy-pasted into each call site).
    _EXTRA_HEADERS = {
        "HTTP-Referer": "https://huggingface.co/spaces/MCP-1st-Birthday/pipV1",
        "X-Title": "Pip Emotional Companion",
    }

    def __init__(self, api_key: Optional[str] = None):
        """Create the client.

        Args:
            api_key: OpenRouter API key; falls back to the
                ``OPENROUTER_API_KEY`` environment variable. When neither is
                set, ``self.available`` is False and all methods return
                defaults.
        """
        api_key = api_key or os.getenv("OPENROUTER_API_KEY")
        self.available = bool(api_key)

        if self.available:
            self.client = AsyncOpenAI(
                api_key=api_key,
                base_url="https://openrouter.ai/api/v1",
            )
        else:
            self.client = None
            print("⚠️ OpenRouter: No API key found - service disabled")

        self.model = self.DEFAULT_MODEL

    async def _complete(self, messages: list, max_tokens: int) -> Optional[str]:
        """Run one non-streaming chat completion.

        Returns the message content, which the API may legitimately report
        as ``None``; callers must handle that. Exceptions propagate so each
        public method can apply its own fallback.
        """
        response = await self.client.chat.completions.create(
            model=self.model,
            max_tokens=max_tokens,
            messages=messages,
            extra_headers=self._EXTRA_HEADERS,
        )
        return response.choices[0].message.content

    async def quick_acknowledge(self, user_input: str, system_prompt: str) -> str:
        """Generate a quick acknowledgment; always returns a non-empty str."""
        if not self.available or not self.client:
            return "I hear you..."

        try:
            content = await self._complete(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input},
                ],
                max_tokens=50,
            )
            # Guard: the API can return None/empty content; keep the str contract.
            return content or "I hear you..."
        except Exception as e:
            print(f"OpenRouter quick_acknowledge error: {e}")
            return "I hear you..."

    async def analyze_emotion_fast(self, user_input: str, system_prompt: str) -> dict:
        """Quick emotion analysis; returns a neutral default on any failure."""
        default_response = {
            "primary_emotions": ["neutral"],
            "intensity": 5,
            "pip_expression": "neutral",
            "intervention_needed": False,
        }

        if not self.available or not self.client:
            return default_response

        try:
            content = await self._complete(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input},
                ],
                max_tokens=256,
            )
            if not content:
                # Empty/None content would crash json.loads; bail out early.
                return default_response
            # Strip markdown code fences the model may wrap the JSON in.
            if "```json" in content:
                content = content.split("```json")[1].split("```")[0]
            elif "```" in content:
                content = content.split("```")[1].split("```")[0]
            return json.loads(content.strip())
        except Exception as e:
            print(f"OpenRouter analyze_emotion error: {e}")
            return default_response

    async def generate_response_stream(
        self,
        user_input: str,
        emotion_state: dict,
        system_prompt: str,
    ) -> AsyncGenerator[str, None]:
        """Generate a conversational response, yielding text chunks.

        Yields a single caring fallback message when the service is
        unavailable or the request fails mid-stream.
        """
        if not self.available or not self.client:
            yield "I'm here with you. Sometimes words take a moment to find..."
            return

        context = f"""
User's emotions: {emotion_state.get('primary_emotions', [])}
Intensity: {emotion_state.get('intensity', 5)}/10

User said: {user_input}
"""

        try:
            stream = await self.client.chat.completions.create(
                model=self.model,
                max_tokens=512,
                stream=True,
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context},
                ],
                extra_headers=self._EXTRA_HEADERS,
            )

            async for chunk in stream:
                if chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            print(f"OpenRouter generate_response_stream error: {e}")
            yield "I'm here with you. Let me gather my thoughts..."

    async def enhance_prompt(
        self,
        user_input: str,
        emotion_state: dict,
        mode: str,
        system_prompt: str,
    ) -> str:
        """Transform user context into a detailed image prompt.

        Returns a generic emotion-themed prompt when the service is
        unavailable, errors, or returns empty content.
        """
        emotions = emotion_state.get('primary_emotions', ['peaceful'])
        fallback = f"A beautiful, calming scene representing {emotions[0] if emotions else 'peace'}, soft colors, dreamy atmosphere"

        if not self.available or not self.client:
            return fallback

        context = f"""
User said: "{user_input}"

Detected emotions: {emotion_state.get('primary_emotions', [])}
Emotional intensity: {emotion_state.get('intensity', 5)}/10
Current mode: {mode}

Generate a vivid, specific image prompt based on THIS user's context.
"""

        try:
            content = await self._complete(
                [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context},
                ],
                max_tokens=300,
            )
            # Guard: None/empty content falls back to the generic prompt.
            return content or fallback
        except Exception as e:
            print(f"OpenRouter enhance_prompt error: {e}")
            return fallback

    async def generate_text(self, prompt: str) -> str:
        """Generate free-form text; returns "" on any failure."""
        if not self.available or not self.client:
            return ""

        try:
            content = await self._complete(
                [{"role": "user", "content": prompt}],
                max_tokens=512,
            )
            return content or ""
        except Exception as e:
            print(f"OpenRouter generate_text error: {e}")
            return ""