File size: 10,639 Bytes
9c3dc25
 
7160e12
 
 
9c3dc25
 
 
 
 
7160e12
 
 
 
 
9c3dc25
 
 
 
 
 
 
 
 
 
7160e12
 
9c3dc25
 
 
 
 
 
 
 
 
 
 
7160e12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9c3dc25
 
 
 
 
7160e12
9c3dc25
 
7160e12
 
 
 
 
 
 
 
9c3dc25
7160e12
9c3dc25
 
 
 
 
 
 
 
 
 
 
7160e12
 
9c3dc25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7160e12
9c3dc25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7160e12
 
 
 
 
 
 
 
 
 
 
 
9c3dc25
 
 
 
7160e12
9c3dc25
7160e12
 
 
 
 
 
 
 
 
9c3dc25
 
 
 
 
 
 
 
 
7160e12
9c3dc25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7160e12
9c3dc25
 
 
 
6d9d677
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9c3dc25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7160e12
9c3dc25
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
# utils/gemini_explainer.py - FIXED VERSION

import google.generativeai as genai
from typing import Dict, List, Optional
import config
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class GeminiExplainer:
    """Alternative LLM explainer using Google Gemini.

    Produces consumer-friendly explanations for card recommendations,
    personalized spending insights, and conversational chat replies via the
    ``google.generativeai`` client. If the API key is missing, the initial
    connectivity test fails, or any individual call errors out, each public
    method degrades to a deterministic rule-based fallback instead of raising.
    """

    def __init__(self):
        # Stay disabled until a live test call proves the API is reachable.
        self.enabled = False
        self.model = None

        # Without an API key there is nothing to configure; every request
        # will be served by the rule-based fallbacks.
        if not config.GEMINI_API_KEY:
            logger.warning("⚠️ GEMINI_API_KEY not found in environment variables")
            return

        # Try to initialize Gemini
        try:
            genai.configure(api_key=config.GEMINI_API_KEY)
            self.model = genai.GenerativeModel(config.GEMINI_MODEL)

            # Smoke-test with a trivial prompt so `enabled` reflects real
            # availability, not just successful client construction.
            # NOTE: this performs a network call at construction time.
            test_response = self.model.generate_content("Say 'Hello'")
            if test_response.text:
                self.enabled = True
                logger.info("βœ… Gemini explainer initialized successfully with model: %s", config.GEMINI_MODEL)
            else:
                logger.error("❌ Gemini test failed: No response text")

        except Exception as e:
            logger.error("❌ Failed to initialize Gemini: %s", e)
            self.enabled = False

    def explain_recommendation(
        self,
        card: str,
        rewards: float,
        rewards_rate: str,
        merchant: str,
        category: str,
        amount: float,
        warnings: Optional[List[str]] = None,
        annual_potential: float = 0,
        alternatives: Optional[List[Dict]] = None
    ) -> str:
        """Generate a short, friendly explanation for a card recommendation.

        Args:
            card: Name of the recommended card.
            rewards: Dollar rewards earned on this transaction.
            rewards_rate: Human-readable rate string (e.g. "3% cashback").
            merchant: Merchant name for the transaction.
            category: Spending category of the transaction.
            amount: Transaction amount in dollars.
            warnings: Optional warning strings; only the first one is
                surfaced to the model in the prompt.
            annual_potential: Projected yearly rewards for similar purchases.
            alternatives: Accepted for interface compatibility; currently
                unused by the prompt.

        Returns:
            A 2-3 sentence explanation string, falling back to a rule-based
            message when Gemini is unavailable or the call fails.
        """

        if not self.enabled or not self.model:
            logger.warning("⚠️ Gemini not enabled, returning fallback explanation")
            return self._generate_fallback_explanation(
                card, rewards, rewards_rate, merchant, category, amount
            )

        # Build prompt for consumer-friendly explanation
        prompt = f"""You are a friendly financial advisor helping everyday consumers optimize their credit card rewards.

Transaction Details:
- Merchant: {merchant}
- Category: {category}
- Amount: ${amount:.2f}

Recommended Card: {card}
Rewards Earned: ${rewards:.2f} ({rewards_rate})
Annual Potential: ${annual_potential:.2f}/year if you use this card for similar purchases

Task: Explain in 2-3 simple, conversational sentences why this card is the best choice for this purchase.

Guidelines:
1. Start with the tangible benefit (e.g., "You'll earn $5.02 back on this purchase")
2. Explain the reward rate in simple terms (avoid jargon)
3. Add a relatable comparison (e.g., "That's like getting a free coffee!")
4. Be encouraging and friendly

{"⚠️ Important: Mention this warning - " + warnings[0] if warnings else ""}

Keep it under 100 words and use everyday language."""

        try:
            # Lazy %-style args avoid building the message unless emitted.
            logger.info("πŸ€– Calling Gemini for %s recommendation...", merchant)

            response = self.model.generate_content(
                prompt,
                generation_config=genai.types.GenerationConfig(
                    temperature=0.7,
                    max_output_tokens=200,
                )
            )

            if response.text:
                logger.info("βœ… Gemini explanation generated successfully")
                return response.text.strip()
            else:
                logger.warning("⚠️ Gemini returned empty response")
                return self._generate_fallback_explanation(
                    card, rewards, rewards_rate, merchant, category, amount
                )

        except Exception as e:
            logger.error("❌ Gemini explanation failed: %s", e)
            return self._generate_fallback_explanation(
                card, rewards, rewards_rate, merchant, category, amount
            )

    def _generate_fallback_explanation(
        self,
        card: str,
        rewards: float,
        rewards_rate: str,
        merchant: str,
        category: str,
        amount: float
    ) -> str:
        """Generate rule-based explanation when Gemini is unavailable.

        Pure string templating — no network or instance state is used.
        The closing comparison is tiered by reward size ($5+/ $3+ / less).
        """

        explanation = f"The **{card}** is your best choice for this {category.lower()} purchase at {merchant}. "
        explanation += f"You'll earn **{rewards_rate}**, which gives you **${rewards:.2f}** back on this transaction. "

        # Add relatable comparison
        if rewards >= 5:
            explanation += "That's like getting a free lunch! πŸ”"
        elif rewards >= 3:
            explanation += "That's like getting a free coffee! β˜•"
        else:
            explanation += "Every bit of savings counts! πŸ’°"

        return explanation

    def generate_spending_insights(
        self,
        user_id: str,
        total_spending: float,
        total_rewards: float,
        optimization_score: int,
        top_categories: List[Dict],
        recommendations_count: int
    ) -> str:
        """Generate personalized spending insights using Gemini.

        Args:
            user_id: Identifier of the user (not included in the prompt).
            total_spending: Total dollars spent in the period.
            total_rewards: Total dollar rewards earned.
            optimization_score: Score from 0 to 100.
            top_categories: Dicts with at least a 'category' key; the top
                three category names are listed in the prompt.
            recommendations_count: Number of optimized transactions.

        Returns:
            A short motivational insights string, or a rule-based fallback
            when Gemini is unavailable or the call fails.
        """

        if not self.enabled or not self.model:
            return self._generate_fallback_insights(
                total_spending, total_rewards, optimization_score
            )

        prompt = f"""You are a personal finance coach reviewing a user's credit card rewards performance.

User Stats:
- Total Spending: ${total_spending:.2f}
- Total Rewards: ${total_rewards:.2f}
- Optimization Score: {optimization_score}/100
- Optimized Transactions: {recommendations_count}
- Top Categories: {', '.join([c.get('category', 'Unknown') for c in top_categories[:3]])}

Task: Provide 3 actionable insights in a friendly, motivating tone. Each insight should be 1 sentence.

Guidelines:
1. Start with praise for what they're doing well
2. Identify their biggest opportunity (highest spending category)
3. Give one specific, actionable tip to improve their score
4. Use emojis and be encouraging!

Keep it under 120 words."""

        try:
            response = self.model.generate_content(
                prompt,
                generation_config=genai.types.GenerationConfig(
                    temperature=0.8,
                    max_output_tokens=200,
                )
            )

            if response.text:
                return response.text.strip()
            else:
                return self._generate_fallback_insights(
                    total_spending, total_rewards, optimization_score
                )

        except Exception as e:
            logger.error("❌ Gemini insights generation failed: %s", e)
            return self._generate_fallback_insights(
                total_spending, total_rewards, optimization_score
            )

    def chat_response(self, message: str, user_context: dict, chat_history: list) -> str:
        """
        Generate conversational response using Gemini

        Args:
            message: User's question
            user_context: User profile data (cards, spending, etc.)
            chat_history: Previous conversation turns as (user, bot) pairs;
                only the last three turns are included in the prompt
        
        Returns:
            str: Gemini's response, or an apologetic message on failure
        """
        # Guard both flags, matching the other public methods.
        if not self.enabled or not self.model:
            return "Gemini AI is currently unavailable. Please check your API configuration."

        try:
            # Build context from user data
            context_str = f"""
    You are a helpful credit card rewards expert assistant. You're chatting with a user who has the following profile:
    
    **User Profile:**
    - Cards in wallet: {', '.join(user_context.get('cards', ['Unknown']))}
    - Monthly spending: ${user_context.get('monthly_spending', 0):.2f}
    - Top spending category: {user_context.get('top_category', 'Unknown')}
    - Total rewards earned: ${user_context.get('total_rewards', 0):.2f}
    - Optimization score: {user_context.get('optimization_score', 0)}/100
    
    **Your role:**
    - Answer questions about credit cards, rewards, and optimization strategies
    - Be conversational, friendly, and concise (2-3 paragraphs max)
    - Reference the user's specific cards and spending when relevant
    - Provide actionable advice
    - If asked about a specific card, explain its benefits and best use cases
    
    **Conversation history:**
    """

            # Add recent chat history (last 3 turns)
            for user_msg, bot_msg in chat_history[-3:]:
                context_str += f"\nUser: {user_msg}\nAssistant: {bot_msg}\n"

            context_str += f"\n**Current question:** {message}\n\nProvide a helpful, personalized response:"

            # Generate response
            response = self.model.generate_content(context_str)

            if response and response.text:
                return response.text.strip()
            else:
                return "I'm having trouble generating a response. Could you rephrase your question?"

        except Exception as e:
            # Use the module logger (was a bare print) so the error reaches
            # the configured logging sinks like every other failure here.
            logger.error("Gemini chat error: %s", e)
            return "I encountered an error processing your question. Please try asking in a different way."

    def _generate_fallback_insights(
        self,
        total_spending: float,
        total_rewards: float,
        optimization_score: int
    ) -> str:
        """Generate rule-based insights when Gemini unavailable.

        Computes the effective rewards rate (0 when spending is 0, avoiding
        division by zero) and tiers the message by optimization score
        (>=80 / >=60 / below).
        """

        rewards_rate = (total_rewards / total_spending * 100) if total_spending > 0 else 0

        insights = f"You're earning **${total_rewards:.2f}** in rewards on **${total_spending:.2f}** of spending "
        insights += f"(**{rewards_rate:.1f}%** effective rate). "

        if optimization_score >= 80:
            insights += "🌟 **Excellent optimization!** You're maximizing your rewards effectively. "
        elif optimization_score >= 60:
            insights += "πŸ‘ **Good progress!** Consider using our recommendations more consistently. "
        else:
            insights += "πŸ’‘ **Room for improvement!** Follow our card suggestions to boost your rewards. "

        insights += "Keep tracking your spending to identify new optimization opportunities."

        return insights


# Module-level holder for the shared explainer instance
_gemini_explainer = None

def get_gemini_explainer() -> GeminiExplainer:
    """Lazily create and return the process-wide GeminiExplainer.

    Construction is deferred until first use because GeminiExplainer's
    initializer performs live API configuration; subsequent calls reuse
    the same instance.
    """
    global _gemini_explainer
    explainer = _gemini_explainer
    if explainer is None:
        explainer = GeminiExplainer()
        _gemini_explainer = explainer
    return explainer