"""LLM Data Analyzer — a Streamlit app for uploading tabular data, getting
AI-powered analysis of it, and chatting with an assistant, all via the
Hugging Face Inference API."""

import streamlit as st
import pandas as pd
from huggingface_hub import InferenceClient

# Page configuration
st.set_page_config(
    page_title="📊 LLM Data Analyzer",
    page_icon="📊",
    layout="wide",
    initial_sidebar_state="expanded",
)

st.title("📊 LLM Data Analyzer")
st.write("*Analyze data and chat with AI powered by Hugging Face Inference API*")


@st.cache_resource
def get_hf_client():
    """Return a cached Hugging Face InferenceClient, or None if it fails.

    Cached with @st.cache_resource so the client is created once per
    server process, not on every script rerun.
    """
    try:
        # NOTE(review): no model is pinned here — the client relies on the
        # Inference API's default routing; confirm a model/provider is
        # configured for the deployment.
        return InferenceClient()
    except Exception as e:
        st.error(f"Error initializing HF client: {e}")
        return None


client = get_hf_client()
if client is None:
    st.error("Failed to initialize Hugging Face client")
    st.stop()

# Create tabs
tab1, tab2, tab3 = st.tabs(["📤 Upload & Analyze", "💬 Chat", "📊 About"])

# ============================================================================
# TAB 1: Upload & Analyze
# ============================================================================
with tab1:
    st.header("📤 Upload and Analyze Data")

    uploaded_file = st.file_uploader(
        "Upload a CSV or Excel file",
        type=["csv", "xlsx", "xls"],
        help="Supported formats: CSV, Excel",
    )

    if uploaded_file is not None:
        st.success(f"✅ File uploaded: {uploaded_file.name}")
        try:
            # Case-insensitive extension check: the uploader accepts ".CSV"
            # too, which a case-sensitive endswith() would misroute to the
            # Excel reader.
            if uploaded_file.name.lower().endswith(".csv"):
                df = pd.read_csv(uploaded_file)
            else:
                df = pd.read_excel(uploaded_file)

            # Display data preview
            st.subheader("📋 Data Preview")
            st.dataframe(df.head(10), use_container_width=True)

            # Display statistics
            st.subheader("📊 Data Statistics")
            col1, col2, col3 = st.columns(3)
            with col1:
                st.metric("Rows", df.shape[0])
            with col2:
                st.metric("Columns", df.shape[1])
            with col3:
                st.metric(
                    "Memory", f"{df.memory_usage(deep=True).sum() / 1024:.2f} KB"
                )

            # Detailed statistics
            st.write(df.describe().T)

            # Ask AI about the data
            st.subheader("❓ Ask AI About Your Data")
            question = st.text_input(
                "What would you like to know about this data?",
                placeholder="e.g., What is the average value in column X?",
            )

            if question:
                with st.spinner("🤔 AI is analyzing your data..."):
                    try:
                        # Only the describe() summary is sent to the model,
                        # not the raw rows.
                        data_summary = df.describe().to_string()
                        prompt = f"""You are a data analyst expert. You have the following data summary:

{data_summary}

Column names: {', '.join(df.columns.tolist())}

User's question: {question}

Please provide a clear, concise analysis based on the data summary."""
                        # Use Hugging Face Inference API
                        response = client.text_generation(
                            prompt,
                            max_new_tokens=300,
                            temperature=0.7,
                        )
                        st.success("✅ Analysis Complete")
                        st.write(response)
                    except Exception as e:
                        st.error(f"Error analyzing data: {e}")
        except Exception as e:
            st.error(f"Error reading file: {e}")

# ============================================================================
# TAB 2: Chat
# ============================================================================
with tab2:
    st.header("💬 Chat with AI Assistant")
    st.write("Have a conversation with an AI assistant powered by Hugging Face.")

    # Initialize session state for chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display chat history
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Chat input
    user_input = st.chat_input("Type your message here...")
    if user_input:
        # Add user message to history
        st.session_state.messages.append({"role": "user", "content": user_input})

        # Display user message
        with st.chat_message("user"):
            st.markdown(user_input)

        # Generate AI response
        with st.chat_message("assistant"):
            with st.spinner("⏳ Generating response..."):
                try:
                    # Build the prompt from the whole conversation so the
                    # assistant has context — previously only the latest
                    # user turn was sent, so follow-ups had no memory.
                    history = "\n\n".join(
                        f"{'User' if m['role'] == 'user' else 'Assistant'}: "
                        f"{m['content']}"
                        for m in st.session_state.messages
                    )
                    prompt = f"{history}\n\nAssistant:"
                    response = client.text_generation(
                        prompt,
                        max_new_tokens=300,
                        temperature=0.7,
                    )
                    assistant_message = response.strip()
                    st.markdown(assistant_message)

                    # Add assistant message to history
                    st.session_state.messages.append(
                        {"role": "assistant", "content": assistant_message}
                    )
                except Exception as e:
                    st.error(f"Error generating response: {e}")

# ============================================================================
# TAB 3: About
# ============================================================================
with tab3:
    st.header("ℹ️ About This App")
    st.markdown("""
### 🎯 What is this?

**LLM Data Analyzer** is an AI-powered tool for analyzing data and having conversations with an intelligent assistant.

### 🔧 Technology Stack

- **Framework:** Streamlit
- **AI Engine:** Hugging Face Inference API
- **Hosting:** Hugging Face Spaces (Free Tier)
- **Language:** Python

### ⚡ Features

1. **Data Analysis**: Upload CSV/Excel and ask questions about your data
2. **Chat**: Have conversations with an AI assistant
3. **Statistics**: View data summaries and insights

### 📝 How to Use

1. **Upload Data** - Start by uploading a CSV or Excel file
2. **Preview** - Review your data and statistics
3. **Ask Questions** - Get AI-powered analysis
4. **Chat** - Have follow-up conversations

### 🌐 Powered By

- [Hugging Face](https://huggingface.co/) - AI models and hosting
- [Streamlit](https://streamlit.io/) - Web framework

### 📖 Quick Tips

- Keep questions focused and specific for best results
- Responses may take a few seconds
- Data is processed locally, not stored on server

### 🔗 Links

- [GitHub Repository](https://github.com/Arif-Badhon/LLM-Data-Analyzer)
- [Hugging Face Hub](https://huggingface.co/)

---

**Version:** 1.0 | **Last Updated:** Dec 2025
""")