Author: Arif
Commit: aa5cda2
Parent: 2b6c2ca

Added frontend pages and links
Files changed:
- frontend/.env.example +5 -0
- frontend/.streamlit/config.toml +13 -0
- frontend/README.md +9 -0
- frontend/app.py +72 -0
- frontend/components/__init__.py +3 -0
- frontend/components/chat_widget.py +57 -0
- frontend/components/data_display.py +51 -0
- frontend/components/sidebar.py +48 -0
- frontend/pages/01_chat.py +7 -0
- frontend/pages/02_upload_data.py +44 -0
- frontend/pages/03_Analysis.py +54 -0
- frontend/pages/04_Health_Check.py +36 -0
- frontend/requirements.txt +5 -0
- frontend/utils/__init__.py +3 -0
- frontend/utils/api_client.py +110 -0
- frontend/utils/config.py +17 -0
- frontend/utils/helpers.py +37 -0
frontend/.env.example
ADDED
@@ -0,0 +1,5 @@
# Backend Configuration
BACKEND_URL=http://localhost:8000

# Optional: Streamlit specific
STREAMLIT_SERVER_HEADLESS=false
frontend/.streamlit/config.toml
ADDED
@@ -0,0 +1,13 @@
[theme]
primaryColor = "#208080"
backgroundColor = "#f5f5f5"
secondaryBackgroundColor = "#e0e0e0"
textColor = "#262730"
font = "sans serif"

[client]
showErrorDetails = true

[server]
port = 8501
headless = false
frontend/README.md
ADDED
@@ -0,0 +1,9 @@
# LLM Data Analyzer Frontend

Streamlit-based frontend for the LLM Data Analyzer backend.

## Installation

```bash
cd frontend
uv pip install -r requirements.txt
frontend/app.py
ADDED
@@ -0,0 +1,72 @@
"""Main Streamlit app"""
import streamlit as st
from components import render_sidebar
from utils import PAGE_TITLE, PAGE_ICON

st.set_page_config(
    page_title=PAGE_TITLE,
    page_icon=PAGE_ICON,
    layout="wide",
    initial_sidebar_state="expanded"
)

render_sidebar()

st.title("🤖 LLM Data Analyzer")
st.caption("Advanced data analysis with AI assistance")  # Streamlit has no st.subtitle(); caption serves as the subtitle

st.divider()

# Home page content
col1, col2 = st.columns(2)

with col1:
    st.subheader("💬 Chat")
    st.write("""
    - Ask questions about data analysis
    - Get AI-powered insights
    - Real-time responses from LLM
    """)
    st.page_link("pages/01_chat.py", label="Go to Chat", icon="💬")

with col2:
    st.subheader("📁 Upload Data")
    st.write("""
    - Upload CSV or Excel files
    - Preview your data
    - View statistics
    """)
    st.page_link("pages/02_upload_data.py", label="Upload Data", icon="📁")

st.divider()

col3, col4 = st.columns(2)

with col3:
    st.subheader("📊 Analysis")
    st.write("""
    - Statistical analysis
    - Trend detection
    - Outlier detection
    - Correlation analysis
    """)
    st.page_link("pages/03_Analysis.py", label="Run Analysis", icon="📊")

with col4:
    st.subheader("🏥 System Status")
    st.write("""
    - Check backend health
    - View LLM model info
    - Monitor system status
    """)
    st.page_link("pages/04_Health_Check.py", label="Check Status", icon="🏥")

st.divider()

st.info("""
### 🚀 Quick Start
1. **Upload Data** - Start by uploading a CSV or Excel file
2. **Preview** - Review your data and statistics
3. **Analyze** - Run analysis and get insights
4. **Chat** - Ask follow-up questions to the AI
""")
frontend/components/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .sidebar import render_sidebar
from .chat_widget import render_chat
from .data_display import display_data_preview, display_analysis_results
frontend/components/chat_widget.py
ADDED
@@ -0,0 +1,57 @@
"""Chat interface component"""
import streamlit as st
from utils import client


def render_chat():
    """Render chat interface"""
    st.subheader("💬 Chat with LLM")

    # Chat history
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    # Display chat history
    for message in st.session_state.chat_history:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    # Input
    system_prompt = st.text_area(
        "System Prompt",
        value="You are a helpful data analysis assistant.",
        height=80
    )

    user_input = st.chat_input("Type your message...")

    if user_input:
        # Add user message
        st.session_state.chat_history.append({"role": "user", "content": user_input})

        with st.chat_message("user"):
            st.write(user_input)

        # Get response
        with st.spinner("🤖 Thinking..."):
            messages = [
                {"role": "user", "content": user_input}
            ]
            response = client.chat(messages, system_prompt)

        if "error" in response:
            st.error(f"Error: {response['error']}")
        else:
            assistant_response = response.get("response", "No response")
            st.session_state.chat_history.append({
                "role": "assistant",
                "content": assistant_response
            })

            with st.chat_message("assistant"):
                st.write(assistant_response)

                # Show model info
                st.caption(f"Model: {response.get('model', 'Unknown')}")

            st.rerun()
frontend/components/data_display.py
ADDED
@@ -0,0 +1,51 @@
"""Data display utilities"""
import streamlit as st
import pandas as pd


def display_data_preview(data: list, columns: list):
    """Display data preview"""
    if not data:
        st.warning("No data to display")
        return

    st.subheader("📋 Data Preview")

    # Convert to DataFrame for better display
    df = pd.DataFrame(data)

    # Display summary
    col1, col2, col3 = st.columns(3)
    with col1:
        st.metric("Total Rows", len(df))
    with col2:
        st.metric("Total Columns", len(df.columns))
    with col3:
        st.metric("Memory Usage", f"{df.memory_usage().sum() / 1024:.2f} KB")

    st.divider()

    # Display data table
    st.dataframe(df, use_container_width=True)

    st.divider()

    # Display statistics
    st.subheader("📈 Data Statistics")
    st.dataframe(df.describe(), use_container_width=True)


def display_analysis_results(results: dict):
    """Display analysis results"""
    st.subheader("📊 Analysis Results")

    if "error" in results:
        st.error(f"Analysis failed: {results['error']}")
        return

    # Display results based on type
    if "results" in results:
        st.write(results["results"])

    if "summary" in results:
        st.info(f"**Summary:** {results['summary']}")
frontend/components/sidebar.py
ADDED
@@ -0,0 +1,48 @@
"""Sidebar navigation"""
import streamlit as st
from utils import client


def render_sidebar():
    """Render sidebar with navigation and status"""
    with st.sidebar:
        st.title("🤖 LLM Data Analyzer")

        st.divider()

        # Navigation
        st.subheader("Navigation")
        st.page_link("app.py", label="🏠 Home", icon="🏠")
        st.page_link("pages/01_chat.py", label="💬 Chat", icon="💬")
        st.page_link("pages/02_upload_data.py", label="📁 Upload Data", icon="📁")
        st.page_link("pages/03_Analysis.py", label="📊 Analysis", icon="📊")
        st.page_link("pages/04_Health_Check.py", label="🏥 Health Check", icon="🏥")

        st.divider()

        # Backend Status
        st.subheader("Backend Status")
        if st.button("🔄 Check Status"):
            with st.spinner("Checking..."):
                health = client.health_check()
                if health.get("status") == "healthy":
                    st.success(f"✅ Connected - {health.get('llm_model')}")
                else:
                    st.error("❌ Backend not responding")

        st.divider()

        # Settings
        st.subheader("Settings")
        backend_url = st.text_input(
            "Backend URL",
            value="http://localhost:8000",
            help="Change if backend is running elsewhere"
        )

        st.divider()

        # About
        st.subheader("About")
        st.caption("LLM Data Analyzer Frontend")
        st.caption("Built with Streamlit & FastAPI")
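In sidebar.py above, the "Backend URL" input is read into `backend_url` but never applied anywhere, so editing it has no effect in this commit. A minimal sketch of one way it could be wired up (an assumption, not part of the commit; it reuses the module-level `client` instance and the `/api/v1` prefix defined in `utils/api_client.py`):

```python
import streamlit as st

from utils import client  # module-level APIClient defined in utils/api_client.py

# Hypothetical wiring (not in this commit): point the shared client at the URL
# chosen in the sidebar so subsequent requests target that backend.
backend_url = st.sidebar.text_input("Backend URL", value="http://localhost:8000")
if backend_url:
    client.base_url = f"{backend_url.rstrip('/')}/api/v1"  # same layout as API_BASE_URL
```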
frontend/pages/01_chat.py
ADDED
@@ -0,0 +1,7 @@
"""Chat page"""
import streamlit as st
from components import render_sidebar, render_chat

st.set_page_config(page_title="Chat", page_icon="💬")
render_sidebar()
render_chat()
frontend/pages/02_upload_data.py
ADDED
@@ -0,0 +1,44 @@
"""File upload and preview page"""
import streamlit as st
from components import render_sidebar, display_data_preview
from utils import client

st.set_page_config(page_title="Upload Data", page_icon="📁")
render_sidebar()

st.title("📁 Upload & Preview Data")

st.subheader("Upload File")
uploaded_file = st.file_uploader(
    "Choose a file (CSV or Excel)",
    type=["csv", "xlsx", "xls"],
    help="Upload your data file for analysis"
)

if uploaded_file:
    with st.spinner("📤 Uploading file..."):
        file_bytes = uploaded_file.read()
        result = client.upload_file(file_bytes, uploaded_file.name)

    if "error" in result:
        st.error(f"Upload failed: {result['error']}")
    else:
        st.success(f"✅ File uploaded: {result['filename']}")

        # Store data in session state
        st.session_state.uploaded_data = result.get("preview", [])
        st.session_state.all_columns = result.get("column_names", [])

        # Display file info
        col1, col2, col3 = st.columns(3)
        with col1:
            st.metric("Rows", result.get("rows", 0))
        with col2:
            st.metric("Columns", result.get("columns", 0))
        with col3:
            st.metric("File Type", result.get("file_type", "unknown").upper())

        st.divider()

        # Display preview
        display_data_preview(result.get("preview", []), result.get("column_names", []))
frontend/pages/03_Analysis.py
ADDED
@@ -0,0 +1,54 @@
"""Data analysis page"""
import streamlit as st
from components import render_sidebar, display_analysis_results
from utils import client, get_analysis_types

st.set_page_config(page_title="Analysis", page_icon="📊")
render_sidebar()

st.title("📊 Data Analysis")

if "uploaded_data" not in st.session_state or not st.session_state.uploaded_data:
    st.warning("⚠️ Please upload data first on the 'Upload Data' page")
    st.stop()

data = st.session_state.uploaded_data
columns = st.session_state.get("all_columns", [])

st.subheader("Analysis Settings")

col1, col2 = st.columns(2)
with col1:
    analysis_type = st.selectbox(
        "Analysis Type",
        get_analysis_types(),
        help="Choose the type of analysis to perform"
    )

with col2:
    selected_columns = st.multiselect(
        "Columns to Analyze",
        columns,
        default=columns[:3] if len(columns) > 3 else columns,
        help="Select which columns to analyze"
    )

st.divider()

if st.button("🔍 Run Analysis", use_container_width=True):
    with st.spinner("🔍 Analyzing data..."):
        result = client.analyze(data, analysis_type, selected_columns)

    if "error" in result:
        st.error(f"Analysis failed: {result['error']}")
    else:
        display_analysis_results(result)

# Show suggestions
if st.checkbox("💡 Get AI Suggestions"):
    with st.spinner("🤖 Generating suggestions..."):
        suggestions = client.get_suggestions(data, f"Analysis type: {analysis_type}")
        if "suggestions" in suggestions:
            st.subheader("💡 Suggestions")
            for i, suggestion in enumerate(suggestions["suggestions"], 1):
                st.write(f"{i}. {suggestion}")
frontend/pages/04_Health_Check.py
ADDED
@@ -0,0 +1,36 @@
"""System health check page"""
import streamlit as st
from components import render_sidebar
from utils import client

st.set_page_config(page_title="Health Check", page_icon="🏥")
render_sidebar()

st.title("🏥 System Health Check")

if st.button("🔄 Check Backend Status", use_container_width=True):
    with st.spinner("Checking..."):
        health = client.health_check()

    if health.get("status") == "healthy":
        st.success("✅ Backend is running and healthy!")

        # Display details
        col1, col2 = st.columns(2)
        with col1:
            st.metric("Status", health.get("status", "unknown"))
            st.metric("Service", health.get("service", "unknown"))

        with col2:
            st.metric("LLM Model", health.get("llm_model", "unknown"))
            st.metric("Environment", health.get("environment", "unknown"))

        st.divider()

        # Display full response
        st.subheader("📋 Full Response")
        st.json(health)
    else:
        st.error("❌ Backend is not responding or unhealthy")
        if "detail" in health:
            st.error(f"Details: {health['detail']}")
frontend/requirements.txt
ADDED
@@ -0,0 +1,5 @@
streamlit>=1.28.0
requests>=2.31.0
pandas>=2.0.0
plotly>=5.17.0
python-dotenv>=1.0.0
frontend/utils/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .api_client import client
from .config import *
from .helpers import *
frontend/utils/api_client.py
ADDED
@@ -0,0 +1,110 @@
"""API client for backend communication"""
import requests
import logging
from typing import Dict, List, Any

logger = logging.getLogger(__name__)

# Import config
import os
from dotenv import load_dotenv

load_dotenv()
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
API_BASE_URL = f"{BACKEND_URL}/api/v1"
TIMEOUT_LONG = 120


class APIClient:
    """Client for backend API communication"""

    def __init__(self):
        self.base_url = API_BASE_URL
        self.timeout = TIMEOUT_LONG

    def health_check(self) -> Dict[str, Any]:
        """Check backend health status"""
        try:
            response = requests.get(
                f"{self.base_url}/health",
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"❌ Health check failed: {e}")
            return {"status": "error", "detail": str(e)}

    def chat(self, messages: List[Dict], system_prompt: str = None) -> Dict[str, Any]:
        """Send chat request to backend"""
        try:
            payload = {
                "messages": messages,
                "system_prompt": system_prompt or "You are a helpful data analysis assistant."
            }
            response = requests.post(
                f"{self.base_url}/chat",
                json=payload,
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"❌ Chat request failed: {e}")
            return {"error": str(e)}

    def upload_file(self, file_bytes: bytes, filename: str) -> Dict[str, Any]:
        """Upload file to backend"""
        try:
            files = {"file": (filename, file_bytes)}
            response = requests.post(
                f"{self.base_url}/upload",
                files=files,
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"❌ File upload failed: {e}")
            return {"error": str(e)}

    def analyze(self, data: List[Dict], analysis_type: str, columns: List[str] = None) -> Dict[str, Any]:
        """Request data analysis"""
        try:
            payload = {
                "data": data,
                "analysis_type": analysis_type,
                "columns": columns or []
            }
            response = requests.post(
                f"{self.base_url}/analyze",
                json=payload,
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"❌ Analysis request failed: {e}")
            return {"error": str(e)}

    def get_suggestions(self, data: List[Dict], context: str = None) -> Dict[str, Any]:
        """Get AI suggestions for data"""
        try:
            payload = {
                "data": data,
                "analysis_context": context or ""
            }
            response = requests.post(
                f"{self.base_url}/suggestions",
                json=payload,
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"❌ Suggestion request failed: {e}")
            return {"error": str(e)}


# Global client instance
client = APIClient()
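All pages talk to the backend through the single `client` instance created at the bottom of api_client.py and re-exported by `frontend/utils/__init__.py`. A minimal usage sketch, assuming the FastAPI backend for this project is running at the default `BACKEND_URL`:

```python
from utils import client  # the module-level APIClient instance above

# Health probe: returns the backend's JSON body on success,
# or {"status": "error", "detail": ...} if the request fails.
health = client.health_check()
print(health.get("status"), health.get("llm_model"))

# Single-turn chat request: errors come back as {"error": ...} rather than raising.
reply = client.chat([{"role": "user", "content": "What analyses suit time-series data?"}])
print(reply.get("response", reply.get("error")))
```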
frontend/utils/config.py
ADDED
@@ -0,0 +1,17 @@
"""Configuration for frontend"""
import os
from dotenv import load_dotenv

load_dotenv()

# Backend API
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
API_BASE_URL = f"{BACKEND_URL}/api/v1"

# Timeouts
TIMEOUT_SHORT = 5
TIMEOUT_LONG = 120

# UI Settings
PAGE_TITLE = "LLM Data Analyzer"
PAGE_ICON = "🤖"
frontend/utils/helpers.py
ADDED
@@ -0,0 +1,37 @@
"""Helper functions"""
import streamlit as st
from datetime import datetime


def format_timestamp(ts: str) -> str:
    """Format timestamp for display"""
    try:
        dt = datetime.fromisoformat(ts)
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except:
        return ts


def get_analysis_types() -> list:
    """Get available analysis types"""
    return [
        "statistical_summary",
        "trend_detection",
        "outlier_detection",
        "correlation_analysis"
    ]


def display_error(error_msg: str):
    """Display error message"""
    st.error(f"❌ Error: {error_msg}")


def display_success(success_msg: str):
    """Display success message"""
    st.success(f"✅ {success_msg}")


def display_info(info_msg: str):
    """Display info message"""
    st.info(f"ℹ️ {info_msg}")