Arif committed on
Commit
3f44a73
·
1 Parent(s): 2b6c2ca

Finally running backend

Browse files
backend/__init__.py ADDED
File without changes
backend/app/__init__.py ADDED
File without changes
backend/app/api/__init__.py ADDED
File without changes
backend/app/api/v1/__init__.py ADDED
File without changes
backend/app/config.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Configuration management for FastAPI backend
3
+ Loads ALL settings from .env.local at project ROOT
4
+ """
5
+ from pydantic_settings import BaseSettings
6
+ from pathlib import Path
7
+ from typing import List
8
+ import logging
9
+
10
+
11
def find_env_file() -> Path:
    """Locate .env.local by probing a list of candidate paths.

    Portable locations (current working directory, project root relative to
    this file) are checked before the machine-specific developer path, so the
    project works on any machine with .env.local in a standard place and the
    dev-box path cannot shadow a local file.

    Returns:
        Path to the first existing .env.local.

    Raises:
        FileNotFoundError: if none of the candidate paths exist.
    """
    possible_paths = [
        Path.cwd() / ".env.local",
        Path(__file__).parent.parent.parent / ".env.local",  # project root
        Path(__file__).parent.parent / ".env.local",         # backend/
        # Machine-specific fallback kept for the original dev box;
        # TODO: remove once all environments use the relative paths above.
        Path("/Users/arif/Projects/Personal/LLM-Data-Analyzer/.env.local"),
    ]

    for path in possible_paths:
        if path.exists():
            return path

    raise FileNotFoundError(
        "❌ .env.local not found! Checked:\n" +
        "\n".join(f"  - {p}" for p in possible_paths)
    )
28
+
29
+
30
class Settings(BaseSettings):
    """Application settings - ALL loaded from .env.local.

    Every field is required (no defaults), so startup fails fast with a
    pydantic ValidationError if a variable is missing from .env.local.
    """

    # API Configuration
    fastapi_env: str
    api_host: str
    api_port: int
    log_level: str  # standard logging level name, e.g. "INFO"

    # LLM Configuration
    llm_model_name: str
    llm_max_tokens: int
    llm_temperature: float
    llm_device: str

    # File Upload
    max_file_size: int   # presumably bytes — TODO confirm unit against callers
    upload_timeout: int  # presumably seconds — TODO confirm unit against callers

    # CORS
    cors_origins: List[str]  # pydantic-settings parses a JSON-style list from the env value

    class Config:
        # NOTE: find_env_file() runs at class-definition time, so importing
        # this module raises FileNotFoundError when .env.local is absent.
        env_file = str(find_env_file())
        case_sensitive = False
        extra = "ignore"  # unrelated variables in .env.local are tolerated
56
+
57
+
58
# Singleton settings instance, created at import time (raises if .env.local
# is missing or any required variable is absent).
settings = Settings()
59
+
60
+
61
def get_logger(name: str, level: str = None) -> logging.Logger:
    """Return a logger configured with a single stream handler.

    Args:
        name: Logger name (typically the caller's __name__).
        level: Optional level-name override (case-insensitive, e.g. "debug").
            When omitted, settings.log_level from .env.local is used.

    Returns:
        Configured logging.Logger instance.
    """
    logger = logging.getLogger(name)

    # Attach a handler only once so repeated calls don't duplicate output.
    if not logger.handlers:
        handler = logging.StreamHandler()
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Normalize the level name: logging.setLevel("info") raises ValueError,
    # so a lowercase LOG_LEVEL in .env.local would crash here otherwise.
    effective = level if level is not None else settings.log_level
    logger.setLevel(effective.upper())
    return logger
backend/app/main.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Main FastAPI application entry point
3
+ Uses relative imports to work correctly from any directory
4
+ """
5
+ from fastapi import FastAPI
6
+ from fastapi.middleware.cors import CORSMiddleware
7
+ from contextlib import asynccontextmanager
8
+
9
+ from .config import settings, get_logger
10
+
11
# Module-level logger; level comes from settings.log_level (see config.get_logger).
logger = get_logger(__name__)
12
+
13
+
14
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage app lifecycle: log at startup, yield while serving, log at shutdown."""
    # Startup
    logger.info("🚀 FastAPI application starting...")
    logger.info(f"Environment: {settings.fastapi_env}")
    yield
    # Shutdown
    logger.info("🛑 FastAPI application shutting down...")
23
+
24
+
25
# Create FastAPI app; the lifespan handler above logs startup/shutdown.
app = FastAPI(
    title="LLM Data Analyzer API",
    description="Backend API for LLM-based data analysis and ML suggestions",
    version="0.1.0",
    lifespan=lifespan
)

# Add CORS middleware — allowed origins come from CORS_ORIGINS in .env.local.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
41
+
42
+
43
# Health check endpoint
@app.get("/health")
async def health_check():
    """Report service liveness plus the active environment name."""
    payload = {
        "status": "healthy",
        "environment": settings.fastapi_env,
        "service": "llm-data-analyzer-backend",
    }
    return payload
52
+
53
+
54
# Root endpoint
@app.get("/")
async def root():
    """Describe the API and point callers at the docs and health URLs."""
    info = dict(
        service="LLM Data Analyzer API",
        version="0.1.0",
        docs_url="/docs",
        health_url="/health",
    )
    return info
64
+
65
+
66
if __name__ == "__main__":
    import uvicorn

    # Auto-reload only in development; host/port come from .env.local via Settings.
    dev_mode = settings.fastapi_env == "development"
    uvicorn.run(
        "backend.app.main:app",
        host=settings.api_host,
        port=settings.api_port,
        reload=dev_mode,
    )
backend/app/middleware/__init__.py ADDED
File without changes
backend/app/services/__init__.py ADDED
File without changes
backend/app/services/analyzer.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Analysis Service - statistical and exploratory analysis"""
2
+ from app.config import get_logger
3
+
4
+ logger = get_logger(__name__)
5
+
6
+ class Analyzer:
7
+ """Performs statistical analysis on data"""
8
+
9
+ def __init__(self):
10
+ logger.info("Analyzer initialized")
11
+
12
+ async def analyze(self, data):
13
+ """Analyze data"""
14
+ return {"status": "Analysis coming in Phase 4"}
backend/app/services/dashboard_generator.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Dashboard Generation Service - creates interactive dashboards"""
2
+ from app.config import get_logger
3
+
4
+ logger = get_logger(__name__)
5
+
6
+ class DashboardGenerator:
7
+ """Generates auto-configured Plotly dashboards"""
8
+
9
+ def __init__(self):
10
+ logger.info("DashboardGenerator initialized")
11
+
12
+ async def generate(self, data):
13
+ """Generate dashboard from data"""
14
+ return {"dashboard": "Coming in Phase 4"}
backend/app/services/data_processor.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Data Processing Service - handles file uploads and data parsing"""
2
+ from app.config import get_logger
3
+
4
+ logger = get_logger(__name__)
5
+
6
+ class DataProcessor:
7
+ """Handles data file processing (CSV, XLS, XLSX)"""
8
+
9
+ def __init__(self):
10
+ logger.info("DataProcessor initialized")
11
+
12
+ async def process_file(self, file_path: str):
13
+ """Process uploaded file"""
14
+ return {"status": "File processing coming in Phase 3"}
backend/app/services/llm_service.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """LLM Service - handles MLX Llama 2 inference"""
2
+ from app.config import get_logger
3
+
4
+ logger = get_logger(__name__)
5
+
6
+ class LLMService:
7
+ """Wrapper around MLX LLM for convenient inference"""
8
+
9
+ def __init__(self):
10
+ """Initialize LLM Service - actual LLM loading in Phase 2"""
11
+ self.llm = None
12
+ logger.info("LLMService initialized (Phase 2 will load actual model)")
13
+
14
+ async def chat(self, message: str, history: list = None) -> str:
15
+ """Process user message and return LLM response"""
16
+ logger.info(f"Chat request: {message}")
17
+ return "LLM response will be here in Phase 2"
backend/app/services/ml_suggester.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ML Suggestion Service - suggests models and business problems"""
2
+ from app.config import get_logger
3
+
4
+ logger = get_logger(__name__)
5
+
6
+ class MLSuggester:
7
+ """Suggests appropriate ML models and identifies business problems"""
8
+
9
+ def __init__(self):
10
+ logger.info("MLSuggester initialized")
11
+
12
+ async def suggest_models(self, data_summary: dict):
13
+ """Suggest ML models based on data characteristics"""
14
+ return {"suggestions": []}
backend/app/utils/__init__.py ADDED
File without changes
backend/tests/__init__.py ADDED
File without changes
main.py DELETED
@@ -1,6 +0,0 @@
1
- def main():
2
- print("Hello from llm-data-analyzer!")
3
-
4
-
5
- if __name__ == "__main__":
6
- main()