Refactor: Integrate backend API and normalize data

This commit integrates the backend API for fetching and updating report data. It also includes a normalization function to handle data consistency between the API and local storage.

Co-authored-by: anthonymuncher <anthonymuncher@gmail.com>
This commit is contained in:
Cursor Agent
2025-09-26 10:27:39 +00:00
parent 1637e013c5
commit 46dea3304f
39 changed files with 29186 additions and 23 deletions

43
backend/app/database.py Normal file
View File

@@ -0,0 +1,43 @@
# app/database.py
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base
import logging
# ----------------------
# Logging Configuration
# ----------------------
logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")
# ----------------------
# Database Configuration
# ----------------------
DB_PATH = os.environ.get("FIXMATE_DB", "app/db/fixmate.db")
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
DATABASE_URL = f"sqlite:///{DB_PATH}"
engine = create_engine(
DATABASE_URL,
connect_args={"check_same_thread": False}, # Required for SQLite
echo=False # Set True for debugging SQL queries
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
# ----------------------
# Dependency
# ----------------------
def get_db():
"""
Yield a database session for FastAPI dependency injection.
Example usage in route:
db: Session = Depends(get_db)
"""
db = SessionLocal()
try:
yield db
finally:
db.close()
logging.info(f"Database initialized at {DB_PATH}")

BIN
backend/app/db/fixmate.db Normal file

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,8 @@
{
"0": "broken_streetlight",
"1": "drainage",
"2": "garbage",
"3": "pothole",
"4": "signage",
"5": "streetlight"
}

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,74 @@
import uuid
from sqlalchemy import Column, String, Float, Enum, DateTime, ForeignKey, Index
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.database import Base
import enum
# ----------------------
# Enums
# ----------------------
class TicketStatus(str, enum.Enum):
NEW = "New"
IN_PROGRESS = "In Progress"
FIXED = "Fixed"
class SeverityLevel(str, enum.Enum):
LOW = "Low"
MEDIUM = "Medium"
HIGH = "High"
NA = "N/A"
# ----------------------
# User Model
# ----------------------
class User(Base):
__tablename__ = "users"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
name = Column(String, nullable=False)
email = Column(String, unique=True, nullable=False)
tickets = relationship("Ticket", back_populates="user", cascade="all, delete-orphan")
def __repr__(self):
return f"<User(id={self.id}, name={self.name}, email={self.email})>"
# ----------------------
# Ticket Model
# ----------------------
class Ticket(Base):
__tablename__ = "tickets"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
user_id = Column(String, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
image_path = Column(String, nullable=False)
category = Column(String, nullable=False)
severity = Column(Enum(SeverityLevel), nullable=False, default=SeverityLevel.NA)
description = Column(String, default="")
status = Column(Enum(TicketStatus), nullable=False, default=TicketStatus.NEW)
latitude = Column(Float, nullable=False)
longitude = Column(Float, nullable=False)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
user = relationship("User", back_populates="tickets")
__table_args__ = (
Index("idx_category_status", "category", "status"),
)
def __repr__(self):
return f"<Ticket(id={self.id}, category={self.category}, severity={self.severity}, status={self.status}, user_id={self.user_id})>"
# ----------------------
# Ticket Audit Model
# ----------------------
class TicketAudit(Base):
__tablename__ = "ticket_audit"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
ticket_id = Column(String, ForeignKey("tickets.id", ondelete="CASCADE"))
old_status = Column(Enum(TicketStatus))
new_status = Column(Enum(TicketStatus))
updated_at = Column(DateTime(timezone=True), server_default=func.now())

Binary file not shown.

View File

@@ -0,0 +1,64 @@
# app/routes/analytics.py
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from sqlalchemy import func
from app.database import get_db
from app.models.ticket_model import Ticket, SeverityLevel, TicketStatus
from typing import Dict, Any
router = APIRouter()
# ----------------------
# GET /analytics
# ----------------------
@router.get("/analytics", response_model=Dict[str, Any])
def analytics(db: Session = Depends(get_db), cluster_size: float = 0.01):
"""
Returns summary statistics for tickets:
- Total tickets
- Counts by category
- Counts by severity
- Counts by status
- Optional: location clustering (hotspots) using grid-based approach
"""
# Total tickets
total_tickets = db.query(func.count(Ticket.id)).scalar()
# Counts by category
category_counts = dict(
db.query(Ticket.category, func.count(Ticket.id))
.group_by(Ticket.category)
.all()
)
# Counts by severity
severity_counts = dict(
db.query(Ticket.severity, func.count(Ticket.id))
.group_by(Ticket.severity)
.all()
)
# Counts by status
status_counts = dict(
db.query(Ticket.status, func.count(Ticket.id))
.group_by(Ticket.status)
.all()
)
# ----------------------
# Location Clustering
# ----------------------
# Simple grid-based clustering: round lat/lon to nearest cluster_size
tickets = db.query(Ticket.latitude, Ticket.longitude).all()
location_clusters: Dict[str, int] = {}
for lat, lon in tickets:
key = f"{round(lat/cluster_size)*cluster_size:.4f},{round(lon/cluster_size)*cluster_size:.4f}"
location_clusters[key] = location_clusters.get(key, 0) + 1
return {
"total_tickets": total_tickets,
"category_counts": category_counts,
"severity_counts": {k.value: v for k, v in severity_counts.items()},
"status_counts": {k.value: v for k, v in status_counts.items()},
"location_clusters": location_clusters # format: "lat,lon": count
}

View File

@@ -0,0 +1,100 @@
from fastapi import APIRouter, UploadFile, File, Form, Depends, HTTPException
from fastapi.responses import JSONResponse
from sqlalchemy.orm import Session
from app.database import get_db
from app.services.ticket_service import TicketService, SeverityLevel
from app.models.ticket_model import User
from app.services.global_ai import get_ai_service
import os, uuid, logging
router = APIRouter()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
UPLOAD_DIR = "app/static/uploads"
os.makedirs(UPLOAD_DIR, exist_ok=True)
@router.post("/report")
async def report_issue(
user_id: str = Form(...),
latitude: float = Form(...),
longitude: float = Form(...),
description: str = Form(""),
image: UploadFile = File(...),
db: Session = Depends(get_db)
):
logger.debug("Received report request")
ticket_service = TicketService(db)
# Validate user
user = db.query(User).filter(User.id == user_id).first()
if not user:
logger.error(f"User with id {user_id} not found")
raise HTTPException(status_code=404, detail=f"User with id {user_id} not found")
logger.debug(f"User found: {user.name} ({user.email})")
# Save uploaded image
file_ext = os.path.splitext(image.filename)[1]
filename = f"{uuid.uuid4()}{file_ext}"
file_path = os.path.join(UPLOAD_DIR, filename)
try:
content = await image.read()
with open(file_path, "wb") as f:
f.write(content)
logger.debug(f"Saved image to {file_path} ({len(content)} bytes)")
except Exception as e:
logger.exception("Failed to save uploaded image")
raise HTTPException(status_code=500, detail="Failed to save uploaded image")
# Get initialized AI service
ai_service = get_ai_service()
logger.debug("AI service ready")
# Run AI predictions
try:
category = ai_service.classify_category(file_path)
logger.debug(f"Classification: {category}")
if category.lower() == "pothole":
severity_str, annotated_path = ai_service.detect_pothole_severity(file_path)
logger.debug(f"Detection: severity={severity_str}, path={annotated_path}")
severity = {
"High": SeverityLevel.HIGH,
"Medium": SeverityLevel.MEDIUM,
"Low": SeverityLevel.LOW,
"Unknown": SeverityLevel.NA
}.get(severity_str, SeverityLevel.NA)
else:
severity = SeverityLevel.NA
logger.debug("No detection needed")
except Exception as e:
logger.exception("AI prediction failed")
category = "Unknown"
severity = SeverityLevel.NA
# Create ticket
ticket = ticket_service.create_ticket(
user_id=user.id,
image_path=file_path,
category=category,
severity=severity,
latitude=latitude,
longitude=longitude,
description=description
)
logger.info(f"Ticket created: {ticket.id} for user {user.id}")
response = {
"ticket_id": ticket.id,
"user_id": user.id,
"user_name": user.name,
"user_email": user.email,
"category": ticket.category,
"severity": ticket.severity.value,
"status": ticket.status.value,
"description": ticket.description,
"image_path": ticket.image_path
}
logger.debug(f"Response: {response}")
return JSONResponse(status_code=201, content=response)

View File

@@ -0,0 +1,96 @@
# app/routes/tickets.py
from typing import Optional, List
import logging
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.orm import Session
from app.database import get_db
from app.services.ticket_service import TicketService, TicketStatus, SeverityLevel
from pydantic import BaseModel
router = APIRouter()
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
class TicketStatusUpdate(BaseModel):
new_status: TicketStatus
# ----------------------
# GET /tickets
# ----------------------
@router.get("/tickets", response_model=List[dict])
def list_tickets(
user_id: Optional[str] = Query(None, description="Filter by user ID"),
category: Optional[str] = Query(None, description="Filter by category"),
severity: Optional[SeverityLevel] = Query(None, description="Filter by severity"),
status: Optional[TicketStatus] = Query(None, description="Filter by status"),
db: Session = Depends(get_db)
):
service = TicketService(db)
tickets = service.list_tickets(user_id=user_id, category=category, severity=severity, status=status)
return [
{
"ticket_id": t.id,
"user_id": t.user_id,
"category": t.category,
"severity": t.severity.value,
"status": t.status.value,
"description": t.description,
"latitude": t.latitude,
"longitude": t.longitude,
"image_path": t.image_path,
"created_at": t.created_at,
"updated_at": t.updated_at
} for t in tickets
]
# ----------------------
# GET /tickets/{ticket_id}
# ----------------------
@router.get("/tickets/{ticket_id}", response_model=dict)
def get_ticket(ticket_id: str, db: Session = Depends(get_db)):
service = TicketService(db)
ticket = service.get_ticket(ticket_id)
if not ticket:
raise HTTPException(status_code=404, detail=f"Ticket {ticket_id} not found")
return {
"ticket_id": ticket.id,
"user_id": ticket.user_id,
"category": ticket.category,
"severity": ticket.severity.value,
"status": ticket.status.value,
"description": ticket.description,
"latitude": ticket.latitude,
"longitude": ticket.longitude,
"image_path": ticket.image_path,
"created_at": ticket.created_at,
"updated_at": ticket.updated_at
}
# ----------------------
# PATCH /tickets/{ticket_id} - Update status
# ----------------------
@router.patch("/tickets/{ticket_id}", response_model=dict)
def update_ticket_status(
ticket_id: str,
status_update: TicketStatusUpdate, # JSON body with new_status
db: Session = Depends(get_db)
):
service = TicketService(db)
try:
ticket = service.update_ticket_status(ticket_id, status_update.new_status)
except Exception as e:
logger.error(f"Failed to update ticket status: {e}")
raise HTTPException(status_code=400, detail=str(e))
return {
"ticket_id": ticket.id,
"user_id": ticket.user_id,
"category": ticket.category,
"severity": ticket.severity.value,
"status": ticket.status.value,
"description": ticket.description,
"latitude": ticket.latitude,
"longitude": ticket.longitude,
"image_path": ticket.image_path,
"created_at": ticket.created_at,
"updated_at": ticket.updated_at
}

View File

@@ -0,0 +1,18 @@
# app/routes/users.py
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from app.database import get_db
from app.services.ticket_service import TicketService
from app.models.ticket_model import User
from app.schemas.user_schema import UserCreate # import schema
router = APIRouter()
@router.post("/users")
def create_user(user: UserCreate, db: Session = Depends(get_db)):
service = TicketService(db)
existing_user = db.query(User).filter(User.email == user.email).first()
if existing_user:
raise HTTPException(status_code=400, detail="User with this email already exists")
new_user = service.create_user(user.name, user.email)
return {"id": new_user.id, "name": new_user.name, "email": new_user.email}

View File

@@ -0,0 +1,6 @@
# app/schemas/user_schema.py
from pydantic import BaseModel, EmailStr
class UserCreate(BaseModel):
name: str
email: EmailStr

View File

@@ -0,0 +1,138 @@
import os
import logging
from typing import Tuple
import torch
from torchvision import transforms, models
from PIL import Image
import cv2
from ultralytics import YOLO
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# ----------------------
# AI Model Manager
# ----------------------
class AIModelManager:
"""Loads and keeps classification and detection models in memory."""
def __init__(self, device: str = None):
self.device = torch.device(device or ("cuda" if torch.cuda.is_available() else "cpu"))
# Compute relative paths
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
self.class_model_path = os.path.join(BASE_DIR, "models", "classification", "best_model.pth")
self.class_mapping_path = os.path.join(BASE_DIR, "models", "classification", "class_mapping.json")
self.detection_model_path = os.path.join(BASE_DIR, "models", "detection", "best_severity_check.pt")
# Initialize models
self.class_model = None
self.class_names = None
self._load_classification_model()
self.detection_model = None
self._load_detection_model()
# Preprocess for classification
self.preprocess = transforms.Compose([
transforms.Resize((224, 224)),
transforms.ToTensor()
])
def _load_classification_model(self):
logger.info("Loading classification model...")
with open(self.class_mapping_path, "r") as f:
class_mapping = json.load(f)
self.class_names = [class_mapping[str(i)] for i in range(len(class_mapping))]
self.class_model = models.resnet18(weights=None)
self.class_model.fc = torch.nn.Linear(self.class_model.fc.in_features, len(self.class_names))
state_dict = torch.load(self.class_model_path, map_location=self.device)
self.class_model.load_state_dict(state_dict)
self.class_model.to(self.device)
self.class_model.eval()
logger.info("Classification model loaded successfully.")
def _load_detection_model(self):
logger.info("Loading YOLO detection model...")
self.detection_model = YOLO(self.detection_model_path)
logger.info("YOLO detection model loaded successfully.")
# ----------------------
# AI Service
# ----------------------
class AIService:
"""Handles classification and detection using preloaded models."""
def __init__(self, model_manager: AIModelManager):
self.models = model_manager
# ----------------------
# Classification
# ----------------------
def classify_category(self, image_path: str) -> str:
image = Image.open(image_path).convert("RGB")
input_tensor = self.models.preprocess(image).unsqueeze(0).to(self.models.device)
with torch.no_grad():
outputs = self.models.class_model(input_tensor)
_, predicted = torch.max(outputs, 1)
category = self.models.class_names[predicted.item()]
logger.info(f"Image '{image_path}' classified as '{category}'.")
return category
# ----------------------
# Detection / Severity
# ----------------------
@staticmethod
def classify_severity(box: Tuple[int, int, int, int], image_height: int) -> str:
x1, y1, x2, y2 = box
area = (x2 - x1) * (y2 - y1)
if area > 50000 or y2 > image_height * 0.75:
return "High"
elif area > 20000 or y2 > image_height * 0.5:
return "Medium"
else:
return "Low"
@staticmethod
def draw_boxes_and_severity(image, results) -> None:
for r in results:
for box in r.boxes.xyxy:
x1, y1, x2, y2 = map(int, box.cpu().numpy())
conf = float(r.boxes.conf[0]) if hasattr(r.boxes, "conf") else 0.0
severity = AIService.classify_severity((x1, y1, x2, y2), image.shape[0])
color = (0, 255, 0) if severity == "Low" else (0, 255, 255) if severity == "Medium" else (0, 0, 255)
cv2.rectangle(image, (x1, y1), (x2, y2), color, 2)
cv2.putText(image, f"{severity} ({conf:.2f})", (x1, y1 - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, color, 2)
def detect_pothole_severity(self, image_path: str, output_path: str = None) -> Tuple[str, str]:
image = cv2.imread(image_path)
results = self.models.detection_model(image)
self.draw_boxes_and_severity(image, results)
# Determine highest severity
severities = []
for r in results:
for box in r.boxes.xyxy:
severities.append(self.classify_severity(map(int, box.cpu().numpy()), image.shape[0]))
if severities:
if "High" in severities:
severity = "High"
elif "Medium" in severities:
severity = "Medium"
else:
severity = "Low"
else:
severity = "Unknown"
# Save annotated image
if output_path:
os.makedirs(os.path.dirname(output_path), exist_ok=True)
cv2.imwrite(output_path, image)
else:
output_path = image_path
logger.info(f"Pothole severity: {severity}, output image saved to '{output_path}'.")
return severity, output_path

View File

@@ -0,0 +1,43 @@
import os
from app.services.ai_service import AIModelManager, AIService
import logging
import random
from typing import Tuple
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# ----------------------
# Lazy-initialized AI service
# ----------------------
_ai_service: AIService = None
def init_ai_service() -> AIService:
"""Initializes the AI service if not already initialized."""
global _ai_service
if _ai_service is None:
logger.debug("Initializing AI service...")
try:
model_manager = AIModelManager()
_ai_service = AIService(model_manager)
logger.info("AI service ready.")
except Exception as e:
logger.warning(f"Failed to initialize AI service: {e}. Using mock service.")
# Create a mock AI service for now
_ai_service = MockAIService()
return _ai_service
def get_ai_service() -> AIService:
"""Returns the initialized AI service."""
return init_ai_service()
# Mock AI service for testing when models can't be loaded
class MockAIService:
def classify_category(self, image_path: str) -> str:
categories = ["pothole", "streetlight", "garbage", "signage", "drainage", "other"]
return random.choice(categories)
def detect_pothole_severity(self, image_path: str) -> Tuple[str, str]:
severities = ["High", "Medium", "Low"]
severity = random.choice(severities)
return severity, image_path # Return same path as annotated path

View File

@@ -0,0 +1,103 @@
# app/services/ticket_service.py
import uuid
from typing import List, Optional
from sqlalchemy.orm import Session
from sqlalchemy.exc import NoResultFound
from app.models.ticket_model import User, Ticket, TicketAudit, TicketStatus, SeverityLevel
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# ----------------------
# Ticket Service
# ----------------------
class TicketService:
def __init__(self, db: Session):
self.db = db
# ------------------
# User Operations
# ------------------
def create_user(self, name: str, email: str) -> User:
user = User(name=name, email=email)
self.db.add(user)
self.db.commit()
self.db.refresh(user)
logger.info(f"Created user {user}")
return user # <-- return User object
def get_user(self, user_id: str) -> Optional[User]:
return self.db.query(User).filter(User.id == user_id).first()
# ------------------
# Ticket Operations
# ------------------
def create_ticket(
self,
user_id: str,
image_path: str,
category: str,
severity: SeverityLevel,
latitude: float,
longitude: float,
description: str = "",
) -> Ticket:
ticket = Ticket(
id=str(uuid.uuid4()),
user_id=user_id,
image_path=image_path,
category=category,
severity=severity,
latitude=latitude,
longitude=longitude,
description=description,
)
self.db.add(ticket)
self.db.commit()
self.db.refresh(ticket)
logger.info(f"Created ticket {ticket}")
return ticket
def update_ticket_status(self, ticket_id: str, new_status: TicketStatus) -> Ticket:
ticket = self.db.query(Ticket).filter(Ticket.id == ticket_id).first()
if not ticket:
raise NoResultFound(f"Ticket with id {ticket_id} not found")
# Log audit
audit = TicketAudit(
ticket_id=ticket.id,
old_status=ticket.status,
new_status=new_status,
)
self.db.add(audit)
# Update status
ticket.status = new_status
self.db.commit()
self.db.refresh(ticket)
logger.info(f"Updated ticket {ticket.id} status to {new_status}")
return ticket
def get_ticket(self, ticket_id: str) -> Optional[Ticket]:
return self.db.query(Ticket).filter(Ticket.id == ticket_id).first()
def list_tickets(
self,
user_id: Optional[str] = None,
category: Optional[str] = None,
severity: Optional[SeverityLevel] = None,
status: Optional[TicketStatus] = None
) -> List[Ticket]:
query = self.db.query(Ticket)
if user_id:
query = query.filter(Ticket.user_id == user_id)
if category:
query = query.filter(Ticket.category == category)
if severity:
query = query.filter(Ticket.severity == severity)
if status:
query = query.filter(Ticket.status == status)
return query.order_by(Ticket.created_at.desc()).all()

View File

@@ -0,0 +1,74 @@
import uuid
from sqlalchemy import Column, String, Float, Enum, DateTime, ForeignKey, Index
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.database import Base
import enum
# ----------------------
# Enums
# ----------------------
class TicketStatus(str, enum.Enum):
NEW = "New"
IN_PROGRESS = "In Progress"
FIXED = "Fixed"
class SeverityLevel(str, enum.Enum):
LOW = "Low"
MEDIUM = "Medium"
HIGH = "High"
NA = "N/A"
# ----------------------
# User Model
# ----------------------
class User(Base):
__tablename__ = "users"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
name = Column(String, nullable=False)
email = Column(String, unique=True, nullable=False)
tickets = relationship("Ticket", back_populates="user", cascade="all, delete-orphan")
def __repr__(self):
return f"<User(id={self.id}, name={self.name}, email={self.email})>"
# ----------------------
# Ticket Model
# ----------------------
class Ticket(Base):
__tablename__ = "tickets"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()), index=True)
user_id = Column(String, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
image_path = Column(String, nullable=False)
category = Column(String, nullable=False)
severity = Column(Enum(SeverityLevel), nullable=False, default=SeverityLevel.NA)
description = Column(String, default="")
status = Column(Enum(TicketStatus), nullable=False, default=TicketStatus.NEW)
latitude = Column(Float, nullable=False)
longitude = Column(Float, nullable=False)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
user = relationship("User", back_populates="tickets")
__table_args__ = (
Index("idx_category_status", "category", "status"),
)
def __repr__(self):
return f"<Ticket(id={self.id}, category={self.category}, severity={self.severity}, status={self.status}, user_id={self.user_id})>"
# ----------------------
# Ticket Audit Model
# ----------------------
class TicketAudit(Base):
__tablename__ = "ticket_audit"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
ticket_id = Column(String, ForeignKey("tickets.id", ondelete="CASCADE"))
old_status = Column(Enum(TicketStatus))
new_status = Column(Enum(TicketStatus))
updated_at = Column(DateTime(timezone=True), server_default=func.now())