Refactor: Integrate backend API and normalize data
This commit integrates the backend API for fetching and updating report data. It also includes a normalization function to handle data consistency between the API and local storage.

Co-authored-by: anthonymuncher <anthonymuncher@gmail.com>
backend/app/services/ai_service.py (Normal file, 138 lines added)
@@ -0,0 +1,138 @@
import os
import logging
from typing import Tuple

import torch
from torchvision import transforms, models
from PIL import Image
import cv2
from ultralytics import YOLO
import json

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


# ----------------------
# AI Model Manager
# ----------------------
class AIModelManager:
    """Loads and keeps classification and detection models in memory."""
    def __init__(self, device: str = None):
        self.device = torch.device(device or ("cuda" if torch.cuda.is_available() else "cpu"))

        # Compute paths relative to the app package
        BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        self.class_model_path = os.path.join(BASE_DIR, "models", "classification", "best_model.pth")
        self.class_mapping_path = os.path.join(BASE_DIR, "models", "classification", "class_mapping.json")
        self.detection_model_path = os.path.join(BASE_DIR, "models", "detection", "best_severity_check.pt")

        # Initialize models
        self.class_model = None
        self.class_names = None
        self._load_classification_model()
        self.detection_model = None
        self._load_detection_model()

        # Preprocessing pipeline for classification
        self.preprocess = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor()
        ])

    def _load_classification_model(self):
        logger.info("Loading classification model...")
        with open(self.class_mapping_path, "r") as f:
            class_mapping = json.load(f)
        self.class_names = [class_mapping[str(i)] for i in range(len(class_mapping))]

        self.class_model = models.resnet18(weights=None)
        self.class_model.fc = torch.nn.Linear(self.class_model.fc.in_features, len(self.class_names))
        state_dict = torch.load(self.class_model_path, map_location=self.device)
        self.class_model.load_state_dict(state_dict)
        self.class_model.to(self.device)
        self.class_model.eval()
        logger.info("Classification model loaded successfully.")

    def _load_detection_model(self):
        logger.info("Loading YOLO detection model...")
        self.detection_model = YOLO(self.detection_model_path)
        logger.info("YOLO detection model loaded successfully.")


# ----------------------
# AI Service
# ----------------------
class AIService:
    """Handles classification and detection using preloaded models."""
    def __init__(self, model_manager: AIModelManager):
        self.models = model_manager

    # ----------------------
    # Classification
    # ----------------------
    def classify_category(self, image_path: str) -> str:
        image = Image.open(image_path).convert("RGB")
        input_tensor = self.models.preprocess(image).unsqueeze(0).to(self.models.device)
        with torch.no_grad():
            outputs = self.models.class_model(input_tensor)
            _, predicted = torch.max(outputs, 1)
        category = self.models.class_names[predicted.item()]
        logger.info(f"Image '{image_path}' classified as '{category}'.")
        return category

    # ----------------------
    # Detection / Severity
    # ----------------------
    @staticmethod
    def classify_severity(box: Tuple[int, int, int, int], image_height: int) -> str:
        x1, y1, x2, y2 = box
        area = (x2 - x1) * (y2 - y1)
        if area > 50000 or y2 > image_height * 0.75:
            return "High"
        elif area > 20000 or y2 > image_height * 0.5:
            return "Medium"
        else:
            return "Low"

    @staticmethod
    def draw_boxes_and_severity(image, results) -> None:
        for r in results:
            for i, box in enumerate(r.boxes.xyxy):
                x1, y1, x2, y2 = map(int, box.cpu().numpy())
                # Use the confidence of the current box, not always the first one
                conf = float(r.boxes.conf[i]) if hasattr(r.boxes, "conf") else 0.0
                severity = AIService.classify_severity((x1, y1, x2, y2), image.shape[0])
                color = (0, 255, 0) if severity == "Low" else (0, 255, 255) if severity == "Medium" else (0, 0, 255)
                cv2.rectangle(image, (x1, y1), (x2, y2), color, 2)
                cv2.putText(image, f"{severity} ({conf:.2f})", (x1, y1 - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.6, color, 2)

    def detect_pothole_severity(self, image_path: str, output_path: str = None) -> Tuple[str, str]:
        image = cv2.imread(image_path)
        results = self.models.detection_model(image)
        self.draw_boxes_and_severity(image, results)

        # Determine the highest severity across all detected boxes
        severities = []
        for r in results:
            for box in r.boxes.xyxy:
                severities.append(self.classify_severity(tuple(map(int, box.cpu().numpy())), image.shape[0]))

        if severities:
            if "High" in severities:
                severity = "High"
            elif "Medium" in severities:
                severity = "Medium"
            else:
                severity = "Low"
        else:
            severity = "Unknown"

        # Save annotated image
        if output_path:
            os.makedirs(os.path.dirname(output_path), exist_ok=True)
            cv2.imwrite(output_path, image)
        else:
            output_path = image_path

        logger.info(f"Pothole severity: {severity}, output image saved to '{output_path}'.")
        return severity, output_path
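For reviewers, a minimal usage sketch of the new service (not part of this commit). The image paths are placeholders, and it assumes the weight files referenced above exist under `backend/app/models/`:

```python
# Hypothetical usage sketch: exercising AIService directly.
# "uploads/report_123.jpg" and the output path are placeholder names.
from app.services.ai_service import AIModelManager, AIService

manager = AIModelManager()   # loads the ResNet-18 classifier and YOLO detector once
service = AIService(manager)

category = service.classify_category("uploads/report_123.jpg")  # e.g. "pothole"
severity, annotated_path = service.detect_pothole_severity(
    "uploads/report_123.jpg",
    output_path="uploads/annotated/report_123.jpg",
)
print(category, severity, annotated_path)
```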
backend/app/services/global_ai.py (Normal file, 43 lines added)
@@ -0,0 +1,43 @@
import os
from app.services.ai_service import AIModelManager, AIService
import logging
import random
from typing import Optional, Tuple

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# ----------------------
# Lazy-initialized AI service
# ----------------------
_ai_service: Optional[AIService] = None


def init_ai_service() -> AIService:
    """Initializes the AI service if not already initialized."""
    global _ai_service
    if _ai_service is None:
        logger.debug("Initializing AI service...")
        try:
            model_manager = AIModelManager()
            _ai_service = AIService(model_manager)
            logger.info("AI service ready.")
        except Exception as e:
            logger.warning(f"Failed to initialize AI service: {e}. Using mock service.")
            # Fall back to a mock AI service so callers still get responses
            _ai_service = MockAIService()
    return _ai_service


def get_ai_service() -> AIService:
    """Returns the initialized AI service."""
    return init_ai_service()


# Mock AI service for testing when models can't be loaded
class MockAIService:
    def classify_category(self, image_path: str) -> str:
        categories = ["pothole", "streetlight", "garbage", "signage", "drainage", "other"]
        return random.choice(categories)

    def detect_pothole_severity(self, image_path: str) -> Tuple[str, str]:
        severities = ["High", "Medium", "Low"]
        severity = random.choice(severities)
        return severity, image_path  # Return the same path as the annotated path
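A short caller sketch (hypothetical, not part of this commit) showing how the lazy accessor is meant to be used; whether the real service or the mock is behind it, the call sites stay identical:

```python
# Hypothetical caller sketch. get_ai_service() lazily builds the real AIService,
# or falls back to MockAIService if the model files cannot be loaded.
from app.services.global_ai import get_ai_service

ai = get_ai_service()  # first call initializes; later calls reuse the same instance
category = ai.classify_category("uploads/report_123.jpg")          # placeholder path
severity, annotated = ai.detect_pothole_severity("uploads/report_123.jpg")
```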
backend/app/services/ticket_service.py (Normal file, 103 lines added)
@@ -0,0 +1,103 @@
# app/services/ticket_service.py
import uuid
from typing import List, Optional
from sqlalchemy.orm import Session
from sqlalchemy.exc import NoResultFound
from app.models.ticket_model import User, Ticket, TicketAudit, TicketStatus, SeverityLevel
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


# ----------------------
# Ticket Service
# ----------------------
class TicketService:
    def __init__(self, db: Session):
        self.db = db

    # ------------------
    # User Operations
    # ------------------
    def create_user(self, name: str, email: str) -> User:
        user = User(name=name, email=email)
        self.db.add(user)
        self.db.commit()
        self.db.refresh(user)
        logger.info(f"Created user {user}")
        return user  # <-- return User object

    def get_user(self, user_id: str) -> Optional[User]:
        return self.db.query(User).filter(User.id == user_id).first()

    # ------------------
    # Ticket Operations
    # ------------------
    def create_ticket(
        self,
        user_id: str,
        image_path: str,
        category: str,
        severity: SeverityLevel,
        latitude: float,
        longitude: float,
        description: str = "",
    ) -> Ticket:
        ticket = Ticket(
            id=str(uuid.uuid4()),
            user_id=user_id,
            image_path=image_path,
            category=category,
            severity=severity,
            latitude=latitude,
            longitude=longitude,
            description=description,
        )
        self.db.add(ticket)
        self.db.commit()
        self.db.refresh(ticket)
        logger.info(f"Created ticket {ticket}")
        return ticket

    def update_ticket_status(self, ticket_id: str, new_status: TicketStatus) -> Ticket:
        ticket = self.db.query(Ticket).filter(Ticket.id == ticket_id).first()
        if not ticket:
            raise NoResultFound(f"Ticket with id {ticket_id} not found")

        # Log audit
        audit = TicketAudit(
            ticket_id=ticket.id,
            old_status=ticket.status,
            new_status=new_status,
        )
        self.db.add(audit)

        # Update status
        ticket.status = new_status
        self.db.commit()
        self.db.refresh(ticket)
        logger.info(f"Updated ticket {ticket.id} status to {new_status}")
        return ticket

    def get_ticket(self, ticket_id: str) -> Optional[Ticket]:
        return self.db.query(Ticket).filter(Ticket.id == ticket_id).first()

    def list_tickets(
        self,
        user_id: Optional[str] = None,
        category: Optional[str] = None,
        severity: Optional[SeverityLevel] = None,
        status: Optional[TicketStatus] = None
    ) -> List[Ticket]:
        query = self.db.query(Ticket)
        if user_id:
            query = query.filter(Ticket.user_id == user_id)
        if category:
            query = query.filter(Ticket.category == category)
        if severity:
            query = query.filter(Ticket.severity == severity)
        if status:
            query = query.filter(Ticket.status == status)
        return query.order_by(Ticket.created_at.desc()).all()
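A usage sketch of the ticket workflow (hypothetical, not part of this commit). `SessionLocal`, `SeverityLevel.HIGH`, and `TicketStatus.IN_PROGRESS` are assumed names; the actual session factory and enum members live in modules not shown in this diff:

```python
# Hypothetical usage sketch of TicketService against a SQLAlchemy session.
# SessionLocal and the enum member names below are assumptions, since
# app/models/ticket_model.py is not part of this diff.
from app.database import SessionLocal  # assumed session factory
from app.models.ticket_model import SeverityLevel, TicketStatus
from app.services.ticket_service import TicketService

db = SessionLocal()
try:
    svc = TicketService(db)
    user = svc.create_user(name="Jane Doe", email="jane@example.com")
    ticket = svc.create_ticket(
        user_id=user.id,
        image_path="uploads/annotated/report_123.jpg",  # placeholder path
        category="pothole",
        severity=SeverityLevel.HIGH,
        latitude=1.3521,
        longitude=103.8198,
        description="Large pothole near the junction",
    )
    svc.update_ticket_status(ticket.id, TicketStatus.IN_PROGRESS)
    open_tickets = svc.list_tickets(status=TicketStatus.IN_PROGRESS)
finally:
    db.close()
```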