Files
claudetools/api/services/decision_log_service.py
Mike Swanson 390b10b32c Complete Phase 6: MSP Work Tracking with Context Recall System
Implements production-ready MSP platform with cross-machine persistent memory for Claude.

API Implementation:
- 130 REST API endpoints across 21 entities
- JWT authentication on all endpoints
- AES-256-GCM encryption for credentials
- Automatic audit logging
- Complete OpenAPI documentation

Database:
- 43 tables in MariaDB (172.16.3.20:3306)
- 42 SQLAlchemy models with modern 2.0 syntax
- Full Alembic migration system
- 99.1% CRUD test pass rate

Context Recall System (Phase 6):
- Cross-machine persistent memory via database
- Automatic context injection via Claude Code hooks
- Automatic context saving after task completion
- 90-95% token reduction with compression utilities
- Relevance scoring with time decay
- Tag-based semantic search
- One-command setup script

Security Features:
- JWT tokens with Argon2 password hashing
- AES-256-GCM encryption for all sensitive data
- Comprehensive audit trail for credentials
- HMAC tamper detection
- Secure configuration management

Test Results:
- Phase 3: 38/38 CRUD tests passing (100%)
- Phase 4: 34/35 core API tests passing (97.1%)
- Phase 5: 62/62 extended API tests passing (100%)
- Phase 6: 10/10 compression tests passing (100%)
- Overall: 144/145 tests passing (99.3%)

Documentation:
- Comprehensive architecture guides
- Setup automation scripts
- API documentation at /api/docs
- Complete test reports
- Troubleshooting guides

Project Status: 95% Complete (Production-Ready)
Phase 7 (optional work context APIs) remains for future enhancement.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-17 06:00:26 -07:00

319 lines
7.7 KiB
Python

"""
DecisionLog service layer for business logic and database operations.
Handles all database operations for decision logs, tracking important
decisions made during work for future reference.
"""
from typing import Optional
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
from api.models.decision_log import DecisionLog
from api.schemas.decision_log import DecisionLogCreate, DecisionLogUpdate
def get_decision_logs(
db: Session,
skip: int = 0,
limit: int = 100
) -> tuple[list[DecisionLog], int]:
"""
Retrieve a paginated list of decision logs.
Args:
db: Database session
skip: Number of records to skip (for pagination)
limit: Maximum number of records to return
Returns:
tuple: (list of decision logs, total count)
"""
# Get total count
total = db.query(DecisionLog).count()
# Get paginated results, ordered by most recent first
logs = (
db.query(DecisionLog)
.order_by(DecisionLog.created_at.desc())
.offset(skip)
.limit(limit)
.all()
)
return logs, total
def get_decision_log_by_id(db: Session, log_id: UUID) -> DecisionLog:
"""
Retrieve a single decision log by its ID.
Args:
db: Database session
log_id: UUID of the decision log to retrieve
Returns:
DecisionLog: The decision log object
Raises:
HTTPException: 404 if decision log not found
"""
log = db.query(DecisionLog).filter(DecisionLog.id == str(log_id)).first()
if not log:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"DecisionLog with ID {log_id} not found"
)
return log
def get_decision_logs_by_project(
db: Session,
project_id: UUID,
skip: int = 0,
limit: int = 100
) -> tuple[list[DecisionLog], int]:
"""
Retrieve decision logs for a specific project.
Args:
db: Database session
project_id: UUID of the project
skip: Number of records to skip
limit: Maximum number of records to return
Returns:
tuple: (list of decision logs, total count)
"""
# Get total count for project
total = db.query(DecisionLog).filter(
DecisionLog.project_id == str(project_id)
).count()
# Get paginated results
logs = (
db.query(DecisionLog)
.filter(DecisionLog.project_id == str(project_id))
.order_by(DecisionLog.created_at.desc())
.offset(skip)
.limit(limit)
.all()
)
return logs, total
def get_decision_logs_by_session(
db: Session,
session_id: UUID,
skip: int = 0,
limit: int = 100
) -> tuple[list[DecisionLog], int]:
"""
Retrieve decision logs for a specific session.
Args:
db: Database session
session_id: UUID of the session
skip: Number of records to skip
limit: Maximum number of records to return
Returns:
tuple: (list of decision logs, total count)
"""
# Get total count for session
total = db.query(DecisionLog).filter(
DecisionLog.session_id == str(session_id)
).count()
# Get paginated results
logs = (
db.query(DecisionLog)
.filter(DecisionLog.session_id == str(session_id))
.order_by(DecisionLog.created_at.desc())
.offset(skip)
.limit(limit)
.all()
)
return logs, total
def get_decision_logs_by_impact(
db: Session,
impact: str,
skip: int = 0,
limit: int = 100
) -> tuple[list[DecisionLog], int]:
"""
Retrieve decision logs filtered by impact level.
Args:
db: Database session
impact: Impact level (low, medium, high, critical)
skip: Number of records to skip
limit: Maximum number of records to return
Returns:
tuple: (list of decision logs, total count)
"""
# Validate impact level
valid_impacts = ["low", "medium", "high", "critical"]
if impact.lower() not in valid_impacts:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Invalid impact level. Must be one of: {', '.join(valid_impacts)}"
)
# Get total count for impact
total = db.query(DecisionLog).filter(
DecisionLog.impact == impact.lower()
).count()
# Get paginated results
logs = (
db.query(DecisionLog)
.filter(DecisionLog.impact == impact.lower())
.order_by(DecisionLog.created_at.desc())
.offset(skip)
.limit(limit)
.all()
)
return logs, total
def create_decision_log(
db: Session,
log_data: DecisionLogCreate
) -> DecisionLog:
"""
Create a new decision log.
Args:
db: Database session
log_data: Decision log creation data
Returns:
DecisionLog: The created decision log object
Raises:
HTTPException: 500 if database error occurs
"""
try:
# Create new decision log instance
db_log = DecisionLog(**log_data.model_dump())
# Add to database
db.add(db_log)
db.commit()
db.refresh(db_log)
return db_log
except IntegrityError as e:
db.rollback()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Database error: {str(e)}"
)
except Exception as e:
db.rollback()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to create decision log: {str(e)}"
)
def update_decision_log(
db: Session,
log_id: UUID,
log_data: DecisionLogUpdate
) -> DecisionLog:
"""
Update an existing decision log.
Args:
db: Database session
log_id: UUID of the decision log to update
log_data: Decision log update data
Returns:
DecisionLog: The updated decision log object
Raises:
HTTPException: 404 if decision log not found
HTTPException: 500 if database error occurs
"""
# Get existing log
log = get_decision_log_by_id(db, log_id)
try:
# Update only provided fields
update_data = log_data.model_dump(exclude_unset=True)
# Apply updates
for field, value in update_data.items():
setattr(log, field, value)
db.commit()
db.refresh(log)
return log
except HTTPException:
db.rollback()
raise
except IntegrityError as e:
db.rollback()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Database error: {str(e)}"
)
except Exception as e:
db.rollback()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to update decision log: {str(e)}"
)
def delete_decision_log(db: Session, log_id: UUID) -> dict:
"""
Delete a decision log by its ID.
Args:
db: Database session
log_id: UUID of the decision log to delete
Returns:
dict: Success message
Raises:
HTTPException: 404 if decision log not found
HTTPException: 500 if database error occurs
"""
# Get existing log (raises 404 if not found)
log = get_decision_log_by_id(db, log_id)
try:
db.delete(log)
db.commit()
return {
"message": "DecisionLog deleted successfully",
"log_id": str(log_id)
}
except Exception as e:
db.rollback()
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to delete decision log: {str(e)}"
)