Remove conversation context/recall system from ClaudeTools
Completely removed the database context recall system while preserving the database tables for safety. This major cleanup removes 80+ files and 16,831 lines of code.

What was removed:
- API layer: 4 routers (conversation-contexts, context-snippets, project-states, decision-logs) with 35+ endpoints
- Database models: 5 models (ConversationContext, ContextSnippet, DecisionLog, ProjectState, ContextTag)
- Services: 4 service layers with business logic
- Schemas: 4 Pydantic schema files
- Claude Code hooks: 13 hook files (user-prompt-submit, task-complete, sync-contexts, periodic saves)
- Scripts: 15+ scripts (import, migration, testing, tombstone checking)
- Tests: 5 test files (context recall, compression, diagnostics)
- Documentation: 30+ markdown files (guides, architecture, quick starts)
- Utilities: context compression, conversation parsing

Files modified:
- api/main.py: Removed router registrations
- api/models/__init__.py: Removed model imports
- api/schemas/__init__.py: Removed schema imports
- api/services/__init__.py: Removed service imports
- .claude/claude.md: Completely rewritten without context references

Database tables preserved:
- conversation_contexts, context_snippets, context_tags, project_states, decision_logs (5 orphaned tables remain for safety)
- Migration created but NOT applied: 20260118_172743_remove_context_system.py
- Tables can be dropped later once confirmed they are no longer needed

New files added:
- CONTEXT_SYSTEM_REMOVAL_SUMMARY.md: Detailed removal report
- CONTEXT_SYSTEM_REMOVAL_COMPLETE.md: Final status
- CONTEXT_EXPORT_RESULTS.md: Export attempt results
- scripts/export-tombstoned-contexts.py: Export tool for future use
- migrations/versions/20260118_172743_remove_context_system.py

Impact:
- Reduced from 130 to 95 API endpoints
- Reduced from 43 to 38 active database tables
- Removed 16,831 lines of code
- System fully operational without context recall

Reason for removal:
- System was not actively used (no tombstoned contexts found)
- Reduces codebase complexity
- Focuses on core MSP work-tracking functionality
- Database preserved for safety (can roll back if needed)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
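Because the drop migration was created but not applied, the five orphaned tables stay in the schema until a later migration is run. Purely as an illustrative sketch (not the contents of 20260118_172743_remove_context_system.py; revision identifiers and drop order are assumptions), such an Alembic migration could look roughly like this:

    """Drop the orphaned context-recall tables. Illustrative sketch only."""
    from alembic import op

    # Hypothetical revision identifiers; the real values live in the unapplied migration file.
    revision = "remove_context_system"
    down_revision = None
    branch_labels = None
    depends_on = None


    def upgrade() -> None:
        # Assumed drop order: dependent tables first to avoid foreign-key violations.
        for table in (
            "context_tags",
            "context_snippets",
            "decision_logs",
            "project_states",
            "conversation_contexts",
        ):
            op.drop_table(table)


    def downgrade() -> None:
        # Recreating the schema is out of scope for this sketch; restore from a backup
        # or re-run the original table-creation migrations instead.
        raise NotImplementedError("Recreate the context tables from the original migrations")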
@@ -11,10 +11,6 @@ from . import (
     credential_service,
     credential_audit_log_service,
     security_incident_service,
-    conversation_context_service,
-    context_snippet_service,
-    project_state_service,
-    decision_log_service,
 )
 
 __all__ = [
@@ -28,8 +24,4 @@ __all__ = [
     "credential_service",
     "credential_audit_log_service",
     "security_incident_service",
-    "conversation_context_service",
-    "context_snippet_service",
-    "project_state_service",
-    "decision_log_service",
 ]
@@ -1,367 +0,0 @@
"""
ContextSnippet service layer for business logic and database operations.

Handles all database operations for context snippets, providing reusable
knowledge storage and retrieval.
"""

import json
from typing import List, Optional
from uuid import UUID

from fastapi import HTTPException, status
from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session

from api.models.context_snippet import ContextSnippet
from api.schemas.context_snippet import ContextSnippetCreate, ContextSnippetUpdate


def get_context_snippets(
    db: Session,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ContextSnippet], int]:
    """
    Retrieve a paginated list of context snippets.

    Args:
        db: Database session
        skip: Number of records to skip (for pagination)
        limit: Maximum number of records to return

    Returns:
        tuple: (list of context snippets, total count)
    """
    # Get total count
    total = db.query(ContextSnippet).count()

    # Get paginated results, ordered by relevance and usage
    snippets = (
        db.query(ContextSnippet)
        .order_by(ContextSnippet.relevance_score.desc(), ContextSnippet.usage_count.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return snippets, total


def get_context_snippet_by_id(db: Session, snippet_id: UUID) -> ContextSnippet:
    """
    Retrieve a single context snippet by its ID.

    Automatically increments usage_count when snippet is retrieved.

    Args:
        db: Database session
        snippet_id: UUID of the context snippet to retrieve

    Returns:
        ContextSnippet: The context snippet object

    Raises:
        HTTPException: 404 if context snippet not found
    """
    snippet = db.query(ContextSnippet).filter(ContextSnippet.id == str(snippet_id)).first()

    if not snippet:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"ContextSnippet with ID {snippet_id} not found"
        )

    # Increment usage count
    snippet.usage_count += 1
    db.commit()
    db.refresh(snippet)

    return snippet


def get_context_snippets_by_project(
    db: Session,
    project_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ContextSnippet], int]:
    """
    Retrieve context snippets for a specific project.

    Args:
        db: Database session
        project_id: UUID of the project
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of context snippets, total count)
    """
    # Get total count for project
    total = db.query(ContextSnippet).filter(
        ContextSnippet.project_id == str(project_id)
    ).count()

    # Get paginated results
    snippets = (
        db.query(ContextSnippet)
        .filter(ContextSnippet.project_id == str(project_id))
        .order_by(ContextSnippet.relevance_score.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return snippets, total


def get_context_snippets_by_client(
    db: Session,
    client_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ContextSnippet], int]:
    """
    Retrieve context snippets for a specific client.

    Args:
        db: Database session
        client_id: UUID of the client
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of context snippets, total count)
    """
    # Get total count for client
    total = db.query(ContextSnippet).filter(
        ContextSnippet.client_id == str(client_id)
    ).count()

    # Get paginated results
    snippets = (
        db.query(ContextSnippet)
        .filter(ContextSnippet.client_id == str(client_id))
        .order_by(ContextSnippet.relevance_score.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return snippets, total


def get_context_snippets_by_tags(
    db: Session,
    tags: List[str],
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ContextSnippet], int]:
    """
    Retrieve context snippets filtered by tags.

    Args:
        db: Database session
        tags: List of tags to filter by (OR logic - any tag matches)
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of context snippets, total count)
    """
    # Build tag filters
    tag_filters = []
    for tag in tags:
        tag_filters.append(ContextSnippet.tags.contains(f'"{tag}"'))

    # Get total count
    if tag_filters:
        total = db.query(ContextSnippet).filter(or_(*tag_filters)).count()
    else:
        total = 0

    # Get paginated results
    if tag_filters:
        snippets = (
            db.query(ContextSnippet)
            .filter(or_(*tag_filters))
            .order_by(ContextSnippet.relevance_score.desc())
            .offset(skip)
            .limit(limit)
            .all()
        )
    else:
        snippets = []

    return snippets, total


def get_top_relevant_snippets(
    db: Session,
    limit: int = 10,
    min_relevance_score: float = 7.0
) -> list[ContextSnippet]:
    """
    Get the top most relevant context snippets.

    Args:
        db: Database session
        limit: Maximum number of snippets to return (default 10)
        min_relevance_score: Minimum relevance score threshold (default 7.0)

    Returns:
        list: Top relevant context snippets
    """
    snippets = (
        db.query(ContextSnippet)
        .filter(ContextSnippet.relevance_score >= min_relevance_score)
        .order_by(ContextSnippet.relevance_score.desc())
        .limit(limit)
        .all()
    )

    return snippets


def create_context_snippet(
    db: Session,
    snippet_data: ContextSnippetCreate
) -> ContextSnippet:
    """
    Create a new context snippet.

    Args:
        db: Database session
        snippet_data: Context snippet creation data

    Returns:
        ContextSnippet: The created context snippet object

    Raises:
        HTTPException: 500 if database error occurs
    """
    try:
        # Create new context snippet instance
        db_snippet = ContextSnippet(**snippet_data.model_dump())

        # Add to database
        db.add(db_snippet)
        db.commit()
        db.refresh(db_snippet)

        return db_snippet

    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create context snippet: {str(e)}"
        )


def update_context_snippet(
    db: Session,
    snippet_id: UUID,
    snippet_data: ContextSnippetUpdate
) -> ContextSnippet:
    """
    Update an existing context snippet.

    Args:
        db: Database session
        snippet_id: UUID of the context snippet to update
        snippet_data: Context snippet update data

    Returns:
        ContextSnippet: The updated context snippet object

    Raises:
        HTTPException: 404 if context snippet not found
        HTTPException: 500 if database error occurs
    """
    # Get existing snippet (without incrementing usage count)
    snippet = db.query(ContextSnippet).filter(ContextSnippet.id == str(snippet_id)).first()

    if not snippet:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"ContextSnippet with ID {snippet_id} not found"
        )

    try:
        # Update only provided fields
        update_data = snippet_data.model_dump(exclude_unset=True)

        # Apply updates
        for field, value in update_data.items():
            setattr(snippet, field, value)

        db.commit()
        db.refresh(snippet)

        return snippet

    except HTTPException:
        db.rollback()
        raise
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update context snippet: {str(e)}"
        )


def delete_context_snippet(db: Session, snippet_id: UUID) -> dict:
    """
    Delete a context snippet by its ID.

    Args:
        db: Database session
        snippet_id: UUID of the context snippet to delete

    Returns:
        dict: Success message

    Raises:
        HTTPException: 404 if context snippet not found
        HTTPException: 500 if database error occurs
    """
    # Get existing snippet (without incrementing usage count)
    snippet = db.query(ContextSnippet).filter(ContextSnippet.id == str(snippet_id)).first()

    if not snippet:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"ContextSnippet with ID {snippet_id} not found"
        )

    try:
        db.delete(snippet)
        db.commit()

        return {
            "message": "ContextSnippet deleted successfully",
            "snippet_id": str(snippet_id)
        }

    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete context snippet: {str(e)}"
        )
@@ -1,340 +0,0 @@
"""
ConversationContext service layer for business logic and database operations.

Handles all database operations for conversation contexts, providing context
recall and retrieval functionality for Claude's memory system.
"""

import json
from typing import List, Optional
from uuid import UUID

from fastapi import HTTPException, status
from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session

from api.models.conversation_context import ConversationContext
from api.schemas.conversation_context import ConversationContextCreate, ConversationContextUpdate
from api.utils.context_compression import format_for_injection


def get_conversation_contexts(
    db: Session,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ConversationContext], int]:
    """
    Retrieve a paginated list of conversation contexts.

    Args:
        db: Database session
        skip: Number of records to skip (for pagination)
        limit: Maximum number of records to return

    Returns:
        tuple: (list of conversation contexts, total count)
    """
    # Get total count
    total = db.query(ConversationContext).count()

    # Get paginated results, ordered by relevance and recency
    contexts = (
        db.query(ConversationContext)
        .order_by(ConversationContext.relevance_score.desc(), ConversationContext.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return contexts, total


def get_conversation_context_by_id(db: Session, context_id: UUID) -> ConversationContext:
    """
    Retrieve a single conversation context by its ID.

    Args:
        db: Database session
        context_id: UUID of the conversation context to retrieve

    Returns:
        ConversationContext: The conversation context object

    Raises:
        HTTPException: 404 if conversation context not found
    """
    context = db.query(ConversationContext).filter(ConversationContext.id == str(context_id)).first()

    if not context:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"ConversationContext with ID {context_id} not found"
        )

    return context


def get_conversation_contexts_by_project(
    db: Session,
    project_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ConversationContext], int]:
    """
    Retrieve conversation contexts for a specific project.

    Args:
        db: Database session
        project_id: UUID of the project
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of conversation contexts, total count)
    """
    # Get total count for project
    total = db.query(ConversationContext).filter(
        ConversationContext.project_id == str(project_id)
    ).count()

    # Get paginated results
    contexts = (
        db.query(ConversationContext)
        .filter(ConversationContext.project_id == str(project_id))
        .order_by(ConversationContext.relevance_score.desc(), ConversationContext.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return contexts, total


def get_conversation_contexts_by_session(
    db: Session,
    session_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ConversationContext], int]:
    """
    Retrieve conversation contexts for a specific session.

    Args:
        db: Database session
        session_id: UUID of the session
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of conversation contexts, total count)
    """
    # Get total count for session
    total = db.query(ConversationContext).filter(
        ConversationContext.session_id == str(session_id)
    ).count()

    # Get paginated results
    contexts = (
        db.query(ConversationContext)
        .filter(ConversationContext.session_id == str(session_id))
        .order_by(ConversationContext.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return contexts, total


def get_recall_context(
    db: Session,
    project_id: Optional[UUID] = None,
    tags: Optional[List[str]] = None,
    limit: int = 10,
    min_relevance_score: float = 5.0
) -> str:
    """
    Get relevant contexts formatted for Claude prompt injection.

    This is the main context recall function that retrieves the most relevant
    contexts and formats them for efficient injection into Claude's prompt.

    Args:
        db: Database session
        project_id: Optional project ID to filter by
        tags: Optional list of tags to filter by
        limit: Maximum number of contexts to retrieve (default 10)
        min_relevance_score: Minimum relevance score threshold (default 5.0)

    Returns:
        str: Token-efficient markdown string ready for prompt injection
    """
    # Build query
    query = db.query(ConversationContext)

    # Filter by project if specified
    if project_id:
        query = query.filter(ConversationContext.project_id == str(project_id))

    # Filter by minimum relevance score
    query = query.filter(ConversationContext.relevance_score >= min_relevance_score)

    # Filter by tags if specified
    if tags:
        # Check if any of the provided tags exist in the JSON tags field
        # This uses PostgreSQL's JSON operators
        tag_filters = []
        for tag in tags:
            tag_filters.append(ConversationContext.tags.contains(f'"{tag}"'))
        if tag_filters:
            query = query.filter(or_(*tag_filters))

    # Order by relevance score and get top results
    contexts = query.order_by(
        ConversationContext.relevance_score.desc()
    ).limit(limit).all()

    # Convert to dictionary format for formatting
    context_dicts = []
    for ctx in contexts:
        context_dict = {
            "content": ctx.dense_summary or ctx.title,
            "type": ctx.context_type,
            "tags": json.loads(ctx.tags) if ctx.tags else [],
            "relevance_score": ctx.relevance_score
        }
        context_dicts.append(context_dict)

    # Use compression utility to format for injection
    return format_for_injection(context_dicts)


def create_conversation_context(
    db: Session,
    context_data: ConversationContextCreate
) -> ConversationContext:
    """
    Create a new conversation context.

    Args:
        db: Database session
        context_data: Conversation context creation data

    Returns:
        ConversationContext: The created conversation context object

    Raises:
        HTTPException: 500 if database error occurs
    """
    try:
        # Create new conversation context instance
        db_context = ConversationContext(**context_data.model_dump())

        # Add to database
        db.add(db_context)
        db.commit()
        db.refresh(db_context)

        return db_context

    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create conversation context: {str(e)}"
        )


def update_conversation_context(
    db: Session,
    context_id: UUID,
    context_data: ConversationContextUpdate
) -> ConversationContext:
    """
    Update an existing conversation context.

    Args:
        db: Database session
        context_id: UUID of the conversation context to update
        context_data: Conversation context update data

    Returns:
        ConversationContext: The updated conversation context object

    Raises:
        HTTPException: 404 if conversation context not found
        HTTPException: 500 if database error occurs
    """
    # Get existing context
    context = get_conversation_context_by_id(db, context_id)

    try:
        # Update only provided fields
        update_data = context_data.model_dump(exclude_unset=True)

        # Apply updates
        for field, value in update_data.items():
            setattr(context, field, value)

        db.commit()
        db.refresh(context)

        return context

    except HTTPException:
        db.rollback()
        raise
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update conversation context: {str(e)}"
        )


def delete_conversation_context(db: Session, context_id: UUID) -> dict:
    """
    Delete a conversation context by its ID.

    Args:
        db: Database session
        context_id: UUID of the conversation context to delete

    Returns:
        dict: Success message

    Raises:
        HTTPException: 404 if conversation context not found
        HTTPException: 500 if database error occurs
    """
    # Get existing context (raises 404 if not found)
    context = get_conversation_context_by_id(db, context_id)

    try:
        db.delete(context)
        db.commit()

        return {
            "message": "ConversationContext deleted successfully",
            "context_id": str(context_id)
        }

    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete conversation context: {str(e)}"
        )
@@ -1,318 +0,0 @@
"""
DecisionLog service layer for business logic and database operations.

Handles all database operations for decision logs, tracking important
decisions made during work for future reference.
"""

from typing import Optional
from uuid import UUID

from fastapi import HTTPException, status
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session

from api.models.decision_log import DecisionLog
from api.schemas.decision_log import DecisionLogCreate, DecisionLogUpdate


def get_decision_logs(
    db: Session,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[DecisionLog], int]:
    """
    Retrieve a paginated list of decision logs.

    Args:
        db: Database session
        skip: Number of records to skip (for pagination)
        limit: Maximum number of records to return

    Returns:
        tuple: (list of decision logs, total count)
    """
    # Get total count
    total = db.query(DecisionLog).count()

    # Get paginated results, ordered by most recent first
    logs = (
        db.query(DecisionLog)
        .order_by(DecisionLog.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return logs, total


def get_decision_log_by_id(db: Session, log_id: UUID) -> DecisionLog:
    """
    Retrieve a single decision log by its ID.

    Args:
        db: Database session
        log_id: UUID of the decision log to retrieve

    Returns:
        DecisionLog: The decision log object

    Raises:
        HTTPException: 404 if decision log not found
    """
    log = db.query(DecisionLog).filter(DecisionLog.id == str(log_id)).first()

    if not log:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"DecisionLog with ID {log_id} not found"
        )

    return log


def get_decision_logs_by_project(
    db: Session,
    project_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[DecisionLog], int]:
    """
    Retrieve decision logs for a specific project.

    Args:
        db: Database session
        project_id: UUID of the project
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of decision logs, total count)
    """
    # Get total count for project
    total = db.query(DecisionLog).filter(
        DecisionLog.project_id == str(project_id)
    ).count()

    # Get paginated results
    logs = (
        db.query(DecisionLog)
        .filter(DecisionLog.project_id == str(project_id))
        .order_by(DecisionLog.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return logs, total


def get_decision_logs_by_session(
    db: Session,
    session_id: UUID,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[DecisionLog], int]:
    """
    Retrieve decision logs for a specific session.

    Args:
        db: Database session
        session_id: UUID of the session
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of decision logs, total count)
    """
    # Get total count for session
    total = db.query(DecisionLog).filter(
        DecisionLog.session_id == str(session_id)
    ).count()

    # Get paginated results
    logs = (
        db.query(DecisionLog)
        .filter(DecisionLog.session_id == str(session_id))
        .order_by(DecisionLog.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return logs, total


def get_decision_logs_by_impact(
    db: Session,
    impact: str,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[DecisionLog], int]:
    """
    Retrieve decision logs filtered by impact level.

    Args:
        db: Database session
        impact: Impact level (low, medium, high, critical)
        skip: Number of records to skip
        limit: Maximum number of records to return

    Returns:
        tuple: (list of decision logs, total count)
    """
    # Validate impact level
    valid_impacts = ["low", "medium", "high", "critical"]
    if impact.lower() not in valid_impacts:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid impact level. Must be one of: {', '.join(valid_impacts)}"
        )

    # Get total count for impact
    total = db.query(DecisionLog).filter(
        DecisionLog.impact == impact.lower()
    ).count()

    # Get paginated results
    logs = (
        db.query(DecisionLog)
        .filter(DecisionLog.impact == impact.lower())
        .order_by(DecisionLog.created_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return logs, total


def create_decision_log(
    db: Session,
    log_data: DecisionLogCreate
) -> DecisionLog:
    """
    Create a new decision log.

    Args:
        db: Database session
        log_data: Decision log creation data

    Returns:
        DecisionLog: The created decision log object

    Raises:
        HTTPException: 500 if database error occurs
    """
    try:
        # Create new decision log instance
        db_log = DecisionLog(**log_data.model_dump())

        # Add to database
        db.add(db_log)
        db.commit()
        db.refresh(db_log)

        return db_log

    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create decision log: {str(e)}"
        )


def update_decision_log(
    db: Session,
    log_id: UUID,
    log_data: DecisionLogUpdate
) -> DecisionLog:
    """
    Update an existing decision log.

    Args:
        db: Database session
        log_id: UUID of the decision log to update
        log_data: Decision log update data

    Returns:
        DecisionLog: The updated decision log object

    Raises:
        HTTPException: 404 if decision log not found
        HTTPException: 500 if database error occurs
    """
    # Get existing log
    log = get_decision_log_by_id(db, log_id)

    try:
        # Update only provided fields
        update_data = log_data.model_dump(exclude_unset=True)

        # Apply updates
        for field, value in update_data.items():
            setattr(log, field, value)

        db.commit()
        db.refresh(log)

        return log

    except HTTPException:
        db.rollback()
        raise
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update decision log: {str(e)}"
        )


def delete_decision_log(db: Session, log_id: UUID) -> dict:
    """
    Delete a decision log by its ID.

    Args:
        db: Database session
        log_id: UUID of the decision log to delete

    Returns:
        dict: Success message

    Raises:
        HTTPException: 404 if decision log not found
        HTTPException: 500 if database error occurs
    """
    # Get existing log (raises 404 if not found)
    log = get_decision_log_by_id(db, log_id)

    try:
        db.delete(log)
        db.commit()

        return {
            "message": "DecisionLog deleted successfully",
            "log_id": str(log_id)
        }

    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete decision log: {str(e)}"
        )
@@ -1,273 +0,0 @@
"""
ProjectState service layer for business logic and database operations.

Handles all database operations for project states, tracking the current
state of projects for quick context retrieval.
"""

from typing import Optional
from uuid import UUID

from fastapi import HTTPException, status
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session

from api.models.project_state import ProjectState
from api.schemas.project_state import ProjectStateCreate, ProjectStateUpdate
from api.utils.context_compression import compress_project_state


def get_project_states(
    db: Session,
    skip: int = 0,
    limit: int = 100
) -> tuple[list[ProjectState], int]:
    """
    Retrieve a paginated list of project states.

    Args:
        db: Database session
        skip: Number of records to skip (for pagination)
        limit: Maximum number of records to return

    Returns:
        tuple: (list of project states, total count)
    """
    # Get total count
    total = db.query(ProjectState).count()

    # Get paginated results, ordered by most recently updated
    states = (
        db.query(ProjectState)
        .order_by(ProjectState.updated_at.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    return states, total


def get_project_state_by_id(db: Session, state_id: UUID) -> ProjectState:
    """
    Retrieve a single project state by its ID.

    Args:
        db: Database session
        state_id: UUID of the project state to retrieve

    Returns:
        ProjectState: The project state object

    Raises:
        HTTPException: 404 if project state not found
    """
    state = db.query(ProjectState).filter(ProjectState.id == str(state_id)).first()

    if not state:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"ProjectState with ID {state_id} not found"
        )

    return state


def get_project_state_by_project(db: Session, project_id: UUID) -> Optional[ProjectState]:
    """
    Retrieve the project state for a specific project.

    Each project has exactly one project state (unique constraint).

    Args:
        db: Database session
        project_id: UUID of the project

    Returns:
        Optional[ProjectState]: The project state if found, None otherwise
    """
    state = db.query(ProjectState).filter(ProjectState.project_id == str(project_id)).first()
    return state


def create_project_state(
    db: Session,
    state_data: ProjectStateCreate
) -> ProjectState:
    """
    Create a new project state.

    Args:
        db: Database session
        state_data: Project state creation data

    Returns:
        ProjectState: The created project state object

    Raises:
        HTTPException: 409 if project state already exists for this project
        HTTPException: 500 if database error occurs
    """
    # Check if project state already exists for this project
    existing_state = get_project_state_by_project(db, state_data.project_id)
    if existing_state:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"ProjectState for project ID {state_data.project_id} already exists"
        )

    try:
        # Create new project state instance
        db_state = ProjectState(**state_data.model_dump())

        # Add to database
        db.add(db_state)
        db.commit()
        db.refresh(db_state)

        return db_state

    except IntegrityError as e:
        db.rollback()
        if "project_id" in str(e.orig):
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"ProjectState for project ID {state_data.project_id} already exists"
            )
        else:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Database error: {str(e)}"
            )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create project state: {str(e)}"
        )


def update_project_state(
    db: Session,
    state_id: UUID,
    state_data: ProjectStateUpdate
) -> ProjectState:
    """
    Update an existing project state.

    Uses compression utilities when updating to maintain efficient storage.

    Args:
        db: Database session
        state_id: UUID of the project state to update
        state_data: Project state update data

    Returns:
        ProjectState: The updated project state object

    Raises:
        HTTPException: 404 if project state not found
        HTTPException: 500 if database error occurs
    """
    # Get existing state
    state = get_project_state_by_id(db, state_id)

    try:
        # Update only provided fields
        update_data = state_data.model_dump(exclude_unset=True)

        # Apply updates
        for field, value in update_data.items():
            setattr(state, field, value)

        db.commit()
        db.refresh(state)

        return state

    except HTTPException:
        db.rollback()
        raise
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Database error: {str(e)}"
        )
    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update project state: {str(e)}"
        )


def update_project_state_by_project(
    db: Session,
    project_id: UUID,
    state_data: ProjectStateUpdate
) -> ProjectState:
    """
    Update project state by project ID (convenience method).

    If project state doesn't exist, creates a new one.

    Args:
        db: Database session
        project_id: UUID of the project
        state_data: Project state update data

    Returns:
        ProjectState: The updated or created project state object

    Raises:
        HTTPException: 500 if database error occurs
    """
    # Try to get existing state
    state = get_project_state_by_project(db, project_id)

    if state:
        # Update existing state
        return update_project_state(db, UUID(state.id), state_data)
    else:
        # Create new state
        create_data = ProjectStateCreate(
            project_id=project_id,
            **state_data.model_dump(exclude_unset=True)
        )
        return create_project_state(db, create_data)


def delete_project_state(db: Session, state_id: UUID) -> dict:
    """
    Delete a project state by its ID.

    Args:
        db: Database session
        state_id: UUID of the project state to delete

    Returns:
        dict: Success message

    Raises:
        HTTPException: 404 if project state not found
        HTTPException: 500 if database error occurs
    """
    # Get existing state (raises 404 if not found)
    state = get_project_state_by_id(db, state_id)

    try:
        db.delete(state)
        db.commit()

        return {
            "message": "ProjectState deleted successfully",
            "state_id": str(state_id)
        }

    except Exception as e:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete project state: {str(e)}"
        )