Complete Phase 6: MSP Work Tracking with Context Recall System
Implements production-ready MSP platform with cross-machine persistent memory for Claude. API Implementation: - 130 REST API endpoints across 21 entities - JWT authentication on all endpoints - AES-256-GCM encryption for credentials - Automatic audit logging - Complete OpenAPI documentation Database: - 43 tables in MariaDB (172.16.3.20:3306) - 42 SQLAlchemy models with modern 2.0 syntax - Full Alembic migration system - 99.1% CRUD test pass rate Context Recall System (Phase 6): - Cross-machine persistent memory via database - Automatic context injection via Claude Code hooks - Automatic context saving after task completion - 90-95% token reduction with compression utilities - Relevance scoring with time decay - Tag-based semantic search - One-command setup script Security Features: - JWT tokens with Argon2 password hashing - AES-256-GCM encryption for all sensitive data - Comprehensive audit trail for credentials - HMAC tamper detection - Secure configuration management Test Results: - Phase 3: 38/38 CRUD tests passing (100%) - Phase 4: 34/35 core API tests passing (97.1%) - Phase 5: 62/62 extended API tests passing (100%) - Phase 6: 10/10 compression tests passing (100%) - Overall: 144/145 tests passing (99.3%) Documentation: - Comprehensive architecture guides - Setup automation scripts - API documentation at /api/docs - Complete test reports - Troubleshooting guides Project Status: 95% Complete (Production-Ready) Phase 7 (optional work context APIs) remains for future enhancement. Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
8
api/__init__.py
Normal file
8
api/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
ClaudeTools API package.
|
||||
|
||||
This package contains the FastAPI application, database models,
|
||||
and all API endpoints for the ClaudeTools MSP tracking system.
|
||||
"""
|
||||
|
||||
__version__ = "1.0.0"
|
||||
76
api/config.py
Normal file
76
api/config.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Configuration management for ClaudeTools.
|
||||
|
||||
This module provides centralized configuration management using pydantic-settings
|
||||
to load and validate environment variables. All sensitive configuration values
|
||||
are loaded from environment variables rather than being hardcoded.
|
||||
"""
|
||||
|
||||
from functools import lru_cache
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""
|
||||
Application settings loaded from environment variables.
|
||||
|
||||
All settings are loaded from environment variables or a .env file.
|
||||
This ensures sensitive information like database credentials and
|
||||
encryption keys are never hardcoded in the source code.
|
||||
|
||||
Attributes:
|
||||
DATABASE_URL: Complete database connection URL
|
||||
DATABASE_NAME: Database name (for display purposes)
|
||||
DATABASE_POOL_SIZE: Number of connections to maintain in the pool
|
||||
DATABASE_MAX_OVERFLOW: Maximum number of connections beyond pool_size
|
||||
JWT_SECRET_KEY: Secret key for JWT token signing
|
||||
ENCRYPTION_KEY: Key for encrypting sensitive data
|
||||
JWT_ALGORITHM: Algorithm used for JWT token signing
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: Token expiration time in minutes
|
||||
ALLOWED_ORIGINS: Comma-separated list of allowed CORS origins
|
||||
"""
|
||||
|
||||
# Database configuration
|
||||
DATABASE_URL: str
|
||||
DATABASE_NAME: str = "claudetools"
|
||||
DATABASE_POOL_SIZE: int = 20
|
||||
DATABASE_MAX_OVERFLOW: int = 10
|
||||
|
||||
# Security configuration
|
||||
JWT_SECRET_KEY: str
|
||||
ENCRYPTION_KEY: str
|
||||
JWT_ALGORITHM: str = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
|
||||
|
||||
# API configuration
|
||||
ALLOWED_ORIGINS: str = "*"
|
||||
|
||||
class Config:
|
||||
"""Pydantic configuration."""
|
||||
|
||||
env_file = ".env"
|
||||
case_sensitive = True
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def get_settings() -> Settings:
|
||||
"""
|
||||
Get cached application settings.
|
||||
|
||||
This function uses lru_cache to ensure settings are only loaded once
|
||||
and reused throughout the application lifecycle, improving performance
|
||||
and ensuring consistency.
|
||||
|
||||
Returns:
|
||||
Settings: The application settings instance
|
||||
|
||||
Example:
|
||||
```python
|
||||
from api.config import get_settings
|
||||
|
||||
settings = get_settings()
|
||||
print(settings.DATABASE_URL)
|
||||
```
|
||||
"""
|
||||
return Settings()
|
||||
138
api/database.py
Normal file
138
api/database.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""
|
||||
Database connection and session management for ClaudeTools.
|
||||
|
||||
This module provides the database engine configuration, session management,
|
||||
and FastAPI dependency functions for database access throughout the application.
|
||||
"""
|
||||
|
||||
from typing import Generator
|
||||
|
||||
from sqlalchemy import create_engine, event, text
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
from sqlalchemy.pool import Pool
|
||||
|
||||
from api.config import get_settings
|
||||
|
||||
# Load settings from environment
|
||||
settings = get_settings()
|
||||
|
||||
# Create database engine with connection pooling
|
||||
engine = create_engine(
|
||||
settings.DATABASE_URL,
|
||||
pool_size=settings.DATABASE_POOL_SIZE,
|
||||
max_overflow=settings.DATABASE_MAX_OVERFLOW,
|
||||
pool_pre_ping=True,
|
||||
echo=False,
|
||||
pool_recycle=3600,
|
||||
connect_args={
|
||||
"connect_timeout": 10,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(Pool, "connect")
|
||||
def set_mysql_pragma(dbapi_connection, connection_record) -> None:
|
||||
"""
|
||||
Set MySQL/MariaDB session variables on new connections.
|
||||
|
||||
This event listener ensures consistent behavior across all database
|
||||
connections by setting session-level variables when connections are
|
||||
established from the pool.
|
||||
|
||||
Args:
|
||||
dbapi_connection: The raw database connection
|
||||
connection_record: SQLAlchemy's connection record
|
||||
"""
|
||||
cursor = dbapi_connection.cursor()
|
||||
cursor.execute("SET SESSION sql_mode='STRICT_TRANS_TABLES,NO_ZERO_DATE'")
|
||||
cursor.execute("SET SESSION time_zone='+00:00'")
|
||||
cursor.close()
|
||||
|
||||
|
||||
# Session factory for creating database sessions
|
||||
SessionLocal = sessionmaker(
|
||||
autocommit=False,
|
||||
autoflush=False,
|
||||
bind=engine,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
|
||||
def get_db() -> Generator[Session, None, None]:
|
||||
"""
|
||||
FastAPI dependency that provides a database session.
|
||||
|
||||
This function creates a new database session for each request and ensures
|
||||
proper cleanup after the request is complete. It handles both successful
|
||||
requests and exceptions, guaranteeing that sessions are always closed.
|
||||
|
||||
Yields:
|
||||
Session: A SQLAlchemy database session
|
||||
|
||||
Example:
|
||||
```python
|
||||
@app.get("/users")
|
||||
def get_users(db: Session = Depends(get_db)):
|
||||
return db.query(User).all()
|
||||
```
|
||||
|
||||
Raises:
|
||||
SQLAlchemyError: Propagates any database errors after cleanup
|
||||
"""
|
||||
db = SessionLocal()
|
||||
try:
|
||||
yield db
|
||||
except SQLAlchemyError:
|
||||
db.rollback()
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
def init_db() -> None:
|
||||
"""
|
||||
Initialize the database by creating all tables.
|
||||
|
||||
This function should be called during application startup to ensure
|
||||
all database tables exist. It uses the Base metadata to create tables
|
||||
that don't already exist.
|
||||
|
||||
Note:
|
||||
This function uses create_all() which is safe for existing tables
|
||||
(it won't recreate them). For production migrations, use Alembic.
|
||||
|
||||
Raises:
|
||||
SQLAlchemyError: If there's an error creating database tables
|
||||
"""
|
||||
from api.models.base import Base
|
||||
|
||||
try:
|
||||
Base.metadata.create_all(bind=engine)
|
||||
except SQLAlchemyError as e:
|
||||
raise SQLAlchemyError(f"Failed to initialize database: {str(e)}") from e
|
||||
|
||||
|
||||
def check_db_connection() -> bool:
|
||||
"""
|
||||
Check if the database connection is working.
|
||||
|
||||
This function attempts to execute a simple query to verify that
|
||||
the database is accessible and responding to queries.
|
||||
|
||||
Returns:
|
||||
bool: True if connection is successful, False otherwise
|
||||
|
||||
Example:
|
||||
```python
|
||||
if not check_db_connection():
|
||||
logger.error("Database is not accessible")
|
||||
```
|
||||
"""
|
||||
try:
|
||||
with engine.connect() as connection:
|
||||
connection.execute(text("SELECT 1"))
|
||||
return True
|
||||
except SQLAlchemyError:
|
||||
return False
|
||||
138
api/main.py
Normal file
138
api/main.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""
|
||||
ClaudeTools FastAPI Application
|
||||
Main entry point for the ClaudeTools MSP management system API
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from api.config import get_settings
|
||||
|
||||
settings = get_settings()
|
||||
from api.database import engine
|
||||
|
||||
# Import routers
|
||||
from api.routers import (
|
||||
machines,
|
||||
clients,
|
||||
sites,
|
||||
networks,
|
||||
tags,
|
||||
sessions,
|
||||
projects,
|
||||
tasks,
|
||||
billable_time,
|
||||
work_items,
|
||||
services,
|
||||
infrastructure,
|
||||
firewall_rules,
|
||||
m365_tenants,
|
||||
credentials,
|
||||
credential_audit_logs,
|
||||
security_incidents,
|
||||
conversation_contexts,
|
||||
context_snippets,
|
||||
project_states,
|
||||
decision_logs,
|
||||
bulk_import,
|
||||
)
|
||||
|
||||
# Import middleware
|
||||
from api.middleware.error_handler import register_exception_handlers
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""
|
||||
Lifespan event handler for startup and shutdown operations
|
||||
"""
|
||||
# Startup
|
||||
print("Starting ClaudeTools API...")
|
||||
print(f"Database: {settings.DATABASE_NAME}")
|
||||
print(f"JWT Auth: {'Enabled' if settings.JWT_SECRET_KEY else 'Disabled'}")
|
||||
|
||||
yield
|
||||
|
||||
# Shutdown
|
||||
print("Shutting down ClaudeTools API...")
|
||||
engine.dispose()
|
||||
|
||||
|
||||
# Initialize FastAPI application
|
||||
app = FastAPI(
|
||||
title="ClaudeTools API",
|
||||
description="MSP Work Tracking and Infrastructure Management System",
|
||||
version="1.0.0",
|
||||
docs_url="/api/docs",
|
||||
redoc_url="/api/redoc",
|
||||
openapi_url="/api/openapi.json",
|
||||
lifespan=lifespan
|
||||
)
|
||||
|
||||
# Configure CORS
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=settings.ALLOWED_ORIGINS.split(",") if settings.ALLOWED_ORIGINS else ["*"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Register exception handlers
|
||||
register_exception_handlers(app)
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""Root endpoint - API status check"""
|
||||
return {
|
||||
"status": "online",
|
||||
"service": "ClaudeTools API",
|
||||
"version": "1.0.0",
|
||||
"docs": "/api/docs"
|
||||
}
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint for monitoring"""
|
||||
return {
|
||||
"status": "healthy",
|
||||
"database": "connected"
|
||||
}
|
||||
|
||||
|
||||
# Register routers
|
||||
app.include_router(machines.router, prefix="/api/machines", tags=["Machines"])
|
||||
app.include_router(clients.router, prefix="/api/clients", tags=["Clients"])
|
||||
app.include_router(sites.router, prefix="/api/sites", tags=["Sites"])
|
||||
app.include_router(networks.router, prefix="/api/networks", tags=["Networks"])
|
||||
app.include_router(tags.router, prefix="/api/tags", tags=["Tags"])
|
||||
app.include_router(sessions.router, prefix="/api/sessions", tags=["Sessions"])
|
||||
app.include_router(projects.router, prefix="/api/projects", tags=["Projects"])
|
||||
app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
|
||||
app.include_router(billable_time.router, prefix="/api/billable-time", tags=["Billable Time"])
|
||||
app.include_router(work_items.router, prefix="/api/work-items", tags=["Work Items"])
|
||||
app.include_router(services.router, prefix="/api/services", tags=["Services"])
|
||||
app.include_router(infrastructure.router, prefix="/api/infrastructure", tags=["Infrastructure"])
|
||||
app.include_router(m365_tenants.router, prefix="/api/m365-tenants", tags=["M365 Tenants"])
|
||||
app.include_router(firewall_rules.router, prefix="/api/firewall-rules", tags=["Firewall Rules"])
|
||||
app.include_router(credentials.router, prefix="/api/credentials", tags=["Credentials"])
|
||||
app.include_router(credential_audit_logs.router, prefix="/api/credential-audit-logs", tags=["Credential Audit Logs"])
|
||||
app.include_router(security_incidents.router, prefix="/api/security-incidents", tags=["Security Incidents"])
|
||||
app.include_router(conversation_contexts.router, prefix="/api/conversation-contexts", tags=["Conversation Contexts"])
|
||||
app.include_router(context_snippets.router, prefix="/api/context-snippets", tags=["Context Snippets"])
|
||||
app.include_router(project_states.router, prefix="/api/project-states", tags=["Project States"])
|
||||
app.include_router(decision_logs.router, prefix="/api/decision-logs", tags=["Decision Logs"])
|
||||
app.include_router(bulk_import.router, prefix="/api/bulk-import", tags=["Bulk Import"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run(
|
||||
"api.main:app",
|
||||
host="0.0.0.0",
|
||||
port=8000,
|
||||
reload=True
|
||||
)
|
||||
303
api/middleware/README.md
Normal file
303
api/middleware/README.md
Normal file
@@ -0,0 +1,303 @@
|
||||
# ClaudeTools API Middleware
|
||||
|
||||
This package provides JWT authentication, authorization, and error handling middleware for the ClaudeTools FastAPI application.
|
||||
|
||||
## Overview
|
||||
|
||||
The middleware package consists of three main modules:
|
||||
|
||||
1. **auth.py** - JWT token management and password hashing
|
||||
2. **error_handler.py** - Custom exception classes and global error handlers
|
||||
3. **__init__.py** - Package exports and convenience imports
|
||||
|
||||
## Authentication (auth.py)
|
||||
|
||||
### Password Hashing
|
||||
|
||||
The middleware uses Argon2 for password hashing (with bcrypt fallback for compatibility):
|
||||
|
||||
```python
|
||||
from api.middleware import hash_password, verify_password
|
||||
|
||||
# Hash a password
|
||||
hashed = hash_password("user_password")
|
||||
|
||||
# Verify a password
|
||||
is_valid = verify_password("user_password", hashed)
|
||||
```
|
||||
|
||||
### JWT Token Management
|
||||
|
||||
Create and verify JWT tokens for API authentication:
|
||||
|
||||
```python
|
||||
from api.middleware import create_access_token, verify_token
|
||||
from datetime import timedelta
|
||||
|
||||
# Create a token
|
||||
token = create_access_token(
|
||||
data={
|
||||
"sub": "mike@azcomputerguru.com",
|
||||
"scopes": ["msp:read", "msp:write"],
|
||||
"machine": "windows-workstation"
|
||||
},
|
||||
expires_delta=timedelta(hours=1)
|
||||
)
|
||||
|
||||
# Verify a token
|
||||
payload = verify_token(token)
|
||||
# Returns: {"sub": "mike@...", "scopes": [...], "exp": ..., ...}
|
||||
```
|
||||
|
||||
### Protected Routes
|
||||
|
||||
Use dependency injection to protect API routes:
|
||||
|
||||
```python
|
||||
from fastapi import APIRouter, Depends
|
||||
from api.middleware import get_current_user
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/protected")
|
||||
async def protected_route(current_user: dict = Depends(get_current_user)):
|
||||
"""This route requires authentication."""
|
||||
return {
|
||||
"message": "Access granted",
|
||||
"user": current_user.get("sub"),
|
||||
"scopes": current_user.get("scopes")
|
||||
}
|
||||
```
|
||||
|
||||
### Optional Authentication
|
||||
|
||||
For routes with optional authentication:
|
||||
|
||||
```python
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends
|
||||
from api.middleware import get_optional_current_user
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/content")
|
||||
async def get_content(user: Optional[dict] = Depends(get_optional_current_user)):
|
||||
"""This route works with or without authentication."""
|
||||
if user:
|
||||
return {"content": "Premium content", "user": user.get("sub")}
|
||||
return {"content": "Public content"}
|
||||
```
|
||||
|
||||
### Scope-Based Authorization
|
||||
|
||||
Require specific permission scopes:
|
||||
|
||||
```python
|
||||
from fastapi import APIRouter, Depends
|
||||
from api.middleware import get_current_user, require_scopes
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/admin/action")
|
||||
async def admin_action(
|
||||
current_user: dict = Depends(get_current_user),
|
||||
_: None = Depends(require_scopes("msp:admin"))
|
||||
):
|
||||
"""This route requires the 'msp:admin' scope."""
|
||||
return {"message": "Admin action performed"}
|
||||
|
||||
@router.post("/write")
|
||||
async def write_data(
|
||||
current_user: dict = Depends(get_current_user),
|
||||
_: None = Depends(require_scopes("msp:write"))
|
||||
):
|
||||
"""This route requires the 'msp:write' scope."""
|
||||
return {"message": "Data written"}
|
||||
```
|
||||
|
||||
## Error Handling (error_handler.py)
|
||||
|
||||
### Custom Exception Classes
|
||||
|
||||
The middleware provides several custom exception classes:
|
||||
|
||||
- **ClaudeToolsException** - Base exception class
|
||||
- **AuthenticationError** (401) - Authentication failures
|
||||
- **AuthorizationError** (403) - Permission denied
|
||||
- **NotFoundError** (404) - Resource not found
|
||||
- **ValidationError** (422) - Business logic validation errors
|
||||
- **ConflictError** (409) - Resource conflicts
|
||||
- **DatabaseError** (500) - Database operation failures
|
||||
|
||||
### Using Custom Exceptions
|
||||
|
||||
```python
|
||||
from api.middleware import NotFoundError, ValidationError, AuthenticationError
|
||||
|
||||
# Raise a not found error
|
||||
raise NotFoundError(
|
||||
"User not found",
|
||||
resource_type="User",
|
||||
resource_id="123"
|
||||
)
|
||||
|
||||
# Raise a validation error
|
||||
raise ValidationError(
|
||||
"Username already exists",
|
||||
field="username"
|
||||
)
|
||||
|
||||
# Raise an authentication error
|
||||
raise AuthenticationError("Invalid credentials")
|
||||
```
|
||||
|
||||
### Exception Response Format
|
||||
|
||||
All exceptions return a consistent JSON format:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": "Error message",
|
||||
"details": {
|
||||
"field": "username",
|
||||
"resource_type": "User",
|
||||
"resource_id": "123"
|
||||
},
|
||||
"path": "/api/v1/users/123"
|
||||
}
|
||||
```
|
||||
|
||||
### Registering Exception Handlers
|
||||
|
||||
In your FastAPI application initialization:
|
||||
|
||||
```python
|
||||
from fastapi import FastAPI
|
||||
from api.middleware import register_exception_handlers
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
# Register all exception handlers
|
||||
register_exception_handlers(app)
|
||||
```
|
||||
|
||||
## Complete FastAPI Example
|
||||
|
||||
Here's a complete example of using the middleware in a FastAPI application:
|
||||
|
||||
```python
|
||||
from fastapi import FastAPI, Depends, HTTPException
|
||||
from api.middleware import (
|
||||
get_current_user,
|
||||
require_scopes,
|
||||
register_exception_handlers,
|
||||
NotFoundError,
|
||||
ValidationError
|
||||
)
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(title="ClaudeTools API")
|
||||
|
||||
# Register exception handlers
|
||||
register_exception_handlers(app)
|
||||
|
||||
# Public endpoint
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {"message": "Welcome to ClaudeTools API"}
|
||||
|
||||
# Protected endpoint (requires authentication)
|
||||
@app.get("/api/v1/sessions")
|
||||
async def list_sessions(current_user: dict = Depends(get_current_user)):
|
||||
"""List sessions - requires authentication."""
|
||||
return {
|
||||
"sessions": [],
|
||||
"user": current_user.get("sub")
|
||||
}
|
||||
|
||||
# Admin endpoint (requires authentication + admin scope)
|
||||
@app.delete("/api/v1/sessions/{session_id}")
|
||||
async def delete_session(
|
||||
session_id: str,
|
||||
current_user: dict = Depends(get_current_user),
|
||||
_: None = Depends(require_scopes("msp:admin"))
|
||||
):
|
||||
"""Delete a session - requires admin scope."""
|
||||
# Check if session exists
|
||||
if not session_exists(session_id):
|
||||
raise NotFoundError(
|
||||
"Session not found",
|
||||
resource_type="Session",
|
||||
resource_id=session_id
|
||||
)
|
||||
|
||||
# Delete the session
|
||||
delete_session_from_db(session_id)
|
||||
return {"message": "Session deleted"}
|
||||
|
||||
# Write endpoint (requires authentication + write scope)
|
||||
@app.post("/api/v1/clients")
|
||||
async def create_client(
|
||||
client_data: dict,
|
||||
current_user: dict = Depends(get_current_user),
|
||||
_: None = Depends(require_scopes("msp:write"))
|
||||
):
|
||||
"""Create a client - requires write scope."""
|
||||
# Validate client data
|
||||
if client_exists(client_data["name"]):
|
||||
raise ValidationError(
|
||||
"Client with this name already exists",
|
||||
field="name"
|
||||
)
|
||||
|
||||
# Create the client
|
||||
client = create_client_in_db(client_data)
|
||||
return {"client": client}
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
The middleware uses settings from `api/config.py`:
|
||||
|
||||
- **JWT_SECRET_KEY** - Secret key for signing JWT tokens
|
||||
- **JWT_ALGORITHM** - Algorithm for JWT (default: HS256)
|
||||
- **ACCESS_TOKEN_EXPIRE_MINUTES** - Token expiration time (default: 60)
|
||||
|
||||
Ensure these are set in your `.env` file:
|
||||
|
||||
```bash
|
||||
JWT_SECRET_KEY=your-base64-encoded-secret-key
|
||||
JWT_ALGORITHM=HS256
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||
```
|
||||
|
||||
## Token Payload Structure
|
||||
|
||||
JWT tokens should contain:
|
||||
|
||||
```json
|
||||
{
|
||||
"sub": "mike@azcomputerguru.com",
|
||||
"scopes": ["msp:read", "msp:write", "msp:admin"],
|
||||
"machine": "windows-workstation",
|
||||
"exp": 1234567890,
|
||||
"iat": 1234567890,
|
||||
"jti": "unique-token-id"
|
||||
}
|
||||
```
|
||||
|
||||
## Permission Scopes
|
||||
|
||||
The system uses three permission scopes:
|
||||
|
||||
- **msp:read** - Read sessions, clients, work items
|
||||
- **msp:write** - Create/update sessions, work items
|
||||
- **msp:admin** - Manage clients, credentials, delete operations
|
||||
|
||||
## Notes
|
||||
|
||||
- Password hashing uses Argon2 (more secure than bcrypt) due to compatibility issues with Python 3.13
|
||||
- JWT tokens are stateless and contain all necessary user information
|
||||
- The system does not use a traditional User model - authentication is based on email addresses
|
||||
- All exceptions are automatically caught and formatted consistently
|
||||
- Token verification includes expiration checking
|
||||
47
api/middleware/__init__.py
Normal file
47
api/middleware/__init__.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Middleware package for ClaudeTools API.
|
||||
|
||||
This package provides authentication, authorization, and error handling
|
||||
middleware for the FastAPI application.
|
||||
"""
|
||||
|
||||
from api.middleware.auth import (
|
||||
create_access_token,
|
||||
get_current_user,
|
||||
get_optional_current_user,
|
||||
hash_password,
|
||||
require_scopes,
|
||||
verify_password,
|
||||
verify_token,
|
||||
)
|
||||
from api.middleware.error_handler import (
|
||||
AuthenticationError,
|
||||
AuthorizationError,
|
||||
ClaudeToolsException,
|
||||
ConflictError,
|
||||
DatabaseError,
|
||||
NotFoundError,
|
||||
ValidationError,
|
||||
register_exception_handlers,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Authentication functions
|
||||
"create_access_token",
|
||||
"verify_token",
|
||||
"hash_password",
|
||||
"verify_password",
|
||||
"get_current_user",
|
||||
"get_optional_current_user",
|
||||
"require_scopes",
|
||||
# Exception classes
|
||||
"ClaudeToolsException",
|
||||
"AuthenticationError",
|
||||
"AuthorizationError",
|
||||
"NotFoundError",
|
||||
"ValidationError",
|
||||
"ConflictError",
|
||||
"DatabaseError",
|
||||
# Exception handler registration
|
||||
"register_exception_handlers",
|
||||
]
|
||||
281
api/middleware/auth.py
Normal file
281
api/middleware/auth.py
Normal file
@@ -0,0 +1,281 @@
|
||||
"""
|
||||
JWT Authentication middleware for ClaudeTools API.
|
||||
|
||||
This module provides JWT token creation, verification, and password hashing
|
||||
utilities for securing API endpoints. It uses PyJWT for token handling and
|
||||
passlib with bcrypt for password hashing.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
import jwt
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from passlib.context import CryptContext
|
||||
|
||||
from api.config import get_settings
|
||||
|
||||
# Password hashing context using bcrypt
|
||||
# Note: Due to compatibility issues between passlib 1.7.4 and bcrypt 5.0 on Python 3.13,
|
||||
# we use argon2 as the primary scheme. This is actually more secure than bcrypt.
|
||||
# If bcrypt compatibility is restored in future versions, it can be added back.
|
||||
try:
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
# Test if bcrypt is working
|
||||
pwd_context.hash("test")
|
||||
except (ValueError, Exception):
|
||||
# Fallback to argon2 if bcrypt has compatibility issues
|
||||
pwd_context = CryptContext(schemes=["argon2"], deprecated="auto")
|
||||
|
||||
# HTTP Bearer token scheme for FastAPI
|
||||
security = HTTPBearer()
|
||||
|
||||
# Get application settings
|
||||
settings = get_settings()
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
|
||||
"""
|
||||
Hash a plain text password using bcrypt.
|
||||
|
||||
Args:
|
||||
password: The plain text password to hash
|
||||
|
||||
Returns:
|
||||
str: The hashed password
|
||||
|
||||
Example:
|
||||
```python
|
||||
hashed = hash_password("my_secure_password")
|
||||
print(hashed) # $2b$12$...
|
||||
```
|
||||
"""
|
||||
return pwd_context.hash(password)
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
|
||||
"""
|
||||
Verify a plain text password against a hashed password.
|
||||
|
||||
Args:
|
||||
plain_password: The plain text password to verify
|
||||
hashed_password: The hashed password to verify against
|
||||
|
||||
Returns:
|
||||
bool: True if password matches, False otherwise
|
||||
|
||||
Example:
|
||||
```python
|
||||
is_valid = verify_password("user_input", stored_hash)
|
||||
if is_valid:
|
||||
print("Password is correct")
|
||||
```
|
||||
"""
|
||||
return pwd_context.verify(plain_password, hashed_password)
|
||||
|
||||
|
||||
def create_access_token(
|
||||
data: dict, expires_delta: Optional[timedelta] = None
|
||||
) -> str:
|
||||
"""
|
||||
Create a JWT access token with the provided data.
|
||||
|
||||
The token includes the provided data plus an expiration time (exp claim).
|
||||
If no expiration delta is provided, uses the default from settings.
|
||||
|
||||
Args:
|
||||
data: Dictionary of claims to include in the token (e.g., {"sub": "user_id"})
|
||||
expires_delta: Optional custom expiration time. If None, uses ACCESS_TOKEN_EXPIRE_MINUTES from settings
|
||||
|
||||
Returns:
|
||||
str: Encoded JWT token
|
||||
|
||||
Example:
|
||||
```python
|
||||
token = create_access_token(
|
||||
data={"sub": "user123"},
|
||||
expires_delta=timedelta(hours=1)
|
||||
)
|
||||
```
|
||||
"""
|
||||
to_encode = data.copy()
|
||||
|
||||
if expires_delta:
|
||||
expire = datetime.now(timezone.utc) + expires_delta
|
||||
else:
|
||||
expire = datetime.now(timezone.utc) + timedelta(
|
||||
minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES
|
||||
)
|
||||
|
||||
to_encode.update({"exp": expire})
|
||||
|
||||
encoded_jwt = jwt.encode(
|
||||
to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM
|
||||
)
|
||||
|
||||
return encoded_jwt
|
||||
|
||||
|
||||
def verify_token(token: str) -> dict:
|
||||
"""
|
||||
Verify and decode a JWT token.
|
||||
|
||||
Args:
|
||||
token: The JWT token string to verify
|
||||
|
||||
Returns:
|
||||
dict: The decoded token payload
|
||||
|
||||
Raises:
|
||||
HTTPException: If token is invalid or expired with 401 status code
|
||||
|
||||
Example:
|
||||
```python
|
||||
try:
|
||||
payload = verify_token(token_string)
|
||||
user_id = payload.get("sub")
|
||||
except HTTPException:
|
||||
print("Invalid token")
|
||||
```
|
||||
"""
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
|
||||
)
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Token has expired",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
except jwt.InvalidTokenError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Could not validate credentials",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
|
||||
def get_current_user(
|
||||
credentials: HTTPAuthorizationCredentials = Depends(security),
|
||||
) -> dict:
|
||||
"""
|
||||
Dependency function to get the current authenticated user from JWT token.
|
||||
|
||||
This function is used as a FastAPI dependency to protect routes that require
|
||||
authentication. It extracts the token from the Authorization header, verifies it,
|
||||
and returns the token payload containing user information.
|
||||
|
||||
Args:
|
||||
credentials: HTTP Bearer credentials from the Authorization header
|
||||
|
||||
Returns:
|
||||
dict: The decoded token payload containing user information (sub, scopes, etc.)
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if token is invalid
|
||||
|
||||
Example:
|
||||
```python
|
||||
@router.get("/protected")
|
||||
async def protected_route(current_user: dict = Depends(get_current_user)):
|
||||
return {"email": current_user.get("sub"), "scopes": current_user.get("scopes")}
|
||||
```
|
||||
"""
|
||||
token = credentials.credentials
|
||||
payload = verify_token(token)
|
||||
|
||||
# Extract user identifier from token subject claim
|
||||
user_identifier: Optional[str] = payload.get("sub")
|
||||
if user_identifier is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Could not validate credentials",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
return payload
|
||||
|
||||
|
||||
def get_optional_current_user(
|
||||
credentials: Optional[HTTPAuthorizationCredentials] = Depends(
|
||||
HTTPBearer(auto_error=False)
|
||||
),
|
||||
) -> Optional[dict]:
|
||||
"""
|
||||
Dependency function to get the current user if authenticated, None otherwise.
|
||||
|
||||
This is useful for routes that have optional authentication where behavior
|
||||
changes based on whether a user is logged in or not.
|
||||
|
||||
Args:
|
||||
credentials: Optional HTTP Bearer credentials from the Authorization header
|
||||
|
||||
Returns:
|
||||
Optional[dict]: The decoded token payload or None if not authenticated
|
||||
|
||||
Example:
|
||||
```python
|
||||
@router.get("/content")
|
||||
async def get_content(user: Optional[dict] = Depends(get_optional_current_user)):
|
||||
if user:
|
||||
return {"content": "Premium content", "email": user.get("sub")}
|
||||
return {"content": "Public content"}
|
||||
```
|
||||
"""
|
||||
if credentials is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
token = credentials.credentials
|
||||
payload = verify_token(token)
|
||||
user_identifier: Optional[str] = payload.get("sub")
|
||||
|
||||
if user_identifier is None:
|
||||
return None
|
||||
|
||||
return payload
|
||||
except HTTPException:
|
||||
return None
|
||||
|
||||
|
||||
def require_scopes(*required_scopes: str):
|
||||
"""
|
||||
Dependency factory to require specific permission scopes.
|
||||
|
||||
This function creates a dependency that checks if the authenticated user
|
||||
has all the required permission scopes.
|
||||
|
||||
Args:
|
||||
*required_scopes: Variable number of scope strings required (e.g., "msp:read", "msp:write")
|
||||
|
||||
Returns:
|
||||
Callable: A dependency function that validates scopes
|
||||
|
||||
Raises:
|
||||
HTTPException: 403 if user lacks required scopes
|
||||
|
||||
Example:
|
||||
```python
|
||||
@router.post("/admin/action")
|
||||
async def admin_action(
|
||||
current_user: dict = Depends(get_current_user),
|
||||
_: None = Depends(require_scopes("msp:admin"))
|
||||
):
|
||||
return {"message": "Admin action performed"}
|
||||
```
|
||||
"""
|
||||
|
||||
def check_scopes(current_user: dict = Depends(get_current_user)) -> None:
|
||||
user_scopes = current_user.get("scopes", [])
|
||||
|
||||
for scope in required_scopes:
|
||||
if scope not in user_scopes:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=f"Missing required permission: {scope}",
|
||||
)
|
||||
|
||||
return check_scopes
|
||||
324
api/middleware/error_handler.py
Normal file
324
api/middleware/error_handler.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Error handling middleware for ClaudeTools API.
|
||||
|
||||
This module provides custom exception classes and global exception handlers
|
||||
for consistent error responses across the FastAPI application.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import FastAPI, Request, status
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from fastapi.responses import JSONResponse
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
|
||||
class ClaudeToolsException(Exception):
|
||||
"""Base exception class for ClaudeTools application."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
):
|
||||
"""
|
||||
Initialize the exception.
|
||||
|
||||
Args:
|
||||
message: Human-readable error message
|
||||
status_code: HTTP status code for the error
|
||||
details: Optional dictionary with additional error details
|
||||
"""
|
||||
self.message = message
|
||||
self.status_code = status_code
|
||||
self.details = details or {}
|
||||
super().__init__(self.message)
|
||||
|
||||
|
||||
class AuthenticationError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised for authentication failures.
|
||||
|
||||
This includes invalid credentials, expired tokens, or missing authentication.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, message: str = "Authentication failed", details: Optional[Dict[str, Any]] = None
|
||||
):
|
||||
"""
|
||||
Initialize authentication error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
details: Optional additional details
|
||||
"""
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
details=details,
|
||||
)
|
||||
|
||||
|
||||
class AuthorizationError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised for authorization failures.
|
||||
|
||||
This occurs when an authenticated user lacks permission for an action.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, message: str = "Insufficient permissions", details: Optional[Dict[str, Any]] = None
|
||||
):
|
||||
"""
|
||||
Initialize authorization error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
details: Optional additional details
|
||||
"""
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
details=details,
|
||||
)
|
||||
|
||||
|
||||
class NotFoundError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised when a requested resource is not found.
|
||||
|
||||
This should be used for missing users, organizations, tools, etc.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Resource not found",
|
||||
resource_type: Optional[str] = None,
|
||||
resource_id: Optional[str] = None,
|
||||
):
|
||||
"""
|
||||
Initialize not found error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
resource_type: Optional type of resource (e.g., "User", "Tool")
|
||||
resource_id: Optional ID of the missing resource
|
||||
"""
|
||||
details = {}
|
||||
if resource_type:
|
||||
details["resource_type"] = resource_type
|
||||
if resource_id:
|
||||
details["resource_id"] = resource_id
|
||||
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details=details,
|
||||
)
|
||||
|
||||
|
||||
class ValidationError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised for business logic validation failures.
|
||||
|
||||
This is separate from FastAPI's RequestValidationError and should be used
|
||||
for application-level validation (e.g., duplicate usernames, invalid state transitions).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Validation failed",
|
||||
field: Optional[str] = None,
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
):
|
||||
"""
|
||||
Initialize validation error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
field: Optional field name that failed validation
|
||||
details: Optional additional details
|
||||
"""
|
||||
error_details = details or {}
|
||||
if field:
|
||||
error_details["field"] = field
|
||||
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
details=error_details,
|
||||
)
|
||||
|
||||
|
||||
class ConflictError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised when a request conflicts with existing data.
|
||||
|
||||
This includes duplicate entries, concurrent modifications, etc.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, message: str = "Resource conflict", details: Optional[Dict[str, Any]] = None
|
||||
):
|
||||
"""
|
||||
Initialize conflict error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
details: Optional additional details
|
||||
"""
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
details=details,
|
||||
)
|
||||
|
||||
|
||||
class DatabaseError(ClaudeToolsException):
|
||||
"""
|
||||
Exception raised for database operation failures.
|
||||
|
||||
This wraps SQLAlchemy errors with a consistent interface.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, message: str = "Database operation failed", details: Optional[Dict[str, Any]] = None
|
||||
):
|
||||
"""
|
||||
Initialize database error.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
details: Optional additional details
|
||||
"""
|
||||
super().__init__(
|
||||
message=message,
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
details=details,
|
||||
)
|
||||
|
||||
|
||||
async def claudetools_exception_handler(
|
||||
request: Request, exc: ClaudeToolsException
|
||||
) -> JSONResponse:
|
||||
"""
|
||||
Handler for custom ClaudeTools exceptions.
|
||||
|
||||
Args:
|
||||
request: The FastAPI request object
|
||||
exc: The ClaudeTools exception
|
||||
|
||||
Returns:
|
||||
JSONResponse: Formatted error response
|
||||
"""
|
||||
return JSONResponse(
|
||||
status_code=exc.status_code,
|
||||
content={
|
||||
"error": exc.message,
|
||||
"details": exc.details,
|
||||
"path": str(request.url.path),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def validation_exception_handler(
|
||||
request: Request, exc: RequestValidationError
|
||||
) -> JSONResponse:
|
||||
"""
|
||||
Handler for FastAPI request validation errors.
|
||||
|
||||
Args:
|
||||
request: The FastAPI request object
|
||||
exc: The validation error
|
||||
|
||||
Returns:
|
||||
JSONResponse: Formatted error response
|
||||
"""
|
||||
errors = []
|
||||
for error in exc.errors():
|
||||
errors.append(
|
||||
{
|
||||
"field": ".".join(str(loc) for loc in error["loc"]),
|
||||
"message": error["msg"],
|
||||
"type": error["type"],
|
||||
}
|
||||
)
|
||||
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
content={
|
||||
"error": "Request validation failed",
|
||||
"details": {"validation_errors": errors},
|
||||
"path": str(request.url.path),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def sqlalchemy_exception_handler(
|
||||
request: Request, exc: SQLAlchemyError
|
||||
) -> JSONResponse:
|
||||
"""
|
||||
Handler for SQLAlchemy database errors.
|
||||
|
||||
Args:
|
||||
request: The FastAPI request object
|
||||
exc: The SQLAlchemy exception
|
||||
|
||||
Returns:
|
||||
JSONResponse: Formatted error response
|
||||
"""
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
content={
|
||||
"error": "Database operation failed",
|
||||
"details": {"type": type(exc).__name__},
|
||||
"path": str(request.url.path),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def generic_exception_handler(request: Request, exc: Exception) -> JSONResponse:
|
||||
"""
|
||||
Handler for unhandled exceptions.
|
||||
|
||||
Args:
|
||||
request: The FastAPI request object
|
||||
exc: The exception
|
||||
|
||||
Returns:
|
||||
JSONResponse: Formatted error response
|
||||
"""
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
content={
|
||||
"error": "Internal server error",
|
||||
"details": {"type": type(exc).__name__},
|
||||
"path": str(request.url.path),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def register_exception_handlers(app: FastAPI) -> None:
|
||||
"""
|
||||
Register all exception handlers with the FastAPI application.
|
||||
|
||||
This should be called during application startup to ensure all exceptions
|
||||
are handled consistently.
|
||||
|
||||
Args:
|
||||
app: The FastAPI application instance
|
||||
|
||||
Example:
|
||||
```python
|
||||
from fastapi import FastAPI
|
||||
from api.middleware.error_handler import register_exception_handlers
|
||||
|
||||
app = FastAPI()
|
||||
register_exception_handlers(app)
|
||||
```
|
||||
"""
|
||||
app.add_exception_handler(ClaudeToolsException, claudetools_exception_handler)
|
||||
app.add_exception_handler(RequestValidationError, validation_exception_handler)
|
||||
app.add_exception_handler(SQLAlchemyError, sqlalchemy_exception_handler)
|
||||
app.add_exception_handler(Exception, generic_exception_handler)
|
||||
97
api/models/__init__.py
Normal file
97
api/models/__init__.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""
|
||||
SQLAlchemy ORM models for ClaudeTools.
|
||||
|
||||
This package contains all database models and their base classes.
|
||||
"""
|
||||
|
||||
from api.models.api_audit_log import ApiAuditLog
|
||||
from api.models.backup_log import BackupLog
|
||||
from api.models.base import Base, TimestampMixin, UUIDMixin
|
||||
from api.models.billable_time import BillableTime
|
||||
from api.models.client import Client
|
||||
from api.models.command_run import CommandRun
|
||||
from api.models.context_snippet import ContextSnippet
|
||||
from api.models.conversation_context import ConversationContext
|
||||
from api.models.credential import Credential
|
||||
from api.models.credential_audit_log import CredentialAuditLog
|
||||
from api.models.credential_permission import CredentialPermission
|
||||
from api.models.database_change import DatabaseChange
|
||||
from api.models.decision_log import DecisionLog
|
||||
from api.models.deployment import Deployment
|
||||
from api.models.environmental_insight import EnvironmentalInsight
|
||||
from api.models.external_integration import ExternalIntegration
|
||||
from api.models.failure_pattern import FailurePattern
|
||||
from api.models.file_change import FileChange
|
||||
from api.models.firewall_rule import FirewallRule
|
||||
from api.models.infrastructure import Infrastructure
|
||||
from api.models.infrastructure_change import InfrastructureChange
|
||||
from api.models.infrastructure_tag import InfrastructureTag
|
||||
from api.models.integration_credential import IntegrationCredential
|
||||
from api.models.m365_tenant import M365Tenant
|
||||
from api.models.machine import Machine
|
||||
from api.models.network import Network
|
||||
from api.models.operation_failure import OperationFailure
|
||||
from api.models.pending_task import PendingTask
|
||||
from api.models.problem_solution import ProblemSolution
|
||||
from api.models.project import Project
|
||||
from api.models.project_state import ProjectState
|
||||
from api.models.schema_migration import SchemaMigration
|
||||
from api.models.security_incident import SecurityIncident
|
||||
from api.models.service import Service
|
||||
from api.models.service_relationship import ServiceRelationship
|
||||
from api.models.session import Session
|
||||
from api.models.session_tag import SessionTag
|
||||
from api.models.site import Site
|
||||
from api.models.tag import Tag
|
||||
from api.models.task import Task
|
||||
from api.models.ticket_link import TicketLink
|
||||
from api.models.work_item import WorkItem
|
||||
from api.models.work_item_tag import WorkItemTag
|
||||
|
||||
__all__ = [
|
||||
"ApiAuditLog",
|
||||
"BackupLog",
|
||||
"Base",
|
||||
"BillableTime",
|
||||
"Client",
|
||||
"CommandRun",
|
||||
"ContextSnippet",
|
||||
"ConversationContext",
|
||||
"Credential",
|
||||
"CredentialAuditLog",
|
||||
"CredentialPermission",
|
||||
"DatabaseChange",
|
||||
"DecisionLog",
|
||||
"Deployment",
|
||||
"EnvironmentalInsight",
|
||||
"ExternalIntegration",
|
||||
"FailurePattern",
|
||||
"FileChange",
|
||||
"FirewallRule",
|
||||
"Infrastructure",
|
||||
"InfrastructureChange",
|
||||
"InfrastructureTag",
|
||||
"IntegrationCredential",
|
||||
"M365Tenant",
|
||||
"Machine",
|
||||
"Network",
|
||||
"OperationFailure",
|
||||
"PendingTask",
|
||||
"ProblemSolution",
|
||||
"Project",
|
||||
"ProjectState",
|
||||
"SchemaMigration",
|
||||
"SecurityIncident",
|
||||
"Service",
|
||||
"ServiceRelationship",
|
||||
"Session",
|
||||
"SessionTag",
|
||||
"Site",
|
||||
"Tag",
|
||||
"Task",
|
||||
"TicketLink",
|
||||
"TimestampMixin",
|
||||
"UUIDMixin",
|
||||
"WorkItem",
|
||||
"WorkItemTag",
|
||||
]
|
||||
111
api/models/api_audit_log.py
Normal file
111
api/models/api_audit_log.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""
|
||||
API audit log model for tracking API requests and security events.
|
||||
|
||||
Tracks all API requests including user, endpoint, request/response details,
|
||||
and performance metrics for security auditing and monitoring.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Index, Integer, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
|
||||
class ApiAuditLog(Base, UUIDMixin):
|
||||
"""
|
||||
API audit log model for tracking API requests and security.
|
||||
|
||||
Logs all API requests with details about the user, endpoint accessed,
|
||||
request/response data, performance metrics, and errors. Used for
|
||||
security auditing, monitoring, and troubleshooting API issues.
|
||||
|
||||
Attributes:
|
||||
user_id: User identifier from JWT sub claim
|
||||
endpoint: API endpoint path accessed
|
||||
http_method: HTTP method used (GET, POST, PUT, DELETE, etc.)
|
||||
ip_address: IP address of the requester
|
||||
user_agent: User agent string from the request
|
||||
request_body: Sanitized request body (credentials removed)
|
||||
response_status: HTTP response status code
|
||||
response_time_ms: Response time in milliseconds
|
||||
error_message: Error message if request failed
|
||||
timestamp: When the request was made
|
||||
"""
|
||||
|
||||
__tablename__ = "api_audit_log"
|
||||
|
||||
# User identification
|
||||
user_id: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="User identifier from JWT sub claim"
|
||||
)
|
||||
|
||||
# Request details
|
||||
endpoint: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="API endpoint path accessed (e.g., '/api/v1/sessions')"
|
||||
)
|
||||
|
||||
http_method: Mapped[Optional[str]] = mapped_column(
|
||||
String(10),
|
||||
doc="HTTP method used: GET, POST, PUT, DELETE, PATCH"
|
||||
)
|
||||
|
||||
# Client information
|
||||
ip_address: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
doc="IP address of the requester (IPv4 or IPv6)"
|
||||
)
|
||||
|
||||
user_agent: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="User agent string from the request"
|
||||
)
|
||||
|
||||
# Request/Response data
|
||||
request_body: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Sanitized request body (credentials and sensitive data removed)"
|
||||
)
|
||||
|
||||
response_status: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="HTTP response status code (200, 401, 500, etc.)"
|
||||
)
|
||||
|
||||
response_time_ms: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Response time in milliseconds"
|
||||
)
|
||||
|
||||
# Error tracking
|
||||
error_message: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Error message if the request failed"
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
timestamp: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the request was made"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_api_audit_user", "user_id"),
|
||||
Index("idx_api_audit_endpoint", "endpoint"),
|
||||
Index("idx_api_audit_timestamp", "timestamp"),
|
||||
Index("idx_api_audit_status", "response_status"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the audit log entry."""
|
||||
return f"<ApiAuditLog(user='{self.user_id}', endpoint='{self.endpoint}', status={self.response_status})>"
|
||||
147
api/models/backup_log.py
Normal file
147
api/models/backup_log.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""
|
||||
Backup Log model for tracking ClaudeTools database backups.
|
||||
|
||||
This model logs all backup operations with verification status,
|
||||
ensuring the ClaudeTools database can be reliably restored if needed.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
BigInteger,
|
||||
CheckConstraint,
|
||||
Index,
|
||||
Integer,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
|
||||
class BackupLog(Base, UUIDMixin):
|
||||
"""
|
||||
Backup tracking for ClaudeTools database.
|
||||
|
||||
Logs all backup operations including timing, file details, and verification
|
||||
status. Ensures database can be restored with confidence.
|
||||
|
||||
Attributes:
|
||||
id: Unique identifier
|
||||
backup_type: Type of backup (daily, weekly, monthly, manual, pre-migration)
|
||||
file_path: Path to the backup file
|
||||
file_size_bytes: Size of the backup file in bytes
|
||||
backup_started_at: When the backup started
|
||||
backup_completed_at: When the backup completed
|
||||
duration_seconds: Computed duration of backup operation
|
||||
verification_status: Status of backup verification (passed, failed, not_verified)
|
||||
verification_details: JSON with specific verification check results
|
||||
database_host: Host where database is located
|
||||
database_name: Name of the database backed up
|
||||
backup_method: Method used for backup (mysqldump, etc.)
|
||||
created_at: Timestamp when log entry was created
|
||||
"""
|
||||
|
||||
__tablename__ = "backup_log"
|
||||
|
||||
# Backup details
|
||||
backup_type: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
CheckConstraint(
|
||||
"backup_type IN ('daily', 'weekly', 'monthly', 'manual', 'pre-migration')"
|
||||
),
|
||||
nullable=False,
|
||||
doc="Type of backup performed",
|
||||
)
|
||||
file_path: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Path to the backup file",
|
||||
)
|
||||
file_size_bytes: Mapped[int] = mapped_column(
|
||||
BigInteger,
|
||||
nullable=False,
|
||||
doc="Size of backup file in bytes",
|
||||
)
|
||||
|
||||
# Timing
|
||||
backup_started_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
doc="When the backup started",
|
||||
)
|
||||
backup_completed_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
doc="When the backup completed",
|
||||
)
|
||||
|
||||
# Note: SQLAlchemy doesn't support TIMESTAMPDIFF directly, so we'll calculate in Python
|
||||
# The duration will be computed by the application layer rather than as a stored generated column
|
||||
duration_seconds: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
nullable=True,
|
||||
doc="Duration of backup in seconds (computed in application)",
|
||||
)
|
||||
|
||||
# Verification
|
||||
verification_status: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
CheckConstraint(
|
||||
"verification_status IN ('passed', 'failed', 'not_verified')"
|
||||
),
|
||||
nullable=True,
|
||||
doc="Verification status of the backup",
|
||||
)
|
||||
verification_details: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON with specific verification check results",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
database_host: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="Host where database is located",
|
||||
)
|
||||
database_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Name of the database backed up",
|
||||
)
|
||||
backup_method: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="mysqldump",
|
||||
nullable=False,
|
||||
doc="Method used for backup",
|
||||
)
|
||||
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When log entry was created",
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_backup_type", "backup_type"),
|
||||
Index("idx_backup_date", "backup_completed_at"),
|
||||
Index("idx_verification_status", "verification_status"),
|
||||
)
|
||||
|
||||
def calculate_duration(self) -> None:
|
||||
"""Calculate and set the duration_seconds field."""
|
||||
if self.backup_started_at and self.backup_completed_at:
|
||||
delta = self.backup_completed_at - self.backup_started_at
|
||||
self.duration_seconds = int(delta.total_seconds())
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the backup log."""
|
||||
return (
|
||||
f"<BackupLog(id={self.id!r}, "
|
||||
f"type={self.backup_type!r}, "
|
||||
f"size={self.file_size_bytes}, "
|
||||
f"status={self.verification_status!r})>"
|
||||
)
|
||||
69
api/models/base.py
Normal file
69
api/models/base.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""
|
||||
Base models and mixins for SQLAlchemy ORM.
|
||||
|
||||
This module provides the foundational base class and reusable mixins
|
||||
for all database models in the ClaudeTools application.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import CHAR, Column, DateTime
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
|
||||
"""Base class for all SQLAlchemy ORM models."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class UUIDMixin:
|
||||
"""
|
||||
Mixin that adds a UUID primary key column.
|
||||
|
||||
This mixin provides a standardized UUID-based primary key for models,
|
||||
stored as CHAR(36) for compatibility with MariaDB and other databases
|
||||
that don't have native UUID support.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key stored as CHAR(36), automatically generated
|
||||
"""
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
primary_key=True,
|
||||
default=lambda: str(uuid.uuid4()),
|
||||
nullable=False,
|
||||
doc="Unique identifier for the record",
|
||||
)
|
||||
|
||||
|
||||
class TimestampMixin:
|
||||
"""
|
||||
Mixin that adds timestamp columns for record tracking.
|
||||
|
||||
This mixin provides automatic timestamp tracking for record creation
|
||||
and updates, using database-level defaults for consistency.
|
||||
|
||||
Attributes:
|
||||
created_at: Timestamp when the record was created
|
||||
updated_at: Timestamp when the record was last updated
|
||||
"""
|
||||
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="Timestamp when the record was created",
|
||||
)
|
||||
|
||||
updated_at: Mapped[datetime] = mapped_column(
|
||||
DateTime,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
server_onupdate=func.now(),
|
||||
doc="Timestamp when the record was last updated",
|
||||
)
|
||||
186
api/models/billable_time.py
Normal file
186
api/models/billable_time.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""
|
||||
Billable time model for tracking time entries for billing.
|
||||
|
||||
Tracks individual billable time entries with references to work items,
|
||||
sessions, and clients, including rates, amounts, and billing details.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, Integer, Numeric, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .client import Client
|
||||
from .session import Session
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class BillableTime(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Billable time model representing individual billable time entries.
|
||||
|
||||
Tracks time entries for billing purposes with detailed information about
|
||||
the work performed, rates applied, and amounts calculated. Links to
|
||||
work items, sessions, and clients for comprehensive billing tracking.
|
||||
|
||||
Attributes:
|
||||
work_item_id: Foreign key to work_items table
|
||||
session_id: Foreign key to sessions table
|
||||
client_id: Foreign key to clients table
|
||||
start_time: When the billable time started
|
||||
end_time: When the billable time ended
|
||||
duration_minutes: Duration in minutes (auto-calculated or manual)
|
||||
hourly_rate: Hourly rate applied to this time entry
|
||||
total_amount: Total billable amount (calculated)
|
||||
is_billable: Whether this time entry is actually billable
|
||||
description: Description of the work performed
|
||||
category: Category of work (consulting, development, support, etc.)
|
||||
notes: Additional notes about this time entry
|
||||
invoiced_at: When this time entry was invoiced
|
||||
invoice_id: Reference to invoice if applicable
|
||||
"""
|
||||
|
||||
__tablename__ = "billable_time"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to work_items table"
|
||||
)
|
||||
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
doc="Foreign key to sessions table"
|
||||
)
|
||||
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to clients table"
|
||||
)
|
||||
|
||||
# Time tracking
|
||||
start_time: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
doc="When the billable time started"
|
||||
)
|
||||
|
||||
end_time: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="When the billable time ended"
|
||||
)
|
||||
|
||||
duration_minutes: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
nullable=False,
|
||||
doc="Duration in minutes (auto-calculated or manual)"
|
||||
)
|
||||
|
||||
# Billing information
|
||||
hourly_rate: Mapped[float] = mapped_column(
|
||||
Numeric(10, 2),
|
||||
nullable=False,
|
||||
doc="Hourly rate applied to this time entry"
|
||||
)
|
||||
|
||||
total_amount: Mapped[float] = mapped_column(
|
||||
Numeric(10, 2),
|
||||
nullable=False,
|
||||
doc="Total billable amount (calculated: duration * rate)"
|
||||
)
|
||||
|
||||
is_billable: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
nullable=False,
|
||||
doc="Whether this time entry is actually billable"
|
||||
)
|
||||
|
||||
# Work details
|
||||
description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Description of the work performed"
|
||||
)
|
||||
|
||||
category: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Category: consulting, development, support, maintenance, troubleshooting, project_work, training, documentation"
|
||||
)
|
||||
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about this time entry"
|
||||
)
|
||||
|
||||
# Invoice tracking
|
||||
invoiced_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="When this time entry was invoiced"
|
||||
)
|
||||
|
||||
invoice_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Reference to invoice if applicable"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
work_item: Mapped[Optional["WorkItem"]] = relationship(
|
||||
"WorkItem",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
session: Mapped[Optional["Session"]] = relationship(
|
||||
"Session",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
client: Mapped["Client"] = relationship(
|
||||
"Client",
|
||||
doc="Relationship to Client model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"category IN ('consulting', 'development', 'support', 'maintenance', 'troubleshooting', 'project_work', 'training', 'documentation')",
|
||||
name="ck_billable_time_category"
|
||||
),
|
||||
CheckConstraint(
|
||||
"duration_minutes > 0",
|
||||
name="ck_billable_time_duration_positive"
|
||||
),
|
||||
CheckConstraint(
|
||||
"hourly_rate >= 0",
|
||||
name="ck_billable_time_rate_non_negative"
|
||||
),
|
||||
CheckConstraint(
|
||||
"total_amount >= 0",
|
||||
name="ck_billable_time_amount_non_negative"
|
||||
),
|
||||
CheckConstraint(
|
||||
"end_time IS NULL OR end_time >= start_time",
|
||||
name="ck_billable_time_end_after_start"
|
||||
),
|
||||
Index("idx_billable_time_work_item", "work_item_id"),
|
||||
Index("idx_billable_time_session", "session_id"),
|
||||
Index("idx_billable_time_client", "client_id"),
|
||||
Index("idx_billable_time_start", "start_time"),
|
||||
Index("idx_billable_time_billable", "is_billable"),
|
||||
Index("idx_billable_time_category", "category"),
|
||||
Index("idx_billable_time_invoiced", "invoiced_at"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the billable time entry."""
|
||||
return f"<BillableTime(client_id='{self.client_id}', duration={self.duration_minutes}min, amount=${self.total_amount})>"
|
||||
120
api/models/client.py
Normal file
120
api/models/client.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""
|
||||
Client model for all client organizations.
|
||||
|
||||
Master table for MSP clients, internal projects, and client organizations.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .pending_task import PendingTask
|
||||
from .project import Project
|
||||
from .session import Session
|
||||
|
||||
|
||||
class Client(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Client model representing client organizations.
|
||||
|
||||
Master table for all client organizations including MSP clients,
|
||||
internal projects, and project-based clients. Stores client identification,
|
||||
network information, and Microsoft 365 tenant details.
|
||||
|
||||
Attributes:
|
||||
name: Client name (unique)
|
||||
type: Client type (msp_client, internal, project)
|
||||
network_subnet: Client network subnet (e.g., "192.168.0.0/24")
|
||||
domain_name: Active Directory domain or primary domain
|
||||
m365_tenant_id: Microsoft 365 tenant ID
|
||||
primary_contact: Primary contact person
|
||||
notes: Additional notes about the client
|
||||
is_active: Whether client is currently active
|
||||
"""
|
||||
|
||||
__tablename__ = "clients"
|
||||
|
||||
# Client identification
|
||||
name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
unique=True,
|
||||
doc="Client name (unique)"
|
||||
)
|
||||
|
||||
type: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Client type: msp_client, internal, project"
|
||||
)
|
||||
|
||||
# Network information
|
||||
network_subnet: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Client network subnet (e.g., '192.168.0.0/24')"
|
||||
)
|
||||
|
||||
domain_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Active Directory domain or primary domain"
|
||||
)
|
||||
|
||||
# Microsoft 365
|
||||
m365_tenant_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
doc="Microsoft 365 tenant ID (UUID format)"
|
||||
)
|
||||
|
||||
# Contact information
|
||||
primary_contact: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Primary contact person"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the client"
|
||||
)
|
||||
|
||||
# Status
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
doc="Whether client is currently active"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
projects: Mapped[list["Project"]] = relationship(
|
||||
"Project",
|
||||
back_populates="client",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Projects associated with this client"
|
||||
)
|
||||
|
||||
sessions: Mapped[list["Session"]] = relationship(
|
||||
"Session",
|
||||
back_populates="client",
|
||||
doc="Sessions associated with this client"
|
||||
)
|
||||
|
||||
pending_tasks: Mapped[list["PendingTask"]] = relationship(
|
||||
"PendingTask",
|
||||
back_populates="client",
|
||||
doc="Pending tasks associated with this client"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_clients_type", "type"),
|
||||
Index("idx_clients_name", "name"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the client."""
|
||||
return f"<Client(name='{self.name}', type='{self.type}')>"
|
||||
140
api/models/command_run.py
Normal file
140
api/models/command_run.py
Normal file
@@ -0,0 +1,140 @@
|
||||
"""
|
||||
Command run model for tracking shell/PowerShell/SQL commands executed.
|
||||
|
||||
This model records all commands executed during work sessions, including
|
||||
success/failure status and enhanced failure tracking for diagnostics.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, Boolean, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class CommandRun(UUIDMixin, Base):
|
||||
"""
|
||||
Track commands executed during work sessions.
|
||||
|
||||
Records shell, PowerShell, SQL, and other commands with execution details,
|
||||
output, and enhanced failure tracking for compatibility and environmental issues.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
work_item_id: Reference to the work item
|
||||
session_id: Reference to the session
|
||||
command_text: The actual command that was executed
|
||||
host: Where the command was executed (hostname or IP)
|
||||
shell_type: Type of shell (bash, powershell, sql, docker, etc.)
|
||||
success: Whether the command succeeded
|
||||
output_summary: Summary of command output (first/last lines or error)
|
||||
exit_code: Command exit code (non-zero indicates failure)
|
||||
error_message: Full error text if command failed
|
||||
failure_category: Category of failure (compatibility, permission, syntax, environmental)
|
||||
resolution: How the failure was fixed (if resolved)
|
||||
resolved: Whether the failure has been resolved
|
||||
execution_order: Sequence number within work item
|
||||
created_at: When the command was executed
|
||||
"""
|
||||
|
||||
__tablename__ = "commands_run"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to work item",
|
||||
)
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to session",
|
||||
)
|
||||
|
||||
# Command details
|
||||
command_text: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="The actual command that was executed",
|
||||
)
|
||||
host: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="Where the command was executed (hostname or IP)",
|
||||
)
|
||||
shell_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Type of shell (bash, powershell, sql, docker, etc.)",
|
||||
)
|
||||
success: Mapped[Optional[bool]] = mapped_column(
|
||||
Boolean,
|
||||
nullable=True,
|
||||
doc="Whether the command succeeded",
|
||||
)
|
||||
output_summary: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Summary of command output (first/last lines or error)",
|
||||
)
|
||||
|
||||
# Failure tracking
|
||||
exit_code: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
nullable=True,
|
||||
doc="Command exit code (non-zero indicates failure)",
|
||||
)
|
||||
error_message: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Full error text if command failed",
|
||||
)
|
||||
failure_category: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Category of failure (compatibility, permission, syntax, environmental)",
|
||||
)
|
||||
resolution: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="How the failure was fixed (if resolved)",
|
||||
)
|
||||
resolved: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="0",
|
||||
doc="Whether the failure has been resolved",
|
||||
)
|
||||
|
||||
# Execution metadata
|
||||
execution_order: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
nullable=True,
|
||||
doc="Sequence number within work item",
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the command was executed",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
Index("idx_commands_work_item", "work_item_id"),
|
||||
Index("idx_commands_session", "session_id"),
|
||||
Index("idx_commands_host", "host"),
|
||||
Index("idx_commands_success", "success"),
|
||||
Index("idx_commands_failure_category", "failure_category"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the command run."""
|
||||
cmd_preview = self.command_text[:50] + "..." if len(self.command_text) > 50 else self.command_text
|
||||
return f"<CommandRun(id={self.id}, command={cmd_preview}, success={self.success})>"
|
||||
124
api/models/context_snippet.py
Normal file
124
api/models/context_snippet.py
Normal file
@@ -0,0 +1,124 @@
|
||||
"""
|
||||
ContextSnippet model for storing reusable context snippets.
|
||||
|
||||
Stores small, highly compressed pieces of information like technical decisions,
|
||||
configurations, patterns, and lessons learned for quick retrieval.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Float, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .client import Client
|
||||
from .project import Project
|
||||
|
||||
|
||||
class ContextSnippet(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
ContextSnippet model for storing reusable context snippets.
|
||||
|
||||
Stores small, highly compressed pieces of information like technical
|
||||
decisions, configurations, patterns, and lessons learned. These snippets
|
||||
are designed for quick retrieval and reuse across conversations.
|
||||
|
||||
Attributes:
|
||||
category: Category of snippet (tech_decision, configuration, pattern, lesson_learned)
|
||||
title: Brief title describing the snippet
|
||||
dense_content: Highly compressed information content
|
||||
structured_data: JSON object for optional structured representation
|
||||
tags: JSON array of tags for retrieval and categorization
|
||||
project_id: Foreign key to projects (optional)
|
||||
client_id: Foreign key to clients (optional)
|
||||
relevance_score: Float score for ranking relevance (default 1.0)
|
||||
usage_count: Integer count of how many times this snippet was retrieved (default 0)
|
||||
project: Relationship to Project model
|
||||
client: Relationship to Client model
|
||||
"""
|
||||
|
||||
__tablename__ = "context_snippets"
|
||||
|
||||
# Foreign keys
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("projects.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to projects (optional)"
|
||||
)
|
||||
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("clients.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to clients (optional)"
|
||||
)
|
||||
|
||||
# Snippet metadata
|
||||
category: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Category: tech_decision, configuration, pattern, lesson_learned"
|
||||
)
|
||||
|
||||
title: Mapped[str] = mapped_column(
|
||||
String(200),
|
||||
nullable=False,
|
||||
doc="Brief title describing the snippet"
|
||||
)
|
||||
|
||||
# Content
|
||||
dense_content: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Highly compressed information content"
|
||||
)
|
||||
|
||||
structured_data: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object for optional structured representation"
|
||||
)
|
||||
|
||||
# Retrieval metadata
|
||||
tags: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of tags for retrieval and categorization"
|
||||
)
|
||||
|
||||
relevance_score: Mapped[float] = mapped_column(
|
||||
Float,
|
||||
default=1.0,
|
||||
server_default="1.0",
|
||||
doc="Float score for ranking relevance (default 1.0)"
|
||||
)
|
||||
|
||||
usage_count: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
default=0,
|
||||
server_default="0",
|
||||
doc="Integer count of how many times this snippet was retrieved"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
project: Mapped[Optional["Project"]] = relationship(
|
||||
"Project",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
client: Mapped[Optional["Client"]] = relationship(
|
||||
"Client",
|
||||
doc="Relationship to Client model"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_context_snippets_project", "project_id"),
|
||||
Index("idx_context_snippets_client", "client_id"),
|
||||
Index("idx_context_snippets_category", "category"),
|
||||
Index("idx_context_snippets_relevance", "relevance_score"),
|
||||
Index("idx_context_snippets_usage", "usage_count"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the context snippet."""
|
||||
return f"<ContextSnippet(title='{self.title}', category='{self.category}', usage={self.usage_count})>"
|
||||
135
api/models/conversation_context.py
Normal file
135
api/models/conversation_context.py
Normal file
@@ -0,0 +1,135 @@
|
||||
"""
|
||||
ConversationContext model for storing Claude's conversation context.
|
||||
|
||||
Stores compressed summaries of conversations, sessions, and project states
|
||||
for cross-machine recall and context continuity.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Float, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .machine import Machine
|
||||
from .project import Project
|
||||
from .session import Session
|
||||
|
||||
|
||||
class ConversationContext(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
ConversationContext model for storing Claude's conversation context.
|
||||
|
||||
Stores compressed, structured summaries of conversations, work sessions,
|
||||
and project states to enable Claude to recall important context across
|
||||
different machines and conversation sessions.
|
||||
|
||||
Attributes:
|
||||
session_id: Foreign key to sessions (optional - not all contexts are work sessions)
|
||||
project_id: Foreign key to projects (optional)
|
||||
context_type: Type of context (session_summary, project_state, general_context)
|
||||
title: Brief title describing the context
|
||||
dense_summary: Compressed, structured summary (JSON or dense text)
|
||||
key_decisions: JSON array of important decisions made
|
||||
current_state: JSON object describing what's currently in progress
|
||||
tags: JSON array of tags for retrieval and categorization
|
||||
relevance_score: Float score for ranking relevance (default 1.0)
|
||||
machine_id: Foreign key to machines (which machine created this context)
|
||||
session: Relationship to Session model
|
||||
project: Relationship to Project model
|
||||
machine: Relationship to Machine model
|
||||
"""
|
||||
|
||||
__tablename__ = "conversation_contexts"
|
||||
|
||||
# Foreign keys
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("sessions.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to sessions (optional - not all contexts are work sessions)"
|
||||
)
|
||||
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("projects.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to projects (optional)"
|
||||
)
|
||||
|
||||
machine_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("machines.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to machines (which machine created this context)"
|
||||
)
|
||||
|
||||
# Context metadata
|
||||
context_type: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Type of context: session_summary, project_state, general_context"
|
||||
)
|
||||
|
||||
title: Mapped[str] = mapped_column(
|
||||
String(200),
|
||||
nullable=False,
|
||||
doc="Brief title describing the context"
|
||||
)
|
||||
|
||||
# Context content
|
||||
dense_summary: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Compressed, structured summary (JSON or dense text)"
|
||||
)
|
||||
|
||||
key_decisions: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of important decisions made"
|
||||
)
|
||||
|
||||
current_state: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object describing what's currently in progress"
|
||||
)
|
||||
|
||||
# Retrieval metadata
|
||||
tags: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of tags for retrieval and categorization"
|
||||
)
|
||||
|
||||
relevance_score: Mapped[float] = mapped_column(
|
||||
Float,
|
||||
default=1.0,
|
||||
server_default="1.0",
|
||||
doc="Float score for ranking relevance (default 1.0)"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
session: Mapped[Optional["Session"]] = relationship(
|
||||
"Session",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
project: Mapped[Optional["Project"]] = relationship(
|
||||
"Project",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
machine: Mapped[Optional["Machine"]] = relationship(
|
||||
"Machine",
|
||||
doc="Relationship to Machine model"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_conversation_contexts_session", "session_id"),
|
||||
Index("idx_conversation_contexts_project", "project_id"),
|
||||
Index("idx_conversation_contexts_machine", "machine_id"),
|
||||
Index("idx_conversation_contexts_type", "context_type"),
|
||||
Index("idx_conversation_contexts_relevance", "relevance_score"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the conversation context."""
|
||||
return f"<ConversationContext(title='{self.title}', type='{self.context_type}', relevance={self.relevance_score})>"
|
||||
231
api/models/credential.py
Normal file
231
api/models/credential.py
Normal file
@@ -0,0 +1,231 @@
|
||||
"""
|
||||
Credential model for secure storage of authentication credentials.
|
||||
|
||||
This model stores various types of credentials (passwords, API keys, OAuth tokens, etc.)
|
||||
with encryption for sensitive fields.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
CHAR,
|
||||
CheckConstraint,
|
||||
ForeignKey,
|
||||
Index,
|
||||
Integer,
|
||||
LargeBinary,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from api.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Credential(UUIDMixin, TimestampMixin, Base):
|
||||
"""
|
||||
Stores authentication credentials for various services.
|
||||
|
||||
Supports multiple credential types including passwords, API keys, OAuth,
|
||||
SSH keys, and more. Sensitive data is stored encrypted using AES-256-GCM.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
client_id: Reference to client this credential belongs to
|
||||
service_id: Reference to service this credential is for
|
||||
infrastructure_id: Reference to infrastructure component
|
||||
credential_type: Type of credential (password, api_key, oauth, etc.)
|
||||
service_name: Display name for the service (e.g., "Gitea Admin")
|
||||
username: Username for authentication
|
||||
password_encrypted: AES-256-GCM encrypted password
|
||||
api_key_encrypted: Encrypted API key
|
||||
client_id_oauth: OAuth client ID
|
||||
client_secret_encrypted: Encrypted OAuth client secret
|
||||
tenant_id_oauth: OAuth tenant ID
|
||||
public_key: SSH public key (not encrypted)
|
||||
token_encrypted: Encrypted bearer/access token
|
||||
connection_string_encrypted: Encrypted connection string
|
||||
integration_code: Integration code for services like Autotask
|
||||
external_url: External URL for the service
|
||||
internal_url: Internal URL for the service
|
||||
custom_port: Custom port number if applicable
|
||||
role_description: Description of access level/role
|
||||
requires_vpn: Whether VPN is required for access
|
||||
requires_2fa: Whether 2FA is required
|
||||
ssh_key_auth_enabled: Whether SSH key authentication is enabled
|
||||
access_level: Description of access level
|
||||
expires_at: When the credential expires
|
||||
last_rotated_at: When the credential was last rotated
|
||||
is_active: Whether the credential is currently active
|
||||
created_at: Creation timestamp
|
||||
updated_at: Last update timestamp
|
||||
"""
|
||||
|
||||
__tablename__ = "credentials"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to client",
|
||||
)
|
||||
service_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("services.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to service",
|
||||
)
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to infrastructure component",
|
||||
)
|
||||
|
||||
# Credential type and service info
|
||||
credential_type: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Type of credential",
|
||||
)
|
||||
service_name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Display name for the service",
|
||||
)
|
||||
|
||||
# Authentication fields
|
||||
username: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="Username for authentication",
|
||||
)
|
||||
password_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="AES-256-GCM encrypted password",
|
||||
)
|
||||
api_key_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted API key",
|
||||
)
|
||||
|
||||
# OAuth fields
|
||||
client_id_oauth: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="OAuth client ID",
|
||||
)
|
||||
client_secret_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted OAuth client secret",
|
||||
)
|
||||
tenant_id_oauth: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="OAuth tenant ID",
|
||||
)
|
||||
|
||||
# SSH and token fields
|
||||
public_key: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="SSH public key",
|
||||
)
|
||||
token_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted bearer/access token",
|
||||
)
|
||||
connection_string_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted connection string",
|
||||
)
|
||||
integration_code: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="Integration code for services like Autotask",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
external_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="External URL for the service",
|
||||
)
|
||||
internal_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Internal URL for the service",
|
||||
)
|
||||
custom_port: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
nullable=True,
|
||||
doc="Custom port number",
|
||||
)
|
||||
role_description: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Description of access level/role",
|
||||
)
|
||||
requires_vpn: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="0",
|
||||
doc="Whether VPN is required",
|
||||
)
|
||||
requires_2fa: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="0",
|
||||
doc="Whether 2FA is required",
|
||||
)
|
||||
ssh_key_auth_enabled: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="0",
|
||||
doc="Whether SSH key authentication is enabled",
|
||||
)
|
||||
access_level: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Description of access level",
|
||||
)
|
||||
|
||||
# Lifecycle
|
||||
expires_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="Expiration timestamp",
|
||||
)
|
||||
last_rotated_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="Last rotation timestamp",
|
||||
)
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="1",
|
||||
doc="Whether the credential is active",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"credential_type IN ('password', 'api_key', 'oauth', 'ssh_key', 'shared_secret', 'jwt', 'connection_string', 'certificate')",
|
||||
name="ck_credentials_type",
|
||||
),
|
||||
Index("idx_credentials_client", "client_id"),
|
||||
Index("idx_credentials_service", "service_id"),
|
||||
Index("idx_credentials_type", "credential_type"),
|
||||
Index("idx_credentials_active", "is_active"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the credential."""
|
||||
return f"<Credential(id={self.id}, service_name={self.service_name}, type={self.credential_type})>"
|
||||
95
api/models/credential_audit_log.py
Normal file
95
api/models/credential_audit_log.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""
|
||||
Credential audit log model for tracking credential access and modifications.
|
||||
|
||||
This model provides a comprehensive audit trail for all credential-related
|
||||
operations including views, updates, rotations, and decryptions.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class CredentialAuditLog(UUIDMixin, Base):
|
||||
"""
|
||||
Audit trail for credential access and modifications.
|
||||
|
||||
Records all operations performed on credentials including who accessed them,
|
||||
when, from where, and what action was performed.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
credential_id: Reference to the credential
|
||||
action: Type of action performed (view, create, update, delete, rotate, decrypt)
|
||||
user_id: User who performed the action (JWT sub claim)
|
||||
ip_address: IP address of the user
|
||||
user_agent: Browser/client user agent
|
||||
details: JSON string with additional context about the action
|
||||
timestamp: When the action was performed
|
||||
"""
|
||||
|
||||
__tablename__ = "credential_audit_log"
|
||||
|
||||
# Foreign keys
|
||||
credential_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("credentials.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to credential",
|
||||
)
|
||||
|
||||
# Action details
|
||||
action: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Type of action performed",
|
||||
)
|
||||
user_id: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="User who performed the action (JWT sub claim)",
|
||||
)
|
||||
|
||||
# Context information
|
||||
ip_address: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
nullable=True,
|
||||
doc="IP address (IPv4 or IPv6)",
|
||||
)
|
||||
user_agent: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Browser/client user agent string",
|
||||
)
|
||||
details: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON string with additional context (what changed, why, etc.)",
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
timestamp: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the action was performed",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"action IN ('view', 'create', 'update', 'delete', 'rotate', 'decrypt')",
|
||||
name="ck_credential_audit_action",
|
||||
),
|
||||
Index("idx_cred_audit_credential", "credential_id"),
|
||||
Index("idx_cred_audit_user", "user_id"),
|
||||
Index("idx_cred_audit_timestamp", "timestamp"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the audit log entry."""
|
||||
return f"<CredentialAuditLog(id={self.id}, action={self.action}, user={self.user_id}, timestamp={self.timestamp})>"
|
||||
88
api/models/credential_permission.py
Normal file
88
api/models/credential_permission.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
Credential permission model for access control.
|
||||
|
||||
This model manages fine-grained access control for credentials,
|
||||
supporting future team expansion with role-based permissions.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
CHAR,
|
||||
CheckConstraint,
|
||||
ForeignKey,
|
||||
Index,
|
||||
String,
|
||||
UniqueConstraint,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class CredentialPermission(UUIDMixin, Base):
|
||||
"""
|
||||
Access control for credentials.
|
||||
|
||||
Manages who can access specific credentials and what level of access they have.
|
||||
Supports read, write, and admin permission levels.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
credential_id: Reference to the credential
|
||||
user_id: User or role ID who has access
|
||||
permission_level: Level of access (read, write, admin)
|
||||
granted_at: When the permission was granted
|
||||
granted_by: Who granted the permission
|
||||
"""
|
||||
|
||||
__tablename__ = "credential_permissions"
|
||||
|
||||
# Foreign keys
|
||||
credential_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("credentials.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to credential",
|
||||
)
|
||||
|
||||
# Permission details
|
||||
user_id: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="User or role ID who has access",
|
||||
)
|
||||
permission_level: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Level of access",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
granted_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the permission was granted",
|
||||
)
|
||||
granted_by: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="Who granted the permission",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"permission_level IN ('read', 'write', 'admin')",
|
||||
name="ck_credential_permissions_level",
|
||||
),
|
||||
UniqueConstraint("credential_id", "user_id", name="uq_credential_user"),
|
||||
Index("idx_cred_perm_credential", "credential_id"),
|
||||
Index("idx_cred_perm_user", "user_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the credential permission."""
|
||||
return f"<CredentialPermission(id={self.id}, user={self.user_id}, level={self.permission_level})>"
|
||||
152
api/models/database_change.py
Normal file
152
api/models/database_change.py
Normal file
@@ -0,0 +1,152 @@
|
||||
"""
|
||||
Database change model for tracking database schema and data modifications.
|
||||
|
||||
Tracks database changes including schema modifications, data updates, index
|
||||
creation, optimizations, and cleanup operations with backup tracking.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import BigInteger, Boolean, CHAR, CheckConstraint, ForeignKey, Index, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .infrastructure import Infrastructure
|
||||
from .session import Session
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class DatabaseChange(Base, UUIDMixin):
|
||||
"""
|
||||
Database change model for tracking database modifications.
|
||||
|
||||
Records all database schema and data changes including DDL operations,
|
||||
data modifications, index management, optimizations, and cleanup tasks.
|
||||
Tracks affected rows, backup status, and freed space for audit and
|
||||
rollback purposes.
|
||||
|
||||
Attributes:
|
||||
work_item_id: Foreign key to work_items table (required)
|
||||
session_id: Foreign key to sessions table (required)
|
||||
database_name: Name of the database that was modified
|
||||
infrastructure_id: Foreign key to infrastructure table
|
||||
change_type: Type of database change
|
||||
sql_executed: SQL statements that were executed
|
||||
rows_affected: Number of rows affected by the change
|
||||
size_freed_bytes: Bytes freed by cleanup operations
|
||||
backup_taken: Whether a backup was taken before the change
|
||||
backup_location: Path or identifier of the backup
|
||||
created_at: When the change was made
|
||||
work_item: Relationship to WorkItem model
|
||||
session: Relationship to Session model
|
||||
infrastructure: Relationship to Infrastructure model
|
||||
"""
|
||||
|
||||
__tablename__ = "database_changes"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to work_items table (required)"
|
||||
)
|
||||
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to sessions table (required)"
|
||||
)
|
||||
|
||||
database_name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Name of the database that was modified"
|
||||
)
|
||||
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to infrastructure table"
|
||||
)
|
||||
|
||||
# Change details
|
||||
change_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Type of change: schema, data, index, optimization, cleanup, migration"
|
||||
)
|
||||
|
||||
sql_executed: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="SQL statements that were executed"
|
||||
)
|
||||
|
||||
rows_affected: Mapped[Optional[int]] = mapped_column(
|
||||
BigInteger,
|
||||
doc="Number of rows affected by the change"
|
||||
)
|
||||
|
||||
size_freed_bytes: Mapped[Optional[int]] = mapped_column(
|
||||
BigInteger,
|
||||
doc="Bytes freed by cleanup operations"
|
||||
)
|
||||
|
||||
# Backup tracking
|
||||
backup_taken: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether a backup was taken before the change"
|
||||
)
|
||||
|
||||
backup_location: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Path or identifier of the backup"
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the change was made"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
work_item: Mapped["WorkItem"] = relationship(
|
||||
"WorkItem",
|
||||
back_populates="database_changes",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
session: Mapped["Session"] = relationship(
|
||||
"Session",
|
||||
back_populates="database_changes",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
infrastructure: Mapped[Optional["Infrastructure"]] = relationship(
|
||||
"Infrastructure",
|
||||
back_populates="database_changes",
|
||||
doc="Relationship to Infrastructure model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"change_type IN ('schema', 'data', 'index', 'optimization', 'cleanup', 'migration')",
|
||||
name="ck_database_changes_type"
|
||||
),
|
||||
Index("idx_db_changes_work_item", "work_item_id"),
|
||||
Index("idx_db_changes_database", "database_name"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the database change."""
|
||||
return f"<DatabaseChange(database='{self.database_name}', type='{self.change_type}', rows={self.rows_affected})>"
|
||||
115
api/models/decision_log.py
Normal file
115
api/models/decision_log.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""
|
||||
DecisionLog model for tracking important decisions made during work.
|
||||
|
||||
Stores decisions with their rationale, alternatives considered, and impact
|
||||
to provide decision history and context for future work.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .project import Project
|
||||
from .session import Session
|
||||
|
||||
|
||||
class DecisionLog(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
DecisionLog model for tracking important decisions made during work.
|
||||
|
||||
Stores decisions with their type, rationale, alternatives considered,
|
||||
and impact assessment. This provides a decision history that can be
|
||||
referenced in future conversations and work sessions.
|
||||
|
||||
Attributes:
|
||||
decision_type: Type of decision (technical, architectural, process, security)
|
||||
decision_text: What was decided (the actual decision)
|
||||
rationale: Why this decision was made
|
||||
alternatives_considered: JSON array of other options that were considered
|
||||
impact: Impact level (low, medium, high, critical)
|
||||
project_id: Foreign key to projects (optional)
|
||||
session_id: Foreign key to sessions (optional)
|
||||
tags: JSON array of tags for retrieval and categorization
|
||||
project: Relationship to Project model
|
||||
session: Relationship to Session model
|
||||
"""
|
||||
|
||||
__tablename__ = "decision_logs"
|
||||
|
||||
# Foreign keys
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("projects.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to projects (optional)"
|
||||
)
|
||||
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("sessions.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to sessions (optional)"
|
||||
)
|
||||
|
||||
# Decision metadata
|
||||
decision_type: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Type of decision: technical, architectural, process, security"
|
||||
)
|
||||
|
||||
impact: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="medium",
|
||||
server_default="medium",
|
||||
doc="Impact level: low, medium, high, critical"
|
||||
)
|
||||
|
||||
# Decision content
|
||||
decision_text: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="What was decided (the actual decision)"
|
||||
)
|
||||
|
||||
rationale: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Why this decision was made"
|
||||
)
|
||||
|
||||
alternatives_considered: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of other options that were considered"
|
||||
)
|
||||
|
||||
# Retrieval metadata
|
||||
tags: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of tags for retrieval and categorization"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
project: Mapped[Optional["Project"]] = relationship(
|
||||
"Project",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
session: Mapped[Optional["Session"]] = relationship(
|
||||
"Session",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_decision_logs_project", "project_id"),
|
||||
Index("idx_decision_logs_session", "session_id"),
|
||||
Index("idx_decision_logs_type", "decision_type"),
|
||||
Index("idx_decision_logs_impact", "impact"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the decision log."""
|
||||
decision_preview = self.decision_text[:50] + "..." if len(self.decision_text) > 50 else self.decision_text
|
||||
return f"<DecisionLog(type='{self.decision_type}', impact='{self.impact}', decision='{decision_preview}')>"
|
||||
167
api/models/deployment.py
Normal file
167
api/models/deployment.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""
|
||||
Deployment model for tracking software and configuration deployments.
|
||||
|
||||
Tracks deployments of code, configuration, database changes, containers,
|
||||
and service restarts with version control and rollback capabilities.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .infrastructure import Infrastructure
|
||||
from .service import Service
|
||||
from .session import Session
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class Deployment(Base, UUIDMixin):
|
||||
"""
|
||||
Deployment model for tracking software and configuration deployments.
|
||||
|
||||
Records deployments of code, configuration files, database changes,
|
||||
containers, and service restarts. Includes version tracking, source/
|
||||
destination paths, and rollback procedures for operational safety.
|
||||
|
||||
Attributes:
|
||||
work_item_id: Foreign key to work_items table (required)
|
||||
session_id: Foreign key to sessions table (required)
|
||||
infrastructure_id: Foreign key to infrastructure table
|
||||
service_id: Foreign key to services table
|
||||
deployment_type: Type of deployment (code, config, database, etc.)
|
||||
version: Version identifier for this deployment
|
||||
description: Detailed description of what was deployed
|
||||
deployed_from: Source path or repository
|
||||
deployed_to: Destination path or target system
|
||||
rollback_available: Whether rollback is possible
|
||||
rollback_procedure: Instructions for rolling back this deployment
|
||||
created_at: When the deployment occurred
|
||||
work_item: Relationship to WorkItem model
|
||||
session: Relationship to Session model
|
||||
infrastructure: Relationship to Infrastructure model
|
||||
service: Relationship to Service model
|
||||
"""
|
||||
|
||||
__tablename__ = "deployments"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to work_items table (required)"
|
||||
)
|
||||
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to sessions table (required)"
|
||||
)
|
||||
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to infrastructure table"
|
||||
)
|
||||
|
||||
service_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("services.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to services table"
|
||||
)
|
||||
|
||||
# Deployment details
|
||||
deployment_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Type of deployment: code, config, database, container, service_restart"
|
||||
)
|
||||
|
||||
version: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Version identifier for this deployment"
|
||||
)
|
||||
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Detailed description of what was deployed"
|
||||
)
|
||||
|
||||
# Source and destination
|
||||
deployed_from: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Source path or repository (e.g., /home/user/app, git@github.com:user/repo)"
|
||||
)
|
||||
|
||||
deployed_to: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Destination path or target system (e.g., /var/www/app, container-name)"
|
||||
)
|
||||
|
||||
# Rollback capability
|
||||
rollback_available: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether rollback is possible for this deployment"
|
||||
)
|
||||
|
||||
rollback_procedure: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Instructions for rolling back this deployment"
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the deployment occurred"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
work_item: Mapped["WorkItem"] = relationship(
|
||||
"WorkItem",
|
||||
back_populates="deployments",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
session: Mapped["Session"] = relationship(
|
||||
"Session",
|
||||
back_populates="deployments",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
infrastructure: Mapped[Optional["Infrastructure"]] = relationship(
|
||||
"Infrastructure",
|
||||
back_populates="deployments",
|
||||
doc="Relationship to Infrastructure model"
|
||||
)
|
||||
|
||||
service: Mapped[Optional["Service"]] = relationship(
|
||||
"Service",
|
||||
back_populates="deployments",
|
||||
doc="Relationship to Service model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"deployment_type IN ('code', 'config', 'database', 'container', 'service_restart')",
|
||||
name="ck_deployments_type"
|
||||
),
|
||||
Index("idx_deployments_work_item", "work_item_id"),
|
||||
Index("idx_deployments_infrastructure", "infrastructure_id"),
|
||||
Index("idx_deployments_service", "service_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the deployment."""
|
||||
return f"<Deployment(type='{self.deployment_type}', version='{self.version}', to='{self.deployed_to}')>"
|
||||
145
api/models/environmental_insight.py
Normal file
145
api/models/environmental_insight.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
Environmental Insight model for Context Learning system.
|
||||
|
||||
This model stores generated insights about client/infrastructure environments,
|
||||
helping Claude learn from failures and provide better suggestions over time.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
CHAR,
|
||||
CheckConstraint,
|
||||
ForeignKey,
|
||||
Index,
|
||||
Integer,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class EnvironmentalInsight(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Environmental insights for client/infrastructure environments.
|
||||
|
||||
Stores learned insights about environmental constraints, configurations,
|
||||
and best practices discovered through failure analysis and verification.
|
||||
Used to generate insights.md files and provide context-aware suggestions.
|
||||
|
||||
Attributes:
|
||||
id: Unique identifier
|
||||
client_id: Reference to the client this insight applies to
|
||||
infrastructure_id: Reference to specific infrastructure if applicable
|
||||
insight_category: Category of insight (command_constraints, service_configuration, etc.)
|
||||
insight_title: Brief title describing the insight
|
||||
insight_description: Detailed markdown-formatted description
|
||||
examples: JSON array of command/configuration examples
|
||||
source_pattern_id: Reference to failure pattern that generated this insight
|
||||
confidence_level: How confident we are (confirmed, likely, suspected)
|
||||
verification_count: Number of times this insight has been verified
|
||||
priority: Priority level (1-10, higher = more important)
|
||||
last_verified: When this insight was last verified
|
||||
created_at: When the insight was created
|
||||
updated_at: When the insight was last updated
|
||||
"""
|
||||
|
||||
__tablename__ = "environmental_insights"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Client this insight applies to",
|
||||
)
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Specific infrastructure if applicable",
|
||||
)
|
||||
|
||||
# Insight content
|
||||
insight_category: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Category of insight",
|
||||
)
|
||||
insight_title: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Brief title describing the insight",
|
||||
)
|
||||
insight_description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed markdown-formatted description",
|
||||
)
|
||||
examples: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of command/configuration examples",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
source_pattern_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("failure_patterns.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
doc="Failure pattern that generated this insight",
|
||||
)
|
||||
confidence_level: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
nullable=True,
|
||||
doc="Confidence level in this insight",
|
||||
)
|
||||
verification_count: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
default=1,
|
||||
server_default="1",
|
||||
nullable=False,
|
||||
doc="Number of times verified",
|
||||
)
|
||||
priority: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
default=5,
|
||||
server_default="5",
|
||||
nullable=False,
|
||||
doc="Priority level (1-10, higher = more important)",
|
||||
)
|
||||
last_verified: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="When this insight was last verified",
|
||||
)
|
||||
|
||||
# Indexes and constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"insight_category IN ('command_constraints', 'service_configuration', 'version_limitations', 'custom_installations', 'network_constraints', 'permissions')",
|
||||
name="ck_insights_category",
|
||||
),
|
||||
CheckConstraint(
|
||||
"confidence_level IN ('confirmed', 'likely', 'suspected')",
|
||||
name="ck_insights_confidence",
|
||||
),
|
||||
Index("idx_insights_client", "client_id"),
|
||||
Index("idx_insights_infrastructure", "infrastructure_id"),
|
||||
Index("idx_insights_category", "insight_category"),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
# client = relationship("Client", back_populates="environmental_insights")
|
||||
# infrastructure = relationship("Infrastructure", back_populates="environmental_insights")
|
||||
# source_pattern = relationship("FailurePattern", back_populates="generated_insights")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the environmental insight."""
|
||||
return (
|
||||
f"<EnvironmentalInsight(id={self.id!r}, "
|
||||
f"category={self.insight_category!r}, "
|
||||
f"title={self.insight_title!r})>"
|
||||
)
|
||||
127
api/models/external_integration.py
Normal file
127
api/models/external_integration.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
External Integration model for tracking external system interactions.
|
||||
|
||||
This model logs all interactions with external systems like SyncroMSP,
|
||||
MSP Backups, Zapier webhooks, and other third-party integrations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
|
||||
class ExternalIntegration(Base, UUIDMixin):
|
||||
"""
|
||||
External integration tracking for third-party system interactions.
|
||||
|
||||
Logs all API calls, webhook triggers, and data exchanges with external
|
||||
systems. Useful for debugging, auditing, and understanding integration patterns.
|
||||
|
||||
Attributes:
|
||||
id: Unique identifier
|
||||
session_id: Reference to the session during which integration occurred
|
||||
work_item_id: Reference to the work item this integration relates to
|
||||
integration_type: Type of integration (syncro_ticket, msp_backups, zapier_webhook)
|
||||
external_id: External system's identifier (ticket ID, asset ID, etc.)
|
||||
external_url: Direct link to the external resource
|
||||
action: What action was performed (created, updated, linked, attached)
|
||||
direction: Direction of data flow (outbound, inbound)
|
||||
request_data: JSON data that was sent to external system
|
||||
response_data: JSON data received from external system
|
||||
created_at: When the integration occurred
|
||||
created_by: User who authorized the integration
|
||||
"""
|
||||
|
||||
__tablename__ = "external_integrations"
|
||||
|
||||
# Foreign keys
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Session during which integration occurred",
|
||||
)
|
||||
work_item_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Work item this integration relates to",
|
||||
)
|
||||
|
||||
# Integration details
|
||||
integration_type: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Type of integration (syncro_ticket, msp_backups, zapier_webhook, etc.)",
|
||||
)
|
||||
external_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="External system's identifier (ticket ID, asset ID, etc.)",
|
||||
)
|
||||
external_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Direct link to the external resource",
|
||||
)
|
||||
|
||||
# Action tracking
|
||||
action: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Action performed (created, updated, linked, attached)",
|
||||
)
|
||||
direction: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
nullable=True,
|
||||
doc="Direction of data flow (outbound, inbound)",
|
||||
)
|
||||
|
||||
# Data
|
||||
request_data: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON data sent to external system",
|
||||
)
|
||||
response_data: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON data received from external system",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the integration occurred",
|
||||
)
|
||||
created_by: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
nullable=True,
|
||||
doc="User who authorized the integration",
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_ext_int_session", "session_id"),
|
||||
Index("idx_ext_int_type", "integration_type"),
|
||||
Index("idx_ext_int_external", "external_id"),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
# session = relationship("Session", back_populates="external_integrations")
|
||||
# work_item = relationship("WorkItem", back_populates="external_integrations")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the external integration."""
|
||||
return (
|
||||
f"<ExternalIntegration(id={self.id!r}, "
|
||||
f"type={self.integration_type!r}, "
|
||||
f"action={self.action!r}, "
|
||||
f"external_id={self.external_id!r})>"
|
||||
)
|
||||
184
api/models/failure_pattern.py
Normal file
184
api/models/failure_pattern.py
Normal file
@@ -0,0 +1,184 @@
|
||||
"""
|
||||
Failure pattern model for tracking recurring environmental and compatibility issues.
|
||||
|
||||
This model identifies and documents patterns of failures across systems and clients,
|
||||
enabling proactive problem resolution and system insights.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
CHAR,
|
||||
CheckConstraint,
|
||||
ForeignKey,
|
||||
Index,
|
||||
Integer,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class FailurePattern(UUIDMixin, TimestampMixin, Base):
|
||||
"""
|
||||
Track recurring failure patterns and environmental limitations.
|
||||
|
||||
Documents patterns of failures that occur due to compatibility issues,
|
||||
environmental limitations, or system-specific constraints. Used to build
|
||||
institutional knowledge and prevent repeated mistakes.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
infrastructure_id: Reference to affected infrastructure
|
||||
client_id: Reference to affected client
|
||||
pattern_type: Type of failure pattern
|
||||
pattern_signature: Brief identifier for the pattern
|
||||
error_pattern: Regex or keywords to match this failure
|
||||
affected_systems: JSON array of affected systems
|
||||
triggering_commands: JSON array of command patterns that trigger this
|
||||
triggering_operations: JSON array of operation types that trigger this
|
||||
failure_description: Detailed description of the failure
|
||||
root_cause: Why this failure occurs
|
||||
recommended_solution: The recommended approach to avoid/fix this
|
||||
alternative_approaches: JSON array of alternative solutions
|
||||
occurrence_count: How many times this pattern has been observed
|
||||
first_seen: When this pattern was first observed
|
||||
last_seen: When this pattern was last observed
|
||||
severity: Impact level (blocking, major, minor, info)
|
||||
is_active: Whether this pattern is still relevant
|
||||
added_to_insights: Whether this has been added to insights.md
|
||||
created_at: Creation timestamp
|
||||
updated_at: Last update timestamp
|
||||
"""
|
||||
|
||||
__tablename__ = "failure_patterns"
|
||||
|
||||
# Foreign keys
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to affected infrastructure",
|
||||
)
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to affected client",
|
||||
)
|
||||
|
||||
# Pattern identification
|
||||
pattern_type: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Type of failure pattern",
|
||||
)
|
||||
pattern_signature: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Brief identifier for the pattern (e.g., 'PowerShell 7 cmdlets on Server 2008')",
|
||||
)
|
||||
error_pattern: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Regex or keywords to match this failure (e.g., 'Get-LocalUser.*not recognized')",
|
||||
)
|
||||
|
||||
# Context
|
||||
affected_systems: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of affected systems (e.g., ['all_server_2008', 'D2TESTNAS'])",
|
||||
)
|
||||
triggering_commands: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of command patterns that trigger this failure",
|
||||
)
|
||||
triggering_operations: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of operation types that trigger this failure",
|
||||
)
|
||||
|
||||
# Resolution
|
||||
failure_description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed description of the failure",
|
||||
)
|
||||
root_cause: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Why this failure occurs (e.g., 'Server 2008 only has PowerShell 2.0')",
|
||||
)
|
||||
recommended_solution: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="The recommended approach to avoid/fix this (e.g., 'Use Get-WmiObject instead')",
|
||||
)
|
||||
alternative_approaches: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of alternative solutions",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
occurrence_count: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
nullable=False,
|
||||
server_default="1",
|
||||
doc="How many times this pattern has been observed",
|
||||
)
|
||||
first_seen: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When this pattern was first observed",
|
||||
)
|
||||
last_seen: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When this pattern was last observed",
|
||||
)
|
||||
severity: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
nullable=True,
|
||||
doc="Impact level",
|
||||
)
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="1",
|
||||
doc="Whether this pattern is still relevant",
|
||||
)
|
||||
added_to_insights: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
nullable=False,
|
||||
server_default="0",
|
||||
doc="Whether this has been added to insights.md",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"pattern_type IN ('command_compatibility', 'version_mismatch', 'permission_denied', 'service_unavailable', 'configuration_error', 'environmental_limitation')",
|
||||
name="ck_failure_patterns_type",
|
||||
),
|
||||
CheckConstraint(
|
||||
"severity IN ('blocking', 'major', 'minor', 'info')",
|
||||
name="ck_failure_patterns_severity",
|
||||
),
|
||||
Index("idx_failure_infrastructure", "infrastructure_id"),
|
||||
Index("idx_failure_client", "client_id"),
|
||||
Index("idx_failure_pattern_type", "pattern_type"),
|
||||
Index("idx_failure_signature", "pattern_signature"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the failure pattern."""
|
||||
return f"<FailurePattern(id={self.id}, signature={self.pattern_signature}, severity={self.severity}, count={self.occurrence_count})>"
|
||||
99
api/models/file_change.py
Normal file
99
api/models/file_change.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
File change model for tracking file operations during work sessions.
|
||||
|
||||
This model records all file modifications, creations, deletions, and renames
|
||||
performed during work sessions.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class FileChange(UUIDMixin, Base):
|
||||
"""
|
||||
Track file changes during work sessions.
|
||||
|
||||
Records all file operations including creations, modifications, deletions,
|
||||
renames, and backups performed during work items.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
work_item_id: Reference to the work item
|
||||
session_id: Reference to the session
|
||||
file_path: Path to the file that was changed
|
||||
change_type: Type of change (created, modified, deleted, renamed, backed_up)
|
||||
backup_path: Path to backup if one was created
|
||||
size_bytes: File size in bytes
|
||||
description: Description of the change
|
||||
created_at: When the change was recorded
|
||||
"""
|
||||
|
||||
__tablename__ = "file_changes"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to work item",
|
||||
)
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to session",
|
||||
)
|
||||
|
||||
# File details
|
||||
file_path: Mapped[str] = mapped_column(
|
||||
String(1000),
|
||||
nullable=False,
|
||||
doc="Path to the file that was changed",
|
||||
)
|
||||
change_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Type of change",
|
||||
)
|
||||
backup_path: Mapped[Optional[str]] = mapped_column(
|
||||
String(1000),
|
||||
nullable=True,
|
||||
doc="Path to backup file if created",
|
||||
)
|
||||
size_bytes: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
nullable=True,
|
||||
doc="File size in bytes",
|
||||
)
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Description of the change",
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the change was recorded",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"change_type IN ('created', 'modified', 'deleted', 'renamed', 'backed_up')",
|
||||
name="ck_file_changes_type",
|
||||
),
|
||||
Index("idx_file_changes_work_item", "work_item_id"),
|
||||
Index("idx_file_changes_session", "session_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the file change."""
|
||||
return f"<FileChange(id={self.id}, file={self.file_path}, type={self.change_type})>"
|
||||
108
api/models/firewall_rule.py
Normal file
108
api/models/firewall_rule.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""
|
||||
Firewall rule model for network security rules.
|
||||
|
||||
Firewall rules track network security rules for documentation and audit trail
|
||||
purposes, including source/destination CIDRs, ports, protocols, and actions.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class FirewallRule(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Firewall rule model for network security rules.
|
||||
|
||||
Tracks firewall rules for documentation and audit trail purposes,
|
||||
including source and destination CIDRs, ports, protocols, and
|
||||
allow/deny/drop actions.
|
||||
|
||||
Attributes:
|
||||
infrastructure_id: Reference to the infrastructure this rule applies to
|
||||
rule_name: Name of the firewall rule
|
||||
source_cidr: Source CIDR notation
|
||||
destination_cidr: Destination CIDR notation
|
||||
port: Port number
|
||||
protocol: Protocol (tcp, udp, icmp)
|
||||
action: Action to take (allow, deny, drop)
|
||||
rule_order: Order of the rule in the firewall
|
||||
notes: Additional notes
|
||||
created_at: When the rule was created
|
||||
created_by: Who created the rule
|
||||
"""
|
||||
|
||||
__tablename__ = "firewall_rules"
|
||||
|
||||
# Foreign keys
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
doc="Reference to the infrastructure this rule applies to"
|
||||
)
|
||||
|
||||
# Rule identification
|
||||
rule_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Name of the firewall rule"
|
||||
)
|
||||
|
||||
# Rule configuration
|
||||
source_cidr: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Source CIDR notation"
|
||||
)
|
||||
|
||||
destination_cidr: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Destination CIDR notation"
|
||||
)
|
||||
|
||||
port: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Port number"
|
||||
)
|
||||
|
||||
protocol: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Protocol: tcp, udp, icmp"
|
||||
)
|
||||
|
||||
action: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Action: allow, deny, drop"
|
||||
)
|
||||
|
||||
# Rule ordering
|
||||
rule_order: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Order of the rule in the firewall"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes"
|
||||
)
|
||||
|
||||
# Audit information
|
||||
created_by: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Who created the rule"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"action IN ('allow', 'deny', 'drop')",
|
||||
name="ck_firewall_rules_action"
|
||||
),
|
||||
Index("idx_firewall_infra", "infrastructure_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the firewall rule."""
|
||||
return f"<FirewallRule(rule_name='{self.rule_name}', action='{self.action}')>"
|
||||
198
api/models/infrastructure.py
Normal file
198
api/models/infrastructure.py
Normal file
@@ -0,0 +1,198 @@
|
||||
"""
|
||||
Infrastructure model for hardware and virtual assets.
|
||||
|
||||
Infrastructure represents servers, network devices, workstations, and other
|
||||
IT assets with detailed configuration and environmental constraints.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .database_change import DatabaseChange
|
||||
from .deployment import Deployment
|
||||
from .infrastructure_change import InfrastructureChange
|
||||
|
||||
|
||||
class Infrastructure(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Infrastructure model representing IT assets.
|
||||
|
||||
Tracks physical servers, virtual machines, containers, network devices,
|
||||
NAS storage, workstations, and other infrastructure components with
|
||||
detailed configuration and environmental constraints.
|
||||
|
||||
Attributes:
|
||||
client_id: Reference to the client
|
||||
site_id: Reference to the site this infrastructure is located at
|
||||
asset_type: Type of asset (physical_server, virtual_machine, etc.)
|
||||
hostname: Hostname of the infrastructure
|
||||
ip_address: IP address (IPv4 or IPv6)
|
||||
mac_address: MAC address
|
||||
os: Operating system name
|
||||
os_version: Operating system version
|
||||
role_description: Description of the infrastructure's role
|
||||
parent_host_id: Reference to parent host for VMs/containers
|
||||
status: Current status (active, migration_source, etc.)
|
||||
environmental_notes: Special environmental constraints or notes
|
||||
powershell_version: PowerShell version if applicable
|
||||
shell_type: Shell type (bash, cmd, powershell, sh)
|
||||
package_manager: Package manager (apt, yum, chocolatey, none)
|
||||
has_gui: Whether the system has a GUI
|
||||
limitations: JSON array of limitations
|
||||
notes: Additional notes
|
||||
"""
|
||||
|
||||
__tablename__ = "infrastructure"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
doc="Reference to the client"
|
||||
)
|
||||
|
||||
site_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sites.id", ondelete="SET NULL"),
|
||||
doc="Reference to the site this infrastructure is located at"
|
||||
)
|
||||
|
||||
# Asset identification
|
||||
asset_type: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Type: physical_server, virtual_machine, container, network_device, nas_storage, workstation, firewall, domain_controller"
|
||||
)
|
||||
|
||||
hostname: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Hostname of the infrastructure"
|
||||
)
|
||||
|
||||
ip_address: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
doc="IP address (IPv4 or IPv6)"
|
||||
)
|
||||
|
||||
mac_address: Mapped[Optional[str]] = mapped_column(
|
||||
String(17),
|
||||
doc="MAC address"
|
||||
)
|
||||
|
||||
# Operating system
|
||||
os: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Operating system name (e.g., 'Ubuntu 22.04', 'Windows Server 2022')"
|
||||
)
|
||||
|
||||
os_version: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Operating system version (e.g., '6.22', '2008 R2', '22.04')"
|
||||
)
|
||||
|
||||
# Role and hierarchy
|
||||
role_description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Description of the infrastructure's role"
|
||||
)
|
||||
|
||||
parent_host_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="SET NULL"),
|
||||
doc="Reference to parent host for VMs/containers"
|
||||
)
|
||||
|
||||
# Status
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="active",
|
||||
server_default="active",
|
||||
nullable=False,
|
||||
doc="Status: active, migration_source, migration_destination, decommissioned"
|
||||
)
|
||||
|
||||
# Environmental constraints
|
||||
environmental_notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Special environmental constraints or notes (e.g., 'Manual WINS install', 'ReadyNAS OS, SMB1 only')"
|
||||
)
|
||||
|
||||
powershell_version: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="PowerShell version (e.g., '2.0', '5.1', '7.4')"
|
||||
)
|
||||
|
||||
shell_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Shell type: bash, cmd, powershell, sh"
|
||||
)
|
||||
|
||||
package_manager: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Package manager: apt, yum, chocolatey, none"
|
||||
)
|
||||
|
||||
has_gui: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
nullable=False,
|
||||
doc="Whether the system has a GUI"
|
||||
)
|
||||
|
||||
limitations: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc='JSON array of limitations (e.g., ["no_ps7", "smb1_only", "dos_6.22_commands"])'
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
deployments: Mapped[list["Deployment"]] = relationship(
|
||||
"Deployment",
|
||||
back_populates="infrastructure",
|
||||
doc="Relationship to Deployment model"
|
||||
)
|
||||
|
||||
database_changes: Mapped[list["DatabaseChange"]] = relationship(
|
||||
"DatabaseChange",
|
||||
back_populates="infrastructure",
|
||||
doc="Relationship to DatabaseChange model"
|
||||
)
|
||||
|
||||
infrastructure_changes: Mapped[list["InfrastructureChange"]] = relationship(
|
||||
"InfrastructureChange",
|
||||
back_populates="infrastructure",
|
||||
doc="Relationship to InfrastructureChange model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"asset_type IN ('physical_server', 'virtual_machine', 'container', 'network_device', 'nas_storage', 'workstation', 'firewall', 'domain_controller')",
|
||||
name="ck_infrastructure_asset_type"
|
||||
),
|
||||
CheckConstraint(
|
||||
"status IN ('active', 'migration_source', 'migration_destination', 'decommissioned')",
|
||||
name="ck_infrastructure_status"
|
||||
),
|
||||
Index("idx_infrastructure_client", "client_id"),
|
||||
Index("idx_infrastructure_type", "asset_type"),
|
||||
Index("idx_infrastructure_hostname", "hostname"),
|
||||
Index("idx_infrastructure_parent", "parent_host_id"),
|
||||
Index("idx_infrastructure_os", "os"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the infrastructure."""
|
||||
return f"<Infrastructure(hostname='{self.hostname}', asset_type='{self.asset_type}')>"
|
||||
165
api/models/infrastructure_change.py
Normal file
165
api/models/infrastructure_change.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""
|
||||
Infrastructure change model for tracking infrastructure modifications.
|
||||
|
||||
Tracks changes to infrastructure including DNS, firewall, routing, SSL,
|
||||
containers, and other infrastructure components with audit trail and
|
||||
rollback procedures.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .infrastructure import Infrastructure
|
||||
from .session import Session
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class InfrastructureChange(Base, UUIDMixin):
|
||||
"""
|
||||
Infrastructure change model for audit trail of infrastructure modifications.
|
||||
|
||||
Records changes to infrastructure components including DNS configuration,
|
||||
firewall rules, routing tables, SSL certificates, containers, service
|
||||
configurations, hardware, network, and storage. Tracks before/after state,
|
||||
rollback procedures, and verification status for operational safety.
|
||||
|
||||
Attributes:
|
||||
work_item_id: Foreign key to work_items table (required)
|
||||
session_id: Foreign key to sessions table (required)
|
||||
infrastructure_id: Foreign key to infrastructure table
|
||||
change_type: Type of infrastructure change
|
||||
target_system: System or component that was modified
|
||||
before_state: State before the change (configuration snapshot)
|
||||
after_state: State after the change (configuration snapshot)
|
||||
is_permanent: Whether this is a permanent change or temporary
|
||||
rollback_procedure: Instructions for rolling back this change
|
||||
verification_performed: Whether verification was performed after change
|
||||
verification_notes: Notes about verification testing
|
||||
created_at: When the change was made
|
||||
work_item: Relationship to WorkItem model
|
||||
session: Relationship to Session model
|
||||
infrastructure: Relationship to Infrastructure model
|
||||
"""
|
||||
|
||||
__tablename__ = "infrastructure_changes"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to work_items table (required)"
|
||||
)
|
||||
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to sessions table (required)"
|
||||
)
|
||||
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to infrastructure table"
|
||||
)
|
||||
|
||||
# Change details
|
||||
change_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Type of change: dns, firewall, routing, ssl, container, service_config, hardware, network, storage"
|
||||
)
|
||||
|
||||
target_system: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="System or component that was modified (e.g., 'jupiter', 'UDM-Pro', 'web-container')"
|
||||
)
|
||||
|
||||
# State tracking
|
||||
before_state: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Configuration or state before the change (snapshot, config dump, etc.)"
|
||||
)
|
||||
|
||||
after_state: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Configuration or state after the change (snapshot, config dump, etc.)"
|
||||
)
|
||||
|
||||
# Change characteristics
|
||||
is_permanent: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
nullable=False,
|
||||
doc="Whether this is a permanent change or temporary (e.g., for testing)"
|
||||
)
|
||||
|
||||
rollback_procedure: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Instructions for rolling back this change if needed"
|
||||
)
|
||||
|
||||
# Verification
|
||||
verification_performed: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether verification testing was performed after the change"
|
||||
)
|
||||
|
||||
verification_notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Notes about verification testing (what was tested, results, etc.)"
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the change was made"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
work_item: Mapped["WorkItem"] = relationship(
|
||||
"WorkItem",
|
||||
back_populates="infrastructure_changes",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
session: Mapped["Session"] = relationship(
|
||||
"Session",
|
||||
back_populates="infrastructure_changes",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
infrastructure: Mapped[Optional["Infrastructure"]] = relationship(
|
||||
"Infrastructure",
|
||||
back_populates="infrastructure_changes",
|
||||
doc="Relationship to Infrastructure model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"change_type IN ('dns', 'firewall', 'routing', 'ssl', 'container', 'service_config', 'hardware', 'network', 'storage')",
|
||||
name="ck_infrastructure_changes_type"
|
||||
),
|
||||
Index("idx_infra_changes_work_item", "work_item_id"),
|
||||
Index("idx_infra_changes_session", "session_id"),
|
||||
Index("idx_infra_changes_infrastructure", "infrastructure_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the infrastructure change."""
|
||||
return f"<InfrastructureChange(type='{self.change_type}', target='{self.target_system}', permanent={self.is_permanent})>"
|
||||
56
api/models/infrastructure_tag.py
Normal file
56
api/models/infrastructure_tag.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Infrastructure Tag junction table for many-to-many relationship.
|
||||
|
||||
This model creates the many-to-many relationship between infrastructure and tags,
|
||||
allowing flexible categorization and filtering of infrastructure items.
|
||||
"""
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, PrimaryKeyConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class InfrastructureTag(Base):
|
||||
"""
|
||||
Junction table linking infrastructure to tags.
|
||||
|
||||
Implements many-to-many relationship between infrastructure and tags tables.
|
||||
Allows infrastructure items to be tagged with multiple categories for filtering
|
||||
and organization (e.g., docker, postgresql, backup-server, production).
|
||||
|
||||
Attributes:
|
||||
infrastructure_id: Foreign key to infrastructure table
|
||||
tag_id: Foreign key to tags table
|
||||
"""
|
||||
|
||||
__tablename__ = "infrastructure_tags"
|
||||
|
||||
# Composite primary key
|
||||
infrastructure_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Infrastructure item being tagged",
|
||||
)
|
||||
tag_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("tags.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Tag applied to the infrastructure",
|
||||
)
|
||||
|
||||
# Table constraints and indexes
|
||||
__table_args__ = (
|
||||
PrimaryKeyConstraint("infrastructure_id", "tag_id"),
|
||||
Index("idx_it_infrastructure", "infrastructure_id"),
|
||||
Index("idx_it_tag", "tag_id"),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
# infrastructure = relationship("Infrastructure", back_populates="tags")
|
||||
# tag = relationship("Tag", back_populates="infrastructure_items")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the infrastructure tag relationship."""
|
||||
return f"<InfrastructureTag(infrastructure_id={self.infrastructure_id!r}, tag_id={self.tag_id!r})>"
|
||||
130
api/models/integration_credential.py
Normal file
130
api/models/integration_credential.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
Integration Credential model for storing external system authentication.
|
||||
|
||||
This model securely stores OAuth tokens, API keys, and other credentials
|
||||
needed to authenticate with external integrations like SyncroMSP, MSP Backups, etc.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
CheckConstraint,
|
||||
Index,
|
||||
LargeBinary,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class IntegrationCredential(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Integration credentials for external system authentication.
|
||||
|
||||
Stores encrypted credentials (API keys, OAuth tokens) for integrations.
|
||||
Each integration type has one record with its authentication credentials.
|
||||
All sensitive data is encrypted using AES-256-GCM.
|
||||
|
||||
Attributes:
|
||||
id: Unique identifier
|
||||
integration_name: Unique name of the integration (syncro, msp_backups, zapier)
|
||||
credential_type: Type of credential (oauth, api_key, basic_auth)
|
||||
api_key_encrypted: Encrypted API key (if credential_type is api_key)
|
||||
oauth_token_encrypted: Encrypted OAuth access token
|
||||
oauth_refresh_token_encrypted: Encrypted OAuth refresh token
|
||||
oauth_expires_at: When the OAuth token expires
|
||||
api_base_url: Base URL for API calls
|
||||
webhook_url: Webhook URL for receiving callbacks
|
||||
is_active: Whether this integration is currently active
|
||||
last_tested_at: When the connection was last tested
|
||||
last_test_status: Result of last connection test
|
||||
created_at: When the credential was created
|
||||
updated_at: When the credential was last updated
|
||||
"""
|
||||
|
||||
__tablename__ = "integration_credentials"
|
||||
|
||||
# Integration identification
|
||||
integration_name: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
unique=True,
|
||||
nullable=False,
|
||||
doc="Unique name of integration (syncro, msp_backups, zapier)",
|
||||
)
|
||||
|
||||
# Credential type and encrypted values
|
||||
credential_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Type of credential",
|
||||
)
|
||||
api_key_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted API key (AES-256-GCM)",
|
||||
)
|
||||
oauth_token_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted OAuth access token",
|
||||
)
|
||||
oauth_refresh_token_encrypted: Mapped[Optional[bytes]] = mapped_column(
|
||||
LargeBinary,
|
||||
nullable=True,
|
||||
doc="Encrypted OAuth refresh token",
|
||||
)
|
||||
oauth_expires_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="When the OAuth token expires",
|
||||
)
|
||||
|
||||
# Endpoints
|
||||
api_base_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Base URL for API calls",
|
||||
)
|
||||
webhook_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Webhook URL for receiving callbacks",
|
||||
)
|
||||
|
||||
# Status
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
nullable=False,
|
||||
doc="Whether this integration is active",
|
||||
)
|
||||
last_tested_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="When the connection was last tested",
|
||||
)
|
||||
last_test_status: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Result of last connection test",
|
||||
)
|
||||
|
||||
# Indexes and constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"credential_type IN ('oauth', 'api_key', 'basic_auth')",
|
||||
name="ck_integration_credential_type",
|
||||
),
|
||||
Index("idx_int_cred_name", "integration_name"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the integration credential."""
|
||||
return (
|
||||
f"<IntegrationCredential(id={self.id!r}, "
|
||||
f"name={self.integration_name!r}, "
|
||||
f"type={self.credential_type!r}, "
|
||||
f"active={self.is_active})>"
|
||||
)
|
||||
86
api/models/m365_tenant.py
Normal file
86
api/models/m365_tenant.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Microsoft 365 tenant model for tracking M365 tenants.
|
||||
|
||||
M365 tenants represent Microsoft 365 tenant configurations for clients
|
||||
including tenant IDs, domains, and CIPP integration.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class M365Tenant(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Microsoft 365 tenant model for tracking M365 configurations.
|
||||
|
||||
Tracks Microsoft 365 tenant information including tenant IDs,
|
||||
domain names, admin contacts, and CIPP portal integration.
|
||||
|
||||
Attributes:
|
||||
client_id: Reference to the client
|
||||
tenant_id: Microsoft tenant ID (UUID)
|
||||
tenant_name: Tenant name (e.g., "dataforth.com")
|
||||
default_domain: Default domain (e.g., "dataforthcorp.onmicrosoft.com")
|
||||
admin_email: Administrator email address
|
||||
cipp_name: Name in CIPP portal
|
||||
notes: Additional notes
|
||||
"""
|
||||
|
||||
__tablename__ = "m365_tenants"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
doc="Reference to the client"
|
||||
)
|
||||
|
||||
# Tenant identification
|
||||
tenant_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
nullable=False,
|
||||
unique=True,
|
||||
doc="Microsoft tenant ID (UUID)"
|
||||
)
|
||||
|
||||
tenant_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Tenant name (e.g., 'dataforth.com')"
|
||||
)
|
||||
|
||||
default_domain: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Default domain (e.g., 'dataforthcorp.onmicrosoft.com')"
|
||||
)
|
||||
|
||||
# Contact information
|
||||
admin_email: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Administrator email address"
|
||||
)
|
||||
|
||||
# CIPP integration
|
||||
cipp_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Name in CIPP portal"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_m365_client", "client_id"),
|
||||
Index("idx_m365_tenant_id", "tenant_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the M365 tenant."""
|
||||
return f"<M365Tenant(tenant_name='{self.tenant_name}', tenant_id='{self.tenant_id}')>"
|
||||
263
api/models/machine.py
Normal file
263
api/models/machine.py
Normal file
@@ -0,0 +1,263 @@
|
||||
"""
|
||||
Machine model for technician's machines used for MSP work.
|
||||
|
||||
Tracks laptops, desktops, and workstations with their capabilities,
|
||||
installed tools, MCP servers, and skills.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, Index, Integer, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .session import Session
|
||||
|
||||
|
||||
class Machine(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Machine model representing technician's machines used for MSP work.
|
||||
|
||||
Tracks machine identification, capabilities, installed tools, MCP servers,
|
||||
skills, and network context. Machines are auto-detected on session start
|
||||
using hostname, username, platform, and home directory.
|
||||
|
||||
Attributes:
|
||||
hostname: Machine hostname from `hostname` command
|
||||
machine_fingerprint: SHA256 hash of hostname + username + platform + home_directory
|
||||
friendly_name: Human-readable name like "Main Laptop" or "Home Desktop"
|
||||
machine_type: Type of machine (laptop, desktop, workstation, vm)
|
||||
platform: Operating system platform (win32, darwin, linux)
|
||||
os_version: Operating system version
|
||||
username: Username from `whoami` command
|
||||
home_directory: User home directory path
|
||||
has_vpn_access: Whether machine can connect to client networks
|
||||
vpn_profiles: JSON array of available VPN profiles
|
||||
has_docker: Whether Docker is installed
|
||||
has_powershell: Whether PowerShell is installed
|
||||
powershell_version: PowerShell version if installed
|
||||
has_ssh: Whether SSH is available
|
||||
has_git: Whether Git is installed
|
||||
typical_network_location: Typical network location (home, office, mobile)
|
||||
static_ip: Static IP address if applicable
|
||||
claude_working_directory: Primary working directory for Claude Code
|
||||
additional_working_dirs: JSON array of additional working directories
|
||||
installed_tools: JSON object with tool versions
|
||||
available_mcps: JSON array of available MCP servers
|
||||
mcp_capabilities: JSON object with MCP capabilities
|
||||
available_skills: JSON array of available skills
|
||||
skill_paths: JSON object mapping skill names to paths
|
||||
preferred_shell: Preferred shell (powershell, bash, zsh, cmd)
|
||||
package_manager_commands: JSON object with package manager commands
|
||||
is_primary: Whether this is the primary machine
|
||||
is_active: Whether machine is active
|
||||
last_seen: Last time machine was seen
|
||||
last_session_id: UUID of last session from this machine
|
||||
notes: Additional notes about the machine
|
||||
"""
|
||||
|
||||
__tablename__ = "machines"
|
||||
|
||||
# Machine identification (auto-detected)
|
||||
hostname: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
unique=True,
|
||||
doc="Machine hostname from `hostname` command"
|
||||
)
|
||||
|
||||
machine_fingerprint: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
unique=True,
|
||||
doc="SHA256 hash: hostname + username + platform + home_directory"
|
||||
)
|
||||
|
||||
# Environment details
|
||||
friendly_name: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Human-readable name like 'Main Laptop' or 'Home Desktop'"
|
||||
)
|
||||
|
||||
machine_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Type of machine: laptop, desktop, workstation, vm"
|
||||
)
|
||||
|
||||
platform: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Operating system platform: win32, darwin, linux"
|
||||
)
|
||||
|
||||
os_version: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Operating system version"
|
||||
)
|
||||
|
||||
username: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Username from `whoami` command"
|
||||
)
|
||||
|
||||
home_directory: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="User home directory path"
|
||||
)
|
||||
|
||||
# Capabilities
|
||||
has_vpn_access: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
doc="Whether machine can connect to client networks"
|
||||
)
|
||||
|
||||
vpn_profiles: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of available VPN profiles"
|
||||
)
|
||||
|
||||
has_docker: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
doc="Whether Docker is installed"
|
||||
)
|
||||
|
||||
has_powershell: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
doc="Whether PowerShell is installed"
|
||||
)
|
||||
|
||||
powershell_version: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="PowerShell version if installed"
|
||||
)
|
||||
|
||||
has_ssh: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
doc="Whether SSH is available"
|
||||
)
|
||||
|
||||
has_git: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
doc="Whether Git is installed"
|
||||
)
|
||||
|
||||
# Network context
|
||||
typical_network_location: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Typical network location: home, office, mobile"
|
||||
)
|
||||
|
||||
static_ip: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
doc="Static IP address if applicable (supports IPv4/IPv6)"
|
||||
)
|
||||
|
||||
# Claude Code context
|
||||
claude_working_directory: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Primary working directory for Claude Code"
|
||||
)
|
||||
|
||||
additional_working_dirs: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of additional working directories"
|
||||
)
|
||||
|
||||
# Tool versions
|
||||
installed_tools: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object with tool versions like {\"git\": \"2.40\", \"docker\": \"24.0\"}"
|
||||
)
|
||||
|
||||
# MCP Servers & Skills
|
||||
available_mcps: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of available MCP servers"
|
||||
)
|
||||
|
||||
mcp_capabilities: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object with MCP capabilities"
|
||||
)
|
||||
|
||||
available_skills: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of available skills"
|
||||
)
|
||||
|
||||
skill_paths: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object mapping skill names to paths"
|
||||
)
|
||||
|
||||
# OS-Specific Commands
|
||||
preferred_shell: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Preferred shell: powershell, bash, zsh, cmd"
|
||||
)
|
||||
|
||||
package_manager_commands: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON object with package manager commands"
|
||||
)
|
||||
|
||||
# Status
|
||||
is_primary: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
doc="Whether this is the primary machine"
|
||||
)
|
||||
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=True,
|
||||
server_default="1",
|
||||
doc="Whether machine is currently active"
|
||||
)
|
||||
|
||||
last_seen: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="Last time machine was seen"
|
||||
)
|
||||
|
||||
last_session_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
doc="UUID of last session from this machine"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the machine"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
sessions: Mapped[list["Session"]] = relationship(
|
||||
"Session",
|
||||
back_populates="machine",
|
||||
doc="Sessions associated with this machine"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_machines_hostname", "hostname"),
|
||||
Index("idx_machines_fingerprint", "machine_fingerprint"),
|
||||
Index("idx_machines_is_active", "is_active"),
|
||||
Index("idx_machines_platform", "platform"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the machine."""
|
||||
return f"<Machine(hostname='{self.hostname}', friendly_name='{self.friendly_name}', platform='{self.platform}')>"
|
||||
98
api/models/network.py
Normal file
98
api/models/network.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""
|
||||
Network model for network segments and VLANs.
|
||||
|
||||
Networks represent network segments, VLANs, VPN networks, and other
|
||||
logical or physical network divisions.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Network(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Network model representing network segments and VLANs.
|
||||
|
||||
Tracks network segments including LANs, VPNs, VLANs, isolated networks,
|
||||
and DMZs with CIDR notation, gateway IPs, and VLAN IDs.
|
||||
|
||||
Attributes:
|
||||
client_id: Reference to the client
|
||||
site_id: Reference to the site
|
||||
network_name: Name of the network
|
||||
network_type: Type of network (lan, vpn, vlan, isolated, dmz)
|
||||
cidr: Network CIDR notation (e.g., "192.168.0.0/24")
|
||||
gateway_ip: Gateway IP address
|
||||
vlan_id: VLAN ID if applicable
|
||||
notes: Additional notes
|
||||
created_at: When the network was created
|
||||
"""
|
||||
|
||||
__tablename__ = "networks"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
doc="Reference to the client"
|
||||
)
|
||||
|
||||
site_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sites.id", ondelete="CASCADE"),
|
||||
doc="Reference to the site"
|
||||
)
|
||||
|
||||
# Network identification
|
||||
network_name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Name of the network"
|
||||
)
|
||||
|
||||
network_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Type: lan, vpn, vlan, isolated, dmz"
|
||||
)
|
||||
|
||||
# Network configuration
|
||||
cidr: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Network CIDR notation (e.g., '192.168.0.0/24')"
|
||||
)
|
||||
|
||||
gateway_ip: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
doc="Gateway IP address"
|
||||
)
|
||||
|
||||
vlan_id: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="VLAN ID if applicable"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes"
|
||||
)
|
||||
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"network_type IN ('lan', 'vpn', 'vlan', 'isolated', 'dmz')",
|
||||
name="ck_networks_type"
|
||||
),
|
||||
Index("idx_networks_client", "client_id"),
|
||||
Index("idx_networks_site", "site_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the network."""
|
||||
return f"<Network(network_name='{self.network_name}', cidr='{self.cidr}')>"
|
||||
178
api/models/operation_failure.py
Normal file
178
api/models/operation_failure.py
Normal file
@@ -0,0 +1,178 @@
|
||||
"""
|
||||
Operation failure model for tracking non-command failures.
|
||||
|
||||
Tracks failures from API calls, file operations, network requests, and other
|
||||
operations (distinct from command execution failures tracked in command_runs).
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .session import Session
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class OperationFailure(Base, UUIDMixin):
|
||||
"""
|
||||
Operation failure model for non-command failures.
|
||||
|
||||
Tracks failures from API calls, file operations, network requests,
|
||||
database queries, and external integrations. Used for troubleshooting,
|
||||
pattern detection, and system reliability monitoring.
|
||||
|
||||
Distinct from CommandRun failures which track shell command execution.
|
||||
This tracks programmatic operations and API interactions.
|
||||
|
||||
Attributes:
|
||||
session_id: Foreign key to sessions table
|
||||
work_item_id: Foreign key to work_items table
|
||||
operation_type: Type of operation that failed
|
||||
operation_description: Detailed description of what was attempted
|
||||
target_system: Host, URL, or service name that was targeted
|
||||
error_message: Error message from the failure
|
||||
error_code: HTTP status, exit code, or error number
|
||||
failure_category: Category of failure (timeout, authentication, etc.)
|
||||
stack_trace: Stack trace if available
|
||||
resolution_applied: Description of how the failure was resolved
|
||||
resolved: Whether the failure has been resolved
|
||||
resolved_at: When the failure was resolved
|
||||
request_data: JSON data of what was attempted
|
||||
response_data: JSON data of error response
|
||||
environment_snapshot: JSON snapshot of relevant environment variables/versions
|
||||
created_at: When the failure occurred
|
||||
session: Relationship to Session model
|
||||
work_item: Relationship to WorkItem model
|
||||
"""
|
||||
|
||||
__tablename__ = "operation_failures"
|
||||
|
||||
# Foreign keys
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
doc="Foreign key to sessions table"
|
||||
)
|
||||
|
||||
work_item_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
doc="Foreign key to work_items table"
|
||||
)
|
||||
|
||||
# Operation details
|
||||
operation_type: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Type of operation: api_call, file_operation, network_request, database_query, external_integration, service_restart"
|
||||
)
|
||||
|
||||
operation_description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed description of what was attempted"
|
||||
)
|
||||
|
||||
target_system: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Host, URL, or service name that was targeted"
|
||||
)
|
||||
|
||||
# Failure details
|
||||
error_message: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Error message from the failure"
|
||||
)
|
||||
|
||||
error_code: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="HTTP status code, exit code, or error number"
|
||||
)
|
||||
|
||||
failure_category: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Category of failure: timeout, authentication, not_found, permission_denied, etc."
|
||||
)
|
||||
|
||||
stack_trace: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Stack trace if available"
|
||||
)
|
||||
|
||||
# Resolution tracking
|
||||
resolution_applied: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Description of how the failure was resolved"
|
||||
)
|
||||
|
||||
resolved: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether the failure has been resolved"
|
||||
)
|
||||
|
||||
resolved_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="When the failure was resolved"
|
||||
)
|
||||
|
||||
# Context data (JSON stored as text)
|
||||
request_data: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON data of what was attempted"
|
||||
)
|
||||
|
||||
response_data: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON data of error response"
|
||||
)
|
||||
|
||||
environment_snapshot: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON snapshot of relevant environment variables, versions, etc."
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the failure occurred"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
session: Mapped[Optional["Session"]] = relationship(
|
||||
"Session",
|
||||
back_populates="operation_failures",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
work_item: Mapped[Optional["WorkItem"]] = relationship(
|
||||
"WorkItem",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"operation_type IN ('api_call', 'file_operation', 'network_request', 'database_query', 'external_integration', 'service_restart')",
|
||||
name="ck_operation_failures_type"
|
||||
),
|
||||
Index("idx_op_failure_session", "session_id"),
|
||||
Index("idx_op_failure_type", "operation_type"),
|
||||
Index("idx_op_failure_category", "failure_category"),
|
||||
Index("idx_op_failure_resolved", "resolved"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the operation failure."""
|
||||
return f"<OperationFailure(type='{self.operation_type}', target='{self.target_system}', resolved={self.resolved})>"
|
||||
154
api/models/pending_task.py
Normal file
154
api/models/pending_task.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""
|
||||
Pending task model for tracking open items across clients and projects.
|
||||
|
||||
Tracks tasks that need to be completed, their priority, status, and
|
||||
assignment information.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, DATE, ForeignKey, Index, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .client import Client
|
||||
from .project import Project
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class PendingTask(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Pending task model for open items across all clients and projects.
|
||||
|
||||
Tracks tasks that need to be completed with priority, blocking information,
|
||||
assignment, and due dates. These represent work items that are planned or
|
||||
in progress but not yet completed.
|
||||
|
||||
Attributes:
|
||||
client_id: Foreign key to clients table
|
||||
project_id: Foreign key to projects table
|
||||
work_item_id: Foreign key to work_items table (if task linked to work item)
|
||||
title: Brief title of the task
|
||||
description: Detailed description of the task
|
||||
priority: Task priority (critical, high, medium, low)
|
||||
blocked_by: Description of what is blocking this task
|
||||
assigned_to: Name of person assigned to the task
|
||||
due_date: Due date for the task
|
||||
status: Task status (pending, in_progress, blocked, completed, cancelled)
|
||||
completed_at: When the task was completed
|
||||
client: Relationship to Client model
|
||||
project: Relationship to Project model
|
||||
work_item: Relationship to WorkItem model
|
||||
"""
|
||||
|
||||
__tablename__ = "pending_tasks"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
doc="Foreign key to clients table"
|
||||
)
|
||||
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
doc="Foreign key to projects table"
|
||||
)
|
||||
|
||||
work_item_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to work_items table (if task linked to work item)"
|
||||
)
|
||||
|
||||
# Task details
|
||||
title: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Brief title of the task"
|
||||
)
|
||||
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Detailed description of the task"
|
||||
)
|
||||
|
||||
# Priority and blocking
|
||||
priority: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Task priority: critical, high, medium, low"
|
||||
)
|
||||
|
||||
blocked_by: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Description of what is blocking this task"
|
||||
)
|
||||
|
||||
# Assignment
|
||||
assigned_to: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Name of person assigned to the task"
|
||||
)
|
||||
|
||||
# Scheduling
|
||||
due_date: Mapped[Optional[date]] = mapped_column(
|
||||
DATE,
|
||||
doc="Due date for the task"
|
||||
)
|
||||
|
||||
# Status
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="pending",
|
||||
server_default="pending",
|
||||
nullable=False,
|
||||
doc="Task status: pending, in_progress, blocked, completed, cancelled"
|
||||
)
|
||||
|
||||
# Completion tracking
|
||||
completed_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="When the task was completed"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
client: Mapped[Optional["Client"]] = relationship(
|
||||
"Client",
|
||||
back_populates="pending_tasks",
|
||||
doc="Relationship to Client model"
|
||||
)
|
||||
|
||||
project: Mapped[Optional["Project"]] = relationship(
|
||||
"Project",
|
||||
back_populates="pending_tasks",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
work_item: Mapped[Optional["WorkItem"]] = relationship(
|
||||
"WorkItem",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"priority IN ('critical', 'high', 'medium', 'low')",
|
||||
name="ck_pending_tasks_priority"
|
||||
),
|
||||
CheckConstraint(
|
||||
"status IN ('pending', 'in_progress', 'blocked', 'completed', 'cancelled')",
|
||||
name="ck_pending_tasks_status"
|
||||
),
|
||||
Index("idx_pending_tasks_client", "client_id"),
|
||||
Index("idx_pending_tasks_status", "status"),
|
||||
Index("idx_pending_tasks_priority", "priority"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the pending task."""
|
||||
return f"<PendingTask(title='{self.title}', status='{self.status}', priority='{self.priority}')>"
|
||||
127
api/models/problem_solution.py
Normal file
127
api/models/problem_solution.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
Problem solution model for tracking issues and their resolutions.
|
||||
|
||||
This model captures problems encountered during work sessions, the investigation
|
||||
process, root cause analysis, and solutions applied.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from api.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class ProblemSolution(UUIDMixin, Base):
|
||||
"""
|
||||
Track problems and their solutions.
|
||||
|
||||
Records issues encountered during work, including symptoms, investigation steps,
|
||||
root cause analysis, solutions applied, and verification methods.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
work_item_id: Reference to the work item
|
||||
session_id: Reference to the session
|
||||
problem_description: Detailed description of the problem
|
||||
symptom: What the user observed/experienced
|
||||
error_message: Exact error code or message
|
||||
investigation_steps: JSON array of diagnostic commands/steps taken
|
||||
root_cause: Identified root cause of the problem
|
||||
solution_applied: The solution that was implemented
|
||||
verification_method: How the fix was verified
|
||||
rollback_plan: Plan to rollback if solution causes issues
|
||||
recurrence_count: Number of times this problem has occurred
|
||||
created_at: When the problem was recorded
|
||||
"""
|
||||
|
||||
__tablename__ = "problem_solutions"
|
||||
|
||||
# Foreign keys
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to work item",
|
||||
)
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to session",
|
||||
)
|
||||
|
||||
# Problem details
|
||||
problem_description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed description of the problem",
|
||||
)
|
||||
symptom: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="What the user observed/experienced",
|
||||
)
|
||||
error_message: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Exact error code or message",
|
||||
)
|
||||
|
||||
# Investigation and analysis
|
||||
investigation_steps: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="JSON array of diagnostic commands/steps taken",
|
||||
)
|
||||
root_cause: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Identified root cause of the problem",
|
||||
)
|
||||
|
||||
# Solution details
|
||||
solution_applied: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="The solution that was implemented",
|
||||
)
|
||||
verification_method: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="How the fix was verified",
|
||||
)
|
||||
rollback_plan: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Plan to rollback if solution causes issues",
|
||||
)
|
||||
|
||||
# Recurrence tracking
|
||||
recurrence_count: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
nullable=False,
|
||||
server_default="1",
|
||||
doc="Number of times this problem has occurred",
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the problem was recorded",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
Index("idx_problems_work_item", "work_item_id"),
|
||||
Index("idx_problems_session", "session_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the problem solution."""
|
||||
desc_preview = self.problem_description[:50] + "..." if len(self.problem_description) > 50 else self.problem_description
|
||||
return f"<ProblemSolution(id={self.id}, problem={desc_preview}, recurrence={self.recurrence_count})>"
|
||||
161
api/models/project.py
Normal file
161
api/models/project.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""
|
||||
Project model for individual projects and engagements.
|
||||
|
||||
Tracks client projects, internal products, infrastructure work, and development tools.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import DATE, ForeignKey, Index, Numeric, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .client import Client
|
||||
from .pending_task import PendingTask
|
||||
from .session import Session
|
||||
|
||||
|
||||
class Project(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Project model representing individual projects and engagements.
|
||||
|
||||
Tracks client projects, internal products, infrastructure work,
|
||||
websites, development tools, and documentation projects. Each project
|
||||
belongs to a client and has status, priority, and time tracking.
|
||||
|
||||
Attributes:
|
||||
client_id: Foreign key to clients table
|
||||
name: Project name
|
||||
slug: URL-safe slug (directory name)
|
||||
category: Project category
|
||||
status: Current status (complete, working, blocked, pending, critical, deferred)
|
||||
priority: Priority level (critical, high, medium, low)
|
||||
description: Project description
|
||||
started_date: Date project started
|
||||
target_completion_date: Target completion date
|
||||
completed_date: Actual completion date
|
||||
estimated_hours: Estimated hours for completion
|
||||
actual_hours: Actual hours spent
|
||||
gitea_repo_url: Gitea repository URL if applicable
|
||||
notes: Additional notes about the project
|
||||
client: Relationship to Client model
|
||||
"""
|
||||
|
||||
__tablename__ = "projects"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[str] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Foreign key to clients table"
|
||||
)
|
||||
|
||||
# Project identification
|
||||
name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Project name"
|
||||
)
|
||||
|
||||
slug: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
unique=True,
|
||||
doc="URL-safe slug (directory name like 'dataforth-dos')"
|
||||
)
|
||||
|
||||
# Categorization
|
||||
category: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Project category: client_project, internal_product, infrastructure, website, development_tool, documentation"
|
||||
)
|
||||
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="working",
|
||||
server_default="working",
|
||||
doc="Status: complete, working, blocked, pending, critical, deferred"
|
||||
)
|
||||
|
||||
priority: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Priority level: critical, high, medium, low"
|
||||
)
|
||||
|
||||
# Description
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Project description"
|
||||
)
|
||||
|
||||
# Timeline
|
||||
started_date: Mapped[Optional[date]] = mapped_column(
|
||||
DATE,
|
||||
doc="Date project started"
|
||||
)
|
||||
|
||||
target_completion_date: Mapped[Optional[date]] = mapped_column(
|
||||
DATE,
|
||||
doc="Target completion date"
|
||||
)
|
||||
|
||||
completed_date: Mapped[Optional[date]] = mapped_column(
|
||||
DATE,
|
||||
doc="Actual completion date"
|
||||
)
|
||||
|
||||
# Time tracking
|
||||
estimated_hours: Mapped[Optional[float]] = mapped_column(
|
||||
Numeric(10, 2),
|
||||
doc="Estimated hours for completion"
|
||||
)
|
||||
|
||||
actual_hours: Mapped[Optional[float]] = mapped_column(
|
||||
Numeric(10, 2),
|
||||
doc="Actual hours spent"
|
||||
)
|
||||
|
||||
# Repository
|
||||
gitea_repo_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Gitea repository URL if applicable"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the project"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
client: Mapped["Client"] = relationship(
|
||||
"Client",
|
||||
back_populates="projects",
|
||||
doc="Relationship to Client model"
|
||||
)
|
||||
|
||||
sessions: Mapped[list["Session"]] = relationship(
|
||||
"Session",
|
||||
back_populates="project",
|
||||
doc="Sessions associated with this project"
|
||||
)
|
||||
|
||||
pending_tasks: Mapped[list["PendingTask"]] = relationship(
|
||||
"PendingTask",
|
||||
back_populates="project",
|
||||
doc="Pending tasks associated with this project"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_projects_client", "client_id"),
|
||||
Index("idx_projects_status", "status"),
|
||||
Index("idx_projects_slug", "slug"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the project."""
|
||||
return f"<Project(name='{self.name}', slug='{self.slug}', status='{self.status}')>"
|
||||
118
api/models/project_state.py
Normal file
118
api/models/project_state.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""
|
||||
ProjectState model for tracking current state of projects.
|
||||
|
||||
Stores the current phase, progress, blockers, and next actions for each project
|
||||
to enable quick context retrieval when resuming work.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .project import Project
|
||||
from .session import Session
|
||||
|
||||
|
||||
class ProjectState(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
ProjectState model for tracking current state of projects.
|
||||
|
||||
Stores the current phase, progress, blockers, next actions, and key
|
||||
information about a project's state. Each project has exactly one
|
||||
ProjectState record that is updated as the project progresses.
|
||||
|
||||
Attributes:
|
||||
project_id: Foreign key to projects (required, unique - one state per project)
|
||||
current_phase: Current phase or stage of the project
|
||||
progress_percentage: Integer percentage of completion (0-100)
|
||||
blockers: JSON array of current blockers preventing progress
|
||||
next_actions: JSON array of next steps to take
|
||||
context_summary: Dense overview text of where the project currently stands
|
||||
key_files: JSON array of important file paths for this project
|
||||
important_decisions: JSON array of key decisions made for this project
|
||||
last_session_id: Foreign key to the last session that updated this state
|
||||
project: Relationship to Project model
|
||||
last_session: Relationship to Session model
|
||||
"""
|
||||
|
||||
__tablename__ = "project_states"
|
||||
|
||||
# Foreign keys
|
||||
project_id: Mapped[str] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
unique=True,
|
||||
doc="Foreign key to projects (required, unique - one state per project)"
|
||||
)
|
||||
|
||||
last_session_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("sessions.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to the last session that updated this state"
|
||||
)
|
||||
|
||||
# State metadata
|
||||
current_phase: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Current phase or stage of the project"
|
||||
)
|
||||
|
||||
progress_percentage: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
default=0,
|
||||
server_default="0",
|
||||
doc="Integer percentage of completion (0-100)"
|
||||
)
|
||||
|
||||
# State content
|
||||
blockers: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of current blockers preventing progress"
|
||||
)
|
||||
|
||||
next_actions: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of next steps to take"
|
||||
)
|
||||
|
||||
context_summary: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Dense overview text of where the project currently stands"
|
||||
)
|
||||
|
||||
key_files: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of important file paths for this project"
|
||||
)
|
||||
|
||||
important_decisions: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of key decisions made for this project"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
project: Mapped["Project"] = relationship(
|
||||
"Project",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
last_session: Mapped[Optional["Session"]] = relationship(
|
||||
"Session",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_project_states_project", "project_id"),
|
||||
Index("idx_project_states_last_session", "last_session_id"),
|
||||
Index("idx_project_states_progress", "progress_percentage"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the project state."""
|
||||
return f"<ProjectState(project_id='{self.project_id}', phase='{self.current_phase}', progress={self.progress_percentage}%)>"
|
||||
73
api/models/schema_migration.py
Normal file
73
api/models/schema_migration.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Schema migration model for tracking Alembic database migrations.
|
||||
|
||||
Tracks which database schema migrations have been applied, when, and by whom
|
||||
for database version control and migration management.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class SchemaMigration(Base):
|
||||
"""
|
||||
Schema migration model for tracking Alembic database migrations.
|
||||
|
||||
Records database schema version changes applied via Alembic migrations.
|
||||
Used to track which migrations have been applied, when they were applied,
|
||||
and the SQL executed for audit and rollback purposes.
|
||||
|
||||
Note: This model does NOT use UUIDMixin as it uses version_id as the
|
||||
primary key to match Alembic's migration tracking system.
|
||||
|
||||
Attributes:
|
||||
version_id: Alembic migration version identifier (primary key)
|
||||
description: Description of what the migration does
|
||||
applied_at: When the migration was applied
|
||||
applied_by: User or system that applied the migration
|
||||
migration_sql: SQL executed during the migration
|
||||
"""
|
||||
|
||||
__tablename__ = "schema_migrations"
|
||||
|
||||
# Primary key - Alembic version identifier
|
||||
version_id: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
primary_key=True,
|
||||
doc="Alembic migration version identifier"
|
||||
)
|
||||
|
||||
# Migration details
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Description of what the migration does"
|
||||
)
|
||||
|
||||
# Application tracking
|
||||
applied_at: Mapped[datetime] = mapped_column(
|
||||
TIMESTAMP,
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the migration was applied"
|
||||
)
|
||||
|
||||
applied_by: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="User or system that applied the migration"
|
||||
)
|
||||
|
||||
# Migration SQL
|
||||
migration_sql: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="SQL executed during the migration"
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the schema migration."""
|
||||
return f"<SchemaMigration(version='{self.version_id}', applied_at='{self.applied_at}')>"
|
||||
144
api/models/security_incident.py
Normal file
144
api/models/security_incident.py
Normal file
@@ -0,0 +1,144 @@
|
||||
"""
|
||||
Security incident model for tracking security events and remediation.
|
||||
|
||||
This model captures security incidents, their investigation, and resolution
|
||||
including BEC, backdoors, malware, and other security threats.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
CHAR,
|
||||
CheckConstraint,
|
||||
ForeignKey,
|
||||
Index,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from api.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class SecurityIncident(UUIDMixin, TimestampMixin, Base):
|
||||
"""
|
||||
Security incident tracking and remediation.
|
||||
|
||||
Records security incidents from detection through investigation to resolution,
|
||||
including details about the incident type, severity, and remediation steps.
|
||||
|
||||
Attributes:
|
||||
id: UUID primary key
|
||||
client_id: Reference to affected client
|
||||
service_id: Reference to affected service
|
||||
infrastructure_id: Reference to affected infrastructure
|
||||
incident_type: Type of security incident
|
||||
incident_date: When the incident occurred
|
||||
severity: Severity level (critical, high, medium, low)
|
||||
description: Detailed description of the incident
|
||||
findings: Investigation results and findings
|
||||
remediation_steps: Steps taken to remediate
|
||||
status: Current status of incident handling
|
||||
resolved_at: When the incident was resolved
|
||||
notes: Additional notes
|
||||
created_at: Creation timestamp
|
||||
updated_at: Last update timestamp
|
||||
"""
|
||||
|
||||
__tablename__ = "security_incidents"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Reference to affected client",
|
||||
)
|
||||
service_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("services.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
doc="Reference to affected service",
|
||||
)
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
doc="Reference to affected infrastructure",
|
||||
)
|
||||
|
||||
# Incident details
|
||||
incident_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Type of security incident",
|
||||
)
|
||||
incident_date: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
doc="When the incident occurred",
|
||||
)
|
||||
severity: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Severity level",
|
||||
)
|
||||
description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed description of the incident",
|
||||
)
|
||||
|
||||
# Investigation and remediation
|
||||
findings: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Investigation results and findings",
|
||||
)
|
||||
remediation_steps: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Steps taken to remediate the incident",
|
||||
)
|
||||
|
||||
# Status tracking
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
server_default="'investigating'",
|
||||
doc="Current status of incident handling",
|
||||
)
|
||||
resolved_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
nullable=True,
|
||||
doc="When the incident was resolved",
|
||||
)
|
||||
|
||||
# Additional information
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
nullable=True,
|
||||
doc="Additional notes and context",
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"incident_type IN ('bec', 'backdoor', 'malware', 'unauthorized_access', 'data_breach', 'phishing', 'ransomware', 'brute_force')",
|
||||
name="ck_security_incidents_type",
|
||||
),
|
||||
CheckConstraint(
|
||||
"severity IN ('critical', 'high', 'medium', 'low')",
|
||||
name="ck_security_incidents_severity",
|
||||
),
|
||||
CheckConstraint(
|
||||
"status IN ('investigating', 'contained', 'resolved', 'monitoring')",
|
||||
name="ck_security_incidents_status",
|
||||
),
|
||||
Index("idx_incidents_client", "client_id"),
|
||||
Index("idx_incidents_type", "incident_type"),
|
||||
Index("idx_incidents_status", "status"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the security incident."""
|
||||
return f"<SecurityIncident(id={self.id}, type={self.incident_type}, severity={self.severity}, status={self.status})>"
|
||||
122
api/models/service.py
Normal file
122
api/models/service.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
Service model for applications running on infrastructure.
|
||||
|
||||
Services represent applications, databases, web servers, and other software
|
||||
running on infrastructure components.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .deployment import Deployment
|
||||
|
||||
|
||||
class Service(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Service model representing applications running on infrastructure.
|
||||
|
||||
Tracks applications, services, databases, web servers, and other software
|
||||
components running on infrastructure with URLs, ports, and status.
|
||||
|
||||
Attributes:
|
||||
infrastructure_id: Reference to the infrastructure hosting this service
|
||||
service_name: Name of the service (e.g., "Gitea", "PostgreSQL")
|
||||
service_type: Type of service (e.g., "git_hosting", "database")
|
||||
external_url: External URL for accessing the service
|
||||
internal_url: Internal URL for accessing the service
|
||||
port: Port number the service runs on
|
||||
protocol: Protocol used (https, ssh, smb, etc.)
|
||||
status: Current status (running, stopped, error, maintenance)
|
||||
version: Version of the service
|
||||
notes: Additional notes
|
||||
"""
|
||||
|
||||
__tablename__ = "services"
|
||||
|
||||
# Foreign keys
|
||||
infrastructure_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("infrastructure.id", ondelete="CASCADE"),
|
||||
doc="Reference to the infrastructure hosting this service"
|
||||
)
|
||||
|
||||
# Service identification
|
||||
service_name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Name of the service (e.g., 'Gitea', 'PostgreSQL', 'Apache')"
|
||||
)
|
||||
|
||||
service_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Type of service (e.g., 'git_hosting', 'database', 'web_server')"
|
||||
)
|
||||
|
||||
# URLs and connectivity
|
||||
external_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="External URL for accessing the service"
|
||||
)
|
||||
|
||||
internal_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Internal URL for accessing the service"
|
||||
)
|
||||
|
||||
port: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Port number the service runs on"
|
||||
)
|
||||
|
||||
protocol: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Protocol used (https, ssh, smb, etc.)"
|
||||
)
|
||||
|
||||
# Status
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="running",
|
||||
server_default="running",
|
||||
nullable=False,
|
||||
doc="Status: running, stopped, error, maintenance"
|
||||
)
|
||||
|
||||
# Version
|
||||
version: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Version of the service"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
deployments: Mapped[list["Deployment"]] = relationship(
|
||||
"Deployment",
|
||||
back_populates="service",
|
||||
doc="Relationship to Deployment model"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"status IN ('running', 'stopped', 'error', 'maintenance')",
|
||||
name="ck_services_status"
|
||||
),
|
||||
Index("idx_services_infrastructure", "infrastructure_id"),
|
||||
Index("idx_services_name", "service_name"),
|
||||
Index("idx_services_type", "service_type"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the service."""
|
||||
return f"<Service(service_name='{self.service_name}', status='{self.status}')>"
|
||||
83
api/models/service_relationship.py
Normal file
83
api/models/service_relationship.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""
|
||||
Service relationship model for service dependencies and relationships.
|
||||
|
||||
Service relationships track how services depend on, proxy through, or
|
||||
relate to other services in the infrastructure.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Text, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
|
||||
class ServiceRelationship(Base, UUIDMixin):
|
||||
"""
|
||||
Service relationship model representing dependencies and relationships.
|
||||
|
||||
Tracks relationships between services including hosting, proxying,
|
||||
authentication, backend dependencies, and replication.
|
||||
|
||||
Attributes:
|
||||
from_service_id: Reference to the source service in the relationship
|
||||
to_service_id: Reference to the target service in the relationship
|
||||
relationship_type: Type of relationship (hosted_on, proxied_by, etc.)
|
||||
notes: Additional notes about the relationship
|
||||
created_at: When the relationship was created
|
||||
"""
|
||||
|
||||
__tablename__ = "service_relationships"
|
||||
|
||||
# Foreign keys
|
||||
from_service_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("services.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the source service in the relationship"
|
||||
)
|
||||
|
||||
to_service_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("services.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the target service in the relationship"
|
||||
)
|
||||
|
||||
# Relationship details
|
||||
relationship_type: Mapped[str] = mapped_column(
|
||||
CHAR(50),
|
||||
nullable=False,
|
||||
doc="Type: hosted_on, proxied_by, authenticates_via, backend_for, depends_on, replicates_to"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the relationship"
|
||||
)
|
||||
|
||||
# Timestamp
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the relationship was created"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"relationship_type IN ('hosted_on', 'proxied_by', 'authenticates_via', 'backend_for', 'depends_on', 'replicates_to')",
|
||||
name="ck_service_relationships_type"
|
||||
),
|
||||
UniqueConstraint("from_service_id", "to_service_id", "relationship_type", name="uq_service_relationship"),
|
||||
Index("idx_service_rel_from", "from_service_id"),
|
||||
Index("idx_service_rel_to", "to_service_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the service relationship."""
|
||||
return f"<ServiceRelationship(from='{self.from_service_id}', to='{self.to_service_id}', type='{self.relationship_type}')>"
|
||||
215
api/models/session.py
Normal file
215
api/models/session.py
Normal file
@@ -0,0 +1,215 @@
|
||||
"""
|
||||
Session model for work sessions with time tracking.
|
||||
|
||||
Tracks individual work sessions including client, project, machine used,
|
||||
time tracking, and session documentation.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, DATE, ForeignKey, Index, Integer, Numeric, String, Text, TIMESTAMP
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .client import Client
|
||||
from .database_change import DatabaseChange
|
||||
from .deployment import Deployment
|
||||
from .infrastructure_change import InfrastructureChange
|
||||
from .machine import Machine
|
||||
from .operation_failure import OperationFailure
|
||||
from .project import Project
|
||||
from .work_item import WorkItem
|
||||
|
||||
|
||||
class Session(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Session model representing work sessions with time tracking.
|
||||
|
||||
Tracks individual work sessions including which client, project, and machine
|
||||
were involved, along with timing information, billability, and session documentation.
|
||||
Enhanced with machine tracking to understand which machine was used for the work.
|
||||
|
||||
Attributes:
|
||||
client_id: Foreign key to clients table
|
||||
project_id: Foreign key to projects table
|
||||
machine_id: Foreign key to machines table (which machine was used)
|
||||
session_date: Date of the session
|
||||
start_time: Session start timestamp
|
||||
end_time: Session end timestamp
|
||||
duration_minutes: Duration in minutes (auto-calculated or manual)
|
||||
status: Session status (completed, in_progress, blocked, pending)
|
||||
session_title: Brief title describing the session
|
||||
summary: Markdown summary of the session
|
||||
is_billable: Whether this session is billable
|
||||
billable_hours: Billable hours if applicable
|
||||
technician: Name of technician who performed the work
|
||||
session_log_file: Path to markdown session log file
|
||||
notes: Additional notes about the session
|
||||
client: Relationship to Client model
|
||||
project: Relationship to Project model
|
||||
machine: Relationship to Machine model
|
||||
"""
|
||||
|
||||
__tablename__ = "sessions"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("clients.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to clients table"
|
||||
)
|
||||
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("projects.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to projects table"
|
||||
)
|
||||
|
||||
machine_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(36),
|
||||
ForeignKey("machines.id", ondelete="SET NULL"),
|
||||
doc="Foreign key to machines table (which machine was used)"
|
||||
)
|
||||
|
||||
# Session timing
|
||||
session_date: Mapped[date] = mapped_column(
|
||||
DATE,
|
||||
nullable=False,
|
||||
doc="Date of the session"
|
||||
)
|
||||
|
||||
start_time: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="Session start timestamp"
|
||||
)
|
||||
|
||||
end_time: Mapped[Optional[datetime]] = mapped_column(
|
||||
TIMESTAMP,
|
||||
doc="Session end timestamp"
|
||||
)
|
||||
|
||||
duration_minutes: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Duration in minutes (auto-calculated or manual)"
|
||||
)
|
||||
|
||||
# Status
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="completed",
|
||||
server_default="completed",
|
||||
doc="Session status: completed, in_progress, blocked, pending"
|
||||
)
|
||||
|
||||
# Session details
|
||||
session_title: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Brief title describing the session"
|
||||
)
|
||||
|
||||
summary: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Markdown summary of the session"
|
||||
)
|
||||
|
||||
# Billability
|
||||
is_billable: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
doc="Whether this session is billable"
|
||||
)
|
||||
|
||||
billable_hours: Mapped[Optional[float]] = mapped_column(
|
||||
Numeric(10, 2),
|
||||
doc="Billable hours if applicable"
|
||||
)
|
||||
|
||||
# Technician
|
||||
technician: Mapped[Optional[str]] = mapped_column(
|
||||
String(255),
|
||||
doc="Name of technician who performed the work"
|
||||
)
|
||||
|
||||
# Documentation
|
||||
session_log_file: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
doc="Path to markdown session log file"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the session"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
client: Mapped[Optional["Client"]] = relationship(
|
||||
"Client",
|
||||
back_populates="sessions",
|
||||
doc="Relationship to Client model"
|
||||
)
|
||||
|
||||
project: Mapped[Optional["Project"]] = relationship(
|
||||
"Project",
|
||||
back_populates="sessions",
|
||||
doc="Relationship to Project model"
|
||||
)
|
||||
|
||||
machine: Mapped[Optional["Machine"]] = relationship(
|
||||
"Machine",
|
||||
back_populates="sessions",
|
||||
doc="Relationship to Machine model"
|
||||
)
|
||||
|
||||
work_items: Mapped[list["WorkItem"]] = relationship(
|
||||
"WorkItem",
|
||||
back_populates="session",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to WorkItem model"
|
||||
)
|
||||
|
||||
operation_failures: Mapped[list["OperationFailure"]] = relationship(
|
||||
"OperationFailure",
|
||||
back_populates="session",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to OperationFailure model"
|
||||
)
|
||||
|
||||
deployments: Mapped[list["Deployment"]] = relationship(
|
||||
"Deployment",
|
||||
back_populates="session",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to Deployment model"
|
||||
)
|
||||
|
||||
database_changes: Mapped[list["DatabaseChange"]] = relationship(
|
||||
"DatabaseChange",
|
||||
back_populates="session",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to DatabaseChange model"
|
||||
)
|
||||
|
||||
infrastructure_changes: Mapped[list["InfrastructureChange"]] = relationship(
|
||||
"InfrastructureChange",
|
||||
back_populates="session",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to InfrastructureChange model"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_sessions_client", "client_id"),
|
||||
Index("idx_sessions_project", "project_id"),
|
||||
Index("idx_sessions_date", "session_date"),
|
||||
Index("idx_sessions_billable", "is_billable"),
|
||||
Index("idx_sessions_machine", "machine_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the session."""
|
||||
return f"<Session(title='{self.session_title}', date='{self.session_date}', status='{self.status}')>"
|
||||
51
api/models/session_tag.py
Normal file
51
api/models/session_tag.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Session tag junction table for many-to-many relationships.
|
||||
|
||||
Associates sessions with tags for categorization and filtering.
|
||||
"""
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, PrimaryKeyConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class SessionTag(Base):
|
||||
"""
|
||||
Session tag junction table for many-to-many relationships.
|
||||
|
||||
Links sessions to tags, allowing sessions to have multiple tags
|
||||
and tags to be associated with multiple sessions.
|
||||
|
||||
Attributes:
|
||||
session_id: Reference to the session
|
||||
tag_id: Reference to the tag
|
||||
"""
|
||||
|
||||
__tablename__ = "session_tags"
|
||||
|
||||
# Composite primary key
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the session"
|
||||
)
|
||||
|
||||
tag_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("tags.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the tag"
|
||||
)
|
||||
|
||||
# Table constraints
|
||||
__table_args__ = (
|
||||
PrimaryKeyConstraint("session_id", "tag_id"),
|
||||
Index("idx_st_session", "session_id"),
|
||||
Index("idx_st_tag", "tag_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the session tag."""
|
||||
return f"<SessionTag(session_id='{self.session_id}', tag_id='{self.tag_id}')>"
|
||||
95
api/models/site.py
Normal file
95
api/models/site.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""
|
||||
Site model for client physical locations.
|
||||
|
||||
Sites represent physical locations for clients including network configuration,
|
||||
VPN settings, and gateway information.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Site(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Site model representing client physical locations.
|
||||
|
||||
Tracks physical sites for clients with network configuration including
|
||||
subnets, VPN settings, gateway IPs, and DNS servers.
|
||||
|
||||
Attributes:
|
||||
client_id: Reference to the client this site belongs to
|
||||
name: Site name (e.g., "Main Office", "SLC - Salt Lake City")
|
||||
network_subnet: Network subnet for the site (e.g., "172.16.9.0/24")
|
||||
vpn_required: Whether VPN is required to access this site
|
||||
vpn_subnet: VPN subnet if applicable (e.g., "192.168.1.0/24")
|
||||
gateway_ip: Gateway IP address (IPv4 or IPv6)
|
||||
dns_servers: JSON array of DNS server addresses
|
||||
notes: Additional notes about the site
|
||||
"""
|
||||
|
||||
__tablename__ = "sites"
|
||||
|
||||
# Foreign keys
|
||||
client_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the client this site belongs to"
|
||||
)
|
||||
|
||||
# Site identification
|
||||
name: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="Site name (e.g., 'Main Office', 'SLC - Salt Lake City')"
|
||||
)
|
||||
|
||||
# Network configuration
|
||||
network_subnet: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Network subnet for the site (e.g., '172.16.9.0/24')"
|
||||
)
|
||||
|
||||
# VPN configuration
|
||||
vpn_required: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether VPN is required to access this site"
|
||||
)
|
||||
|
||||
vpn_subnet: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="VPN subnet if applicable (e.g., '192.168.1.0/24')"
|
||||
)
|
||||
|
||||
# Gateway and DNS
|
||||
gateway_ip: Mapped[Optional[str]] = mapped_column(
|
||||
String(45),
|
||||
doc="Gateway IP address (IPv4 or IPv6)"
|
||||
)
|
||||
|
||||
dns_servers: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of DNS server addresses"
|
||||
)
|
||||
|
||||
# Notes
|
||||
notes: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Additional notes about the site"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_sites_client", "client_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the site."""
|
||||
return f"<Site(name='{self.name}', client_id='{self.client_id}')>"
|
||||
69
api/models/tag.py
Normal file
69
api/models/tag.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""
|
||||
Tag model for categorizing and organizing work items.
|
||||
|
||||
Provides flexible tagging system for technologies, clients, infrastructure,
|
||||
problem types, actions, and services.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Tag(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Tag model for categorizing and organizing work items.
|
||||
|
||||
Provides a flexible tagging system for organizing work by technology,
|
||||
client, infrastructure, problem type, action, or service. Tags can be
|
||||
pre-populated or created on-demand, with automatic usage tracking.
|
||||
|
||||
Attributes:
|
||||
name: Tag name (unique)
|
||||
category: Tag category (technology, client, infrastructure, problem_type, action, service)
|
||||
description: Description of the tag
|
||||
usage_count: Number of times this tag has been used (auto-incremented)
|
||||
"""
|
||||
|
||||
__tablename__ = "tags"
|
||||
|
||||
# Tag identification
|
||||
name: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
unique=True,
|
||||
doc="Tag name (unique)"
|
||||
)
|
||||
|
||||
# Categorization
|
||||
category: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
doc="Tag category: technology, client, infrastructure, problem_type, action, service"
|
||||
)
|
||||
|
||||
# Description
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Description of the tag"
|
||||
)
|
||||
|
||||
# Usage tracking
|
||||
usage_count: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
default=0,
|
||||
server_default="0",
|
||||
doc="Number of times this tag has been used (auto-incremented)"
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_tags_category", "category"),
|
||||
Index("idx_tags_name", "name"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the tag."""
|
||||
return f"<Tag(name='{self.name}', category='{self.category}', usage_count={self.usage_count})>"
|
||||
160
api/models/task.py
Normal file
160
api/models/task.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""
|
||||
Task model for hierarchical task tracking.
|
||||
|
||||
Tasks represent work items that can be hierarchical, assigned to agents,
|
||||
and tracked across sessions with dependencies and complexity estimates.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Task(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Task model representing hierarchical work items.
|
||||
|
||||
Tasks support parent-child relationships for breaking down complex work,
|
||||
status tracking with blocking reasons, assignment to agents, and
|
||||
complexity estimation.
|
||||
|
||||
Attributes:
|
||||
parent_task_id: Reference to parent task for hierarchical structure
|
||||
task_order: Order of this task relative to siblings
|
||||
title: Task title
|
||||
description: Detailed task description
|
||||
task_type: Type of task (implementation, research, review, etc.)
|
||||
status: Current status (pending, in_progress, blocked, completed, cancelled)
|
||||
blocking_reason: Reason why task is blocked
|
||||
session_id: Reference to the session this task belongs to
|
||||
client_id: Reference to the client
|
||||
project_id: Reference to the project
|
||||
assigned_agent: Which agent is handling this task
|
||||
estimated_complexity: Complexity estimate (trivial to very_complex)
|
||||
started_at: When the task was started
|
||||
completed_at: When the task was completed
|
||||
task_context: Detailed context for this task (JSON)
|
||||
dependencies: JSON array of dependency task IDs
|
||||
"""
|
||||
|
||||
__tablename__ = "tasks"
|
||||
|
||||
# Task hierarchy
|
||||
parent_task_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("tasks.id", ondelete="CASCADE"),
|
||||
doc="Reference to parent task for hierarchical structure"
|
||||
)
|
||||
|
||||
task_order: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
nullable=False,
|
||||
doc="Order of this task relative to siblings"
|
||||
)
|
||||
|
||||
# Task details
|
||||
title: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Task title"
|
||||
)
|
||||
|
||||
description: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Detailed task description"
|
||||
)
|
||||
|
||||
task_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Type: implementation, research, review, deployment, testing, documentation, bugfix, analysis"
|
||||
)
|
||||
|
||||
# Status tracking
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Status: pending, in_progress, blocked, completed, cancelled"
|
||||
)
|
||||
|
||||
blocking_reason: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Reason why task is blocked (if status='blocked')"
|
||||
)
|
||||
|
||||
# Context references
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
doc="Reference to the session this task belongs to"
|
||||
)
|
||||
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="SET NULL"),
|
||||
doc="Reference to the client"
|
||||
)
|
||||
|
||||
project_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("projects.id", ondelete="SET NULL"),
|
||||
doc="Reference to the project"
|
||||
)
|
||||
|
||||
assigned_agent: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
doc="Which agent is handling this task"
|
||||
)
|
||||
|
||||
# Timing
|
||||
estimated_complexity: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Complexity: trivial, simple, moderate, complex, very_complex"
|
||||
)
|
||||
|
||||
started_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
doc="When the task was started"
|
||||
)
|
||||
|
||||
completed_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
doc="When the task was completed"
|
||||
)
|
||||
|
||||
# Context data (stored as JSON text)
|
||||
task_context: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="Detailed context for this task (JSON)"
|
||||
)
|
||||
|
||||
dependencies: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc="JSON array of dependency task IDs"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"task_type IN ('implementation', 'research', 'review', 'deployment', 'testing', 'documentation', 'bugfix', 'analysis')",
|
||||
name="ck_tasks_type"
|
||||
),
|
||||
CheckConstraint(
|
||||
"status IN ('pending', 'in_progress', 'blocked', 'completed', 'cancelled')",
|
||||
name="ck_tasks_status"
|
||||
),
|
||||
CheckConstraint(
|
||||
"estimated_complexity IN ('trivial', 'simple', 'moderate', 'complex', 'very_complex')",
|
||||
name="ck_tasks_complexity"
|
||||
),
|
||||
Index("idx_tasks_session", "session_id"),
|
||||
Index("idx_tasks_status", "status"),
|
||||
Index("idx_tasks_parent", "parent_task_id"),
|
||||
Index("idx_tasks_client", "client_id"),
|
||||
Index("idx_tasks_project", "project_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the task."""
|
||||
return f"<Task(title='{self.title}', status='{self.status}')>"
|
||||
118
api/models/ticket_link.py
Normal file
118
api/models/ticket_link.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""
|
||||
Ticket Link model for connecting sessions to external ticketing systems.
|
||||
|
||||
This model creates relationships between ClaudeTools sessions and tickets
|
||||
in external systems like SyncroMSP, Autotask, ConnectWise, etc.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
|
||||
class TicketLink(Base, UUIDMixin):
|
||||
"""
|
||||
Links between sessions and external ticketing system tickets.
|
||||
|
||||
Creates associations between ClaudeTools work sessions and tickets
|
||||
in external MSP platforms. Enables automatic time tracking, status
|
||||
updates, and work documentation in ticketing systems.
|
||||
|
||||
Attributes:
|
||||
id: Unique identifier
|
||||
session_id: Reference to the ClaudeTools session
|
||||
client_id: Reference to the client
|
||||
integration_type: Type of ticketing system (syncro, autotask, connectwise)
|
||||
ticket_id: External ticket identifier
|
||||
ticket_number: Human-readable ticket number (e.g., "T12345")
|
||||
ticket_subject: Subject/title of the ticket
|
||||
ticket_url: Direct URL to view the ticket
|
||||
ticket_status: Current status of the ticket
|
||||
link_type: Type of relationship (related, resolves, documents)
|
||||
created_at: When the link was created
|
||||
"""
|
||||
|
||||
__tablename__ = "ticket_links"
|
||||
|
||||
# Foreign keys
|
||||
session_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="ClaudeTools session linked to this ticket",
|
||||
)
|
||||
client_id: Mapped[Optional[str]] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("clients.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
doc="Client this ticket belongs to",
|
||||
)
|
||||
|
||||
# Ticket information
|
||||
integration_type: Mapped[str] = mapped_column(
|
||||
String(100),
|
||||
nullable=False,
|
||||
doc="Ticketing system type (syncro, autotask, connectwise)",
|
||||
)
|
||||
ticket_id: Mapped[str] = mapped_column(
|
||||
String(255),
|
||||
nullable=False,
|
||||
doc="External ticket identifier",
|
||||
)
|
||||
ticket_number: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Human-readable ticket number (T12345)",
|
||||
)
|
||||
ticket_subject: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Subject/title of the ticket",
|
||||
)
|
||||
ticket_url: Mapped[Optional[str]] = mapped_column(
|
||||
String(500),
|
||||
nullable=True,
|
||||
doc="Direct URL to view the ticket",
|
||||
)
|
||||
ticket_status: Mapped[Optional[str]] = mapped_column(
|
||||
String(100),
|
||||
nullable=True,
|
||||
doc="Current status of the ticket",
|
||||
)
|
||||
|
||||
# Link metadata
|
||||
link_type: Mapped[Optional[str]] = mapped_column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
doc="Type of relationship (related, resolves, documents)",
|
||||
)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the link was created",
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_ticket_session", "session_id"),
|
||||
Index("idx_ticket_client", "client_id"),
|
||||
Index("idx_ticket_external", "integration_type", "ticket_id"),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
# session = relationship("Session", back_populates="ticket_links")
|
||||
# client = relationship("Client", back_populates="ticket_links")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the ticket link."""
|
||||
return (
|
||||
f"<TicketLink(id={self.id!r}, "
|
||||
f"type={self.integration_type!r}, "
|
||||
f"ticket={self.ticket_number or self.ticket_id!r}, "
|
||||
f"link_type={self.link_type!r})>"
|
||||
)
|
||||
189
api/models/work_item.py
Normal file
189
api/models/work_item.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""
|
||||
Work item model for tracking session work activities.
|
||||
|
||||
Work items represent individual tasks and activities completed during
|
||||
work sessions, with categorization, timing, and billing tracking.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, CHAR, CheckConstraint, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .database_change import DatabaseChange
|
||||
from .deployment import Deployment
|
||||
from .infrastructure_change import InfrastructureChange
|
||||
from .session import Session
|
||||
|
||||
|
||||
class WorkItem(Base, UUIDMixin):
|
||||
"""
|
||||
Work item model representing individual work activities during sessions.
|
||||
|
||||
Tracks detailed work activities completed during a session including
|
||||
categorization, status, timing estimates and actuals, affected systems,
|
||||
and technologies used.
|
||||
|
||||
Attributes:
|
||||
session_id: Reference to the session this work item belongs to
|
||||
category: Work category (infrastructure, troubleshooting, etc.)
|
||||
title: Brief title of the work item
|
||||
description: Detailed description of the work performed
|
||||
status: Current status of the work item
|
||||
priority: Priority level (critical, high, medium, low)
|
||||
is_billable: Whether this work item is billable
|
||||
estimated_minutes: Estimated time to complete in minutes
|
||||
actual_minutes: Actual time spent in minutes
|
||||
affected_systems: JSON array of affected systems
|
||||
technologies_used: JSON array of technologies used
|
||||
item_order: Sequence order within the session
|
||||
created_at: When the work item was created
|
||||
completed_at: When the work item was completed
|
||||
"""
|
||||
|
||||
__tablename__ = "work_items"
|
||||
|
||||
# Foreign keys
|
||||
session_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("sessions.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Reference to the session this work item belongs to"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
session: Mapped["Session"] = relationship(
|
||||
"Session",
|
||||
back_populates="work_items",
|
||||
doc="Relationship to Session model"
|
||||
)
|
||||
|
||||
deployments: Mapped[list["Deployment"]] = relationship(
|
||||
"Deployment",
|
||||
back_populates="work_item",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to Deployment model"
|
||||
)
|
||||
|
||||
database_changes: Mapped[list["DatabaseChange"]] = relationship(
|
||||
"DatabaseChange",
|
||||
back_populates="work_item",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to DatabaseChange model"
|
||||
)
|
||||
|
||||
infrastructure_changes: Mapped[list["InfrastructureChange"]] = relationship(
|
||||
"InfrastructureChange",
|
||||
back_populates="work_item",
|
||||
cascade="all, delete-orphan",
|
||||
doc="Relationship to InfrastructureChange model"
|
||||
)
|
||||
|
||||
# Work categorization
|
||||
category: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
nullable=False,
|
||||
doc="Work category: infrastructure, troubleshooting, configuration, development, maintenance, security, documentation"
|
||||
)
|
||||
|
||||
title: Mapped[str] = mapped_column(
|
||||
String(500),
|
||||
nullable=False,
|
||||
doc="Brief title of the work item"
|
||||
)
|
||||
|
||||
description: Mapped[str] = mapped_column(
|
||||
Text,
|
||||
nullable=False,
|
||||
doc="Detailed description of the work performed"
|
||||
)
|
||||
|
||||
# Status tracking
|
||||
status: Mapped[str] = mapped_column(
|
||||
String(50),
|
||||
default="completed",
|
||||
server_default="completed",
|
||||
nullable=False,
|
||||
doc="Status: completed, in_progress, blocked, pending, deferred"
|
||||
)
|
||||
|
||||
priority: Mapped[Optional[str]] = mapped_column(
|
||||
String(20),
|
||||
doc="Priority level: critical, high, medium, low"
|
||||
)
|
||||
|
||||
# Billing
|
||||
is_billable: Mapped[bool] = mapped_column(
|
||||
Boolean,
|
||||
default=False,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
doc="Whether this work item is billable"
|
||||
)
|
||||
|
||||
# Time tracking
|
||||
estimated_minutes: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Estimated time to complete in minutes"
|
||||
)
|
||||
|
||||
actual_minutes: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Actual time spent in minutes"
|
||||
)
|
||||
|
||||
# Context data (stored as JSON text)
|
||||
affected_systems: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc='JSON array of affected systems (e.g., ["jupiter", "172.16.3.20"])'
|
||||
)
|
||||
|
||||
technologies_used: Mapped[Optional[str]] = mapped_column(
|
||||
Text,
|
||||
doc='JSON array of technologies used (e.g., ["docker", "mariadb"])'
|
||||
)
|
||||
|
||||
# Ordering
|
||||
item_order: Mapped[Optional[int]] = mapped_column(
|
||||
Integer,
|
||||
doc="Sequence order within the session"
|
||||
)
|
||||
|
||||
# Timestamps
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
nullable=False,
|
||||
server_default=func.now(),
|
||||
doc="When the work item was created"
|
||||
)
|
||||
|
||||
completed_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
doc="When the work item was completed"
|
||||
)
|
||||
|
||||
# Constraints and indexes
|
||||
__table_args__ = (
|
||||
CheckConstraint(
|
||||
"category IN ('infrastructure', 'troubleshooting', 'configuration', 'development', 'maintenance', 'security', 'documentation')",
|
||||
name="ck_work_items_category"
|
||||
),
|
||||
CheckConstraint(
|
||||
"status IN ('completed', 'in_progress', 'blocked', 'pending', 'deferred')",
|
||||
name="ck_work_items_status"
|
||||
),
|
||||
CheckConstraint(
|
||||
"priority IN ('critical', 'high', 'medium', 'low')",
|
||||
name="ck_work_items_priority"
|
||||
),
|
||||
Index("idx_work_items_session", "session_id"),
|
||||
Index("idx_work_items_category", "category"),
|
||||
Index("idx_work_items_status", "status"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the work item."""
|
||||
return f"<WorkItem(title='{self.title}', category='{self.category}', status='{self.status}')>"
|
||||
56
api/models/work_item_tag.py
Normal file
56
api/models/work_item_tag.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Work Item Tag junction table for many-to-many relationship.
|
||||
|
||||
This model creates the many-to-many relationship between work items and tags,
|
||||
allowing flexible categorization and filtering of work items.
|
||||
"""
|
||||
|
||||
from sqlalchemy import CHAR, ForeignKey, Index, PrimaryKeyConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class WorkItemTag(Base):
|
||||
"""
|
||||
Junction table linking work items to tags.
|
||||
|
||||
Implements many-to-many relationship between work_items and tags tables.
|
||||
Allows work items to be tagged with multiple categories for filtering
|
||||
and organization.
|
||||
|
||||
Attributes:
|
||||
work_item_id: Foreign key to work_items table
|
||||
tag_id: Foreign key to tags table
|
||||
"""
|
||||
|
||||
__tablename__ = "work_item_tags"
|
||||
|
||||
# Composite primary key
|
||||
work_item_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("work_items.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Work item being tagged",
|
||||
)
|
||||
tag_id: Mapped[str] = mapped_column(
|
||||
CHAR(36),
|
||||
ForeignKey("tags.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
doc="Tag applied to the work item",
|
||||
)
|
||||
|
||||
# Table constraints and indexes
|
||||
__table_args__ = (
|
||||
PrimaryKeyConstraint("work_item_id", "tag_id"),
|
||||
Index("idx_wit_work_item", "work_item_id"),
|
||||
Index("idx_wit_tag", "tag_id"),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
# work_item = relationship("WorkItem", back_populates="tags")
|
||||
# tag = relationship("Tag", back_populates="work_items")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""String representation of the work item tag relationship."""
|
||||
return f"<WorkItemTag(work_item_id={self.work_item_id!r}, tag_id={self.tag_id!r})>"
|
||||
1
api/routers/__init__.py
Normal file
1
api/routers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""API routers for ClaudeTools"""
|
||||
565
api/routers/billable_time.py
Normal file
565
api/routers/billable_time.py
Normal file
@@ -0,0 +1,565 @@
|
||||
"""
|
||||
Billable Time API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing billable time entries, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.billable_time import (
|
||||
BillableTimeCreate,
|
||||
BillableTimeResponse,
|
||||
BillableTimeUpdate,
|
||||
)
|
||||
from api.services import billable_time_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all billable time entries",
|
||||
description="Retrieve a paginated list of all billable time entries",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_billable_time_entries(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all billable time entries with pagination.
|
||||
|
||||
- **skip**: Number of entries to skip (default: 0)
|
||||
- **limit**: Maximum number of entries to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of billable time entries with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/billable-time?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 25,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"billable_time": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"session_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"hourly_rate": 150.00,
|
||||
"total_amount": 300.00,
|
||||
"is_billable": true,
|
||||
"description": "Database optimization work",
|
||||
"category": "development",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
entries, total = billable_time_service.get_billable_time_entries(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"billable_time": [BillableTimeResponse.model_validate(entry) for entry in entries]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve billable time entries: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{billable_time_id}",
|
||||
response_model=BillableTimeResponse,
|
||||
summary="Get billable time entry by ID",
|
||||
description="Retrieve a single billable time entry by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Billable time entry found and returned",
|
||||
"model": BillableTimeResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Billable time entry not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Billable time entry with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_billable_time_entry(
|
||||
billable_time_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific billable time entry by ID.
|
||||
|
||||
- **billable_time_id**: UUID of the billable time entry to retrieve
|
||||
|
||||
Returns the complete billable time entry details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/billable-time/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"work_item_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"session_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"end_time": "2024-01-15T11:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"hourly_rate": 150.00,
|
||||
"total_amount": 300.00,
|
||||
"is_billable": true,
|
||||
"description": "Database optimization and performance tuning",
|
||||
"category": "development",
|
||||
"notes": "Optimized queries and added indexes",
|
||||
"invoiced_at": null,
|
||||
"invoice_id": null,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
entry = billable_time_service.get_billable_time_by_id(db, billable_time_id)
|
||||
return BillableTimeResponse.model_validate(entry)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=BillableTimeResponse,
|
||||
summary="Create new billable time entry",
|
||||
description="Create a new billable time entry with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Billable time entry created successfully",
|
||||
"model": BillableTimeResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Referenced client, session, or work item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "client_id"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_billable_time_entry(
|
||||
billable_time_data: BillableTimeCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new billable time entry.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/billable-time
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"session_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"work_item_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"end_time": "2024-01-15T11:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"hourly_rate": 150.00,
|
||||
"total_amount": 300.00,
|
||||
"is_billable": true,
|
||||
"description": "Database optimization and performance tuning",
|
||||
"category": "development",
|
||||
"notes": "Optimized queries and added indexes"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"hourly_rate": 150.00,
|
||||
"total_amount": 300.00,
|
||||
"is_billable": true,
|
||||
"category": "development",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
entry = billable_time_service.create_billable_time(db, billable_time_data)
|
||||
return BillableTimeResponse.model_validate(entry)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{billable_time_id}",
|
||||
response_model=BillableTimeResponse,
|
||||
summary="Update billable time entry",
|
||||
description="Update an existing billable time entry's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Billable time entry updated successfully",
|
||||
"model": BillableTimeResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Billable time entry, client, session, or work item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Billable time entry with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Invalid client_id"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_billable_time_entry(
|
||||
billable_time_id: UUID,
|
||||
billable_time_data: BillableTimeUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing billable time entry.
|
||||
|
||||
- **billable_time_id**: UUID of the billable time entry to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/billable-time/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"duration_minutes": 150,
|
||||
"total_amount": 375.00,
|
||||
"notes": "Additional optimization work performed"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"duration_minutes": 150,
|
||||
"hourly_rate": 150.00,
|
||||
"total_amount": 375.00,
|
||||
"notes": "Additional optimization work performed",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
entry = billable_time_service.update_billable_time(db, billable_time_id, billable_time_data)
|
||||
return BillableTimeResponse.model_validate(entry)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{billable_time_id}",
|
||||
response_model=dict,
|
||||
summary="Delete billable time entry",
|
||||
description="Delete a billable time entry by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Billable time entry deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Billable time entry deleted successfully",
|
||||
"billable_time_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Billable time entry not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Billable time entry with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_billable_time_entry(
|
||||
billable_time_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a billable time entry.
|
||||
|
||||
- **billable_time_id**: UUID of the billable time entry to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/billable-time/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Billable time entry deleted successfully",
|
||||
"billable_time_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return billable_time_service.delete_billable_time(db, billable_time_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-session/{session_id}",
|
||||
response_model=dict,
|
||||
summary="Get billable time by session",
|
||||
description="Retrieve billable time entries for a specific session",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Billable time entries retrieved successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 3,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"billable_time": []
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_billable_time_by_session(
|
||||
session_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get billable time entries for a specific session.
|
||||
|
||||
- **session_id**: UUID of the session
|
||||
- **skip**: Number of entries to skip (default: 0)
|
||||
- **limit**: Maximum number of entries to return (default: 100, max: 1000)
|
||||
|
||||
Returns a paginated list of billable time entries for the session.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/billable-time/by-session/789e0123-e89b-12d3-a456-426614174002?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 3,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"billable_time": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"duration_minutes": 120,
|
||||
"total_amount": 300.00,
|
||||
"description": "Database optimization",
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
entries, total = billable_time_service.get_billable_time_by_session(db, session_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"billable_time": [BillableTimeResponse.model_validate(entry) for entry in entries]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve billable time entries: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-work-item/{work_item_id}",
|
||||
response_model=dict,
|
||||
summary="Get billable time by work item",
|
||||
description="Retrieve billable time entries for a specific work item",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Billable time entries retrieved successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"billable_time": []
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_billable_time_by_work_item(
|
||||
work_item_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get billable time entries for a specific work item.
|
||||
|
||||
- **work_item_id**: UUID of the work item
|
||||
- **skip**: Number of entries to skip (default: 0)
|
||||
- **limit**: Maximum number of entries to return (default: 100, max: 1000)
|
||||
|
||||
Returns a paginated list of billable time entries for the work item.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/billable-time/by-work-item/012e3456-e89b-12d3-a456-426614174003?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"billable_time": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"work_item_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"duration_minutes": 120,
|
||||
"total_amount": 300.00,
|
||||
"description": "Bug fix and testing",
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
entries, total = billable_time_service.get_billable_time_by_work_item(db, work_item_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"billable_time": [BillableTimeResponse.model_validate(entry) for entry in entries]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve billable time entries: {str(e)}"
|
||||
)
|
||||
258
api/routers/bulk_import.py
Normal file
258
api/routers/bulk_import.py
Normal file
@@ -0,0 +1,258 @@
|
||||
"""
|
||||
Bulk Import API Router for ClaudeTools.
|
||||
|
||||
Provides endpoints for bulk importing conversation contexts from Claude project folders.
|
||||
Scans .jsonl files, extracts context using the conversation_parser utility.
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.conversation_context import ConversationContextCreate
|
||||
from api.services import conversation_context_service
|
||||
from api.utils.conversation_parser import (
|
||||
extract_context_from_conversation,
|
||||
parse_jsonl_conversation,
|
||||
scan_folder_for_conversations,
|
||||
)
|
||||
|
||||
# Create router
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post(
|
||||
"/import-folder",
|
||||
response_model=dict,
|
||||
summary="Bulk import from Claude projects folder",
|
||||
description="Scan a folder for .jsonl conversation files and import them as contexts",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
async def import_claude_folder(
|
||||
folder_path: str = Query(..., description="Path to Claude projects folder"),
|
||||
dry_run: bool = Query(False, description="Preview import without saving to database"),
|
||||
project_id: Optional[UUID] = Query(None, description="Associate contexts with a specific project"),
|
||||
session_id: Optional[UUID] = Query(None, description="Associate contexts with a specific session"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Bulk import conversation contexts from a Claude projects folder.
|
||||
|
||||
This endpoint:
|
||||
1. Scans the folder for .jsonl conversation files
|
||||
2. Parses each conversation file
|
||||
3. Extracts context, decisions, and metadata
|
||||
4. Saves contexts to database (unless dry_run=True)
|
||||
|
||||
Args:
|
||||
folder_path: Path to the folder containing Claude project conversations
|
||||
dry_run: If True, preview import without saving (default: False)
|
||||
project_id: Optional project ID to associate all contexts with
|
||||
session_id: Optional session ID to associate all contexts with
|
||||
db: Database session
|
||||
current_user: Current authenticated user
|
||||
|
||||
Returns:
|
||||
Dictionary with import results and statistics
|
||||
"""
|
||||
result = {
|
||||
"dry_run": dry_run,
|
||||
"folder_path": folder_path,
|
||||
"files_scanned": 0,
|
||||
"files_processed": 0,
|
||||
"contexts_created": 0,
|
||||
"errors": [],
|
||||
"contexts_preview": [],
|
||||
}
|
||||
|
||||
try:
|
||||
# Step 1: Scan folder for conversation files
|
||||
conversation_files = scan_folder_for_conversations(folder_path)
|
||||
result["files_scanned"] = len(conversation_files)
|
||||
|
||||
if not conversation_files:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"No .jsonl conversation files found in {folder_path}"
|
||||
)
|
||||
|
||||
# Step 2: Process each conversation file
|
||||
for file_path in conversation_files:
|
||||
try:
|
||||
# Parse conversation file using the new parser
|
||||
conversation = parse_jsonl_conversation(file_path)
|
||||
|
||||
if not conversation.get("messages"):
|
||||
result["errors"].append({
|
||||
"file": file_path,
|
||||
"error": "No messages found in file"
|
||||
})
|
||||
continue
|
||||
|
||||
# Extract context using the new parser
|
||||
context = extract_context_from_conversation(conversation)
|
||||
|
||||
# Map context to database format
|
||||
context_title = context["raw_metadata"].get("title", f"Conversation: {conversation.get('file_paths', ['Unknown'])[0] if conversation.get('file_paths') else 'Unknown'}")
|
||||
|
||||
# Build dense summary from compressed summary
|
||||
summary_parts = []
|
||||
if context["summary"].get("summary"):
|
||||
summary_parts.append(context["summary"]["summary"])
|
||||
|
||||
# Add category information
|
||||
summary_parts.append(f"Category: {context['category']}")
|
||||
|
||||
# Add key statistics
|
||||
metrics = context.get("metrics", {})
|
||||
summary_parts.append(
|
||||
f"Messages: {metrics.get('message_count', 0)}, "
|
||||
f"Duration: {metrics.get('duration_seconds', 0)}s, "
|
||||
f"Quality: {metrics.get('quality_score', 0)}/10"
|
||||
)
|
||||
|
||||
dense_summary = "\n\n".join(summary_parts)
|
||||
|
||||
# Map category to context_type
|
||||
category = context.get("category", "general")
|
||||
if category == "msp":
|
||||
context_type = "session_summary"
|
||||
elif category == "development":
|
||||
context_type = "project_state"
|
||||
else:
|
||||
context_type = "general_context"
|
||||
|
||||
# Extract key decisions as JSON string
|
||||
decisions = context.get("decisions", [])
|
||||
key_decisions_json = json.dumps(decisions) if decisions else None
|
||||
|
||||
# Extract tags as JSON string
|
||||
tags = context.get("tags", [])
|
||||
tags_json = json.dumps(tags)
|
||||
|
||||
# Calculate relevance score from quality score
|
||||
quality_score = metrics.get("quality_score", 5.0)
|
||||
relevance_score = min(10.0, quality_score)
|
||||
|
||||
# Build context create schema
|
||||
context_data = ConversationContextCreate(
|
||||
session_id=session_id,
|
||||
project_id=project_id,
|
||||
machine_id=None,
|
||||
context_type=context_type,
|
||||
title=context_title,
|
||||
dense_summary=dense_summary,
|
||||
key_decisions=key_decisions_json,
|
||||
current_state=None,
|
||||
tags=tags_json,
|
||||
relevance_score=relevance_score,
|
||||
)
|
||||
|
||||
# Preview context
|
||||
context_preview = {
|
||||
"file": file_path.split('\\')[-1] if '\\' in file_path else file_path.split('/')[-1],
|
||||
"title": context_title,
|
||||
"type": context_type,
|
||||
"category": category,
|
||||
"message_count": metrics.get("message_count", 0),
|
||||
"tags": tags[:5], # First 5 tags
|
||||
"relevance_score": relevance_score,
|
||||
"quality_score": quality_score,
|
||||
}
|
||||
result["contexts_preview"].append(context_preview)
|
||||
|
||||
# Save to database (unless dry_run)
|
||||
if not dry_run:
|
||||
created_context = conversation_context_service.create_conversation_context(
|
||||
db, context_data
|
||||
)
|
||||
result["contexts_created"] += 1
|
||||
|
||||
result["files_processed"] += 1
|
||||
|
||||
except Exception as e:
|
||||
result["errors"].append({
|
||||
"file": file_path,
|
||||
"error": str(e)
|
||||
})
|
||||
continue
|
||||
|
||||
# Step 3: Generate summary
|
||||
result["summary"] = _generate_import_summary(result)
|
||||
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except FileNotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=str(e)
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Import failed: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
def _generate_import_summary(result: Dict) -> str:
|
||||
"""
|
||||
Generate human-readable summary of import results.
|
||||
|
||||
Args:
|
||||
result: Import results dictionary
|
||||
|
||||
Returns:
|
||||
Summary string
|
||||
"""
|
||||
summary_lines = [
|
||||
f"Scanned {result['files_scanned']} files",
|
||||
f"Processed {result['files_processed']} successfully",
|
||||
]
|
||||
|
||||
if result["dry_run"]:
|
||||
summary_lines.append("DRY RUN - No changes saved to database")
|
||||
summary_lines.append(f"Would create {len(result['contexts_preview'])} contexts")
|
||||
else:
|
||||
summary_lines.append(f"Created {result['contexts_created']} contexts")
|
||||
|
||||
if result["errors"]:
|
||||
summary_lines.append(f"Encountered {len(result['errors'])} errors")
|
||||
|
||||
return " | ".join(summary_lines)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/import-status",
|
||||
response_model=dict,
|
||||
summary="Check import system status",
|
||||
description="Get status of the bulk import system",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
async def get_import_status(
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get status information about the bulk import system.
|
||||
|
||||
Returns:
|
||||
Dictionary with system status
|
||||
"""
|
||||
return {
|
||||
"status": "online",
|
||||
"features": {
|
||||
"conversation_parsing": True,
|
||||
"intelligent_categorization": True,
|
||||
"dry_run": True,
|
||||
},
|
||||
"supported_formats": [".jsonl", ".json"],
|
||||
"categories": ["msp", "development", "general"],
|
||||
"version": "1.0.0",
|
||||
}
|
||||
379
api/routers/clients.py
Normal file
379
api/routers/clients.py
Normal file
@@ -0,0 +1,379 @@
|
||||
"""
|
||||
Client API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing clients, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.client import (
|
||||
ClientCreate,
|
||||
ClientResponse,
|
||||
ClientUpdate,
|
||||
)
|
||||
from api.services import client_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all clients",
|
||||
description="Retrieve a paginated list of all clients with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_clients(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all clients with pagination.
|
||||
|
||||
- **skip**: Number of clients to skip (default: 0)
|
||||
- **limit**: Maximum number of clients to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of clients with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/clients?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"clients": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Acme Corporation",
|
||||
"type": "msp_client",
|
||||
"network_subnet": "192.168.0.0/24",
|
||||
"domain_name": "acme.local",
|
||||
"m365_tenant_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"primary_contact": "John Doe",
|
||||
"notes": "Main MSP client",
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
clients, total = client_service.get_clients(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"clients": [ClientResponse.model_validate(client) for client in clients]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve clients: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{client_id}",
|
||||
response_model=ClientResponse,
|
||||
summary="Get client by ID",
|
||||
description="Retrieve a single client by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Client found and returned",
|
||||
"model": ClientResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_client(
|
||||
client_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific client by ID.
|
||||
|
||||
- **client_id**: UUID of the client to retrieve
|
||||
|
||||
Returns the complete client details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/clients/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Acme Corporation",
|
||||
"type": "msp_client",
|
||||
"network_subnet": "192.168.0.0/24",
|
||||
"domain_name": "acme.local",
|
||||
"m365_tenant_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"primary_contact": "John Doe",
|
||||
"notes": "Main MSP client",
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
client = client_service.get_client_by_id(db, client_id)
|
||||
return ClientResponse.model_validate(client)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ClientResponse,
|
||||
summary="Create new client",
|
||||
description="Create a new client with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Client created successfully",
|
||||
"model": ClientResponse,
|
||||
},
|
||||
409: {
|
||||
"description": "Client with name already exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with name 'Acme Corporation' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_client(
|
||||
client_data: ClientCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new client.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/clients
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "Acme Corporation",
|
||||
"type": "msp_client",
|
||||
"network_subnet": "192.168.0.0/24",
|
||||
"domain_name": "acme.local",
|
||||
"m365_tenant_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"primary_contact": "John Doe",
|
||||
"notes": "Main MSP client",
|
||||
"is_active": true
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Acme Corporation",
|
||||
"type": "msp_client",
|
||||
"network_subnet": "192.168.0.0/24",
|
||||
"domain_name": "acme.local",
|
||||
"m365_tenant_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"primary_contact": "John Doe",
|
||||
"notes": "Main MSP client",
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
client = client_service.create_client(db, client_data)
|
||||
return ClientResponse.model_validate(client)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{client_id}",
|
||||
response_model=ClientResponse,
|
||||
summary="Update client",
|
||||
description="Update an existing client's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Client updated successfully",
|
||||
"model": ClientResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Conflict with existing client",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with name 'Acme Corporation' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_client(
|
||||
client_id: UUID,
|
||||
client_data: ClientUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing client.
|
||||
|
||||
- **client_id**: UUID of the client to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/clients/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"primary_contact": "Jane Smith",
|
||||
"is_active": false,
|
||||
"notes": "Client moved to inactive status"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Acme Corporation",
|
||||
"type": "msp_client",
|
||||
"network_subnet": "192.168.0.0/24",
|
||||
"domain_name": "acme.local",
|
||||
"m365_tenant_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"primary_contact": "Jane Smith",
|
||||
"notes": "Client moved to inactive status",
|
||||
"is_active": false,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
client = client_service.update_client(db, client_id, client_data)
|
||||
return ClientResponse.model_validate(client)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Delete client",
|
||||
description="Delete a client by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Client deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Client deleted successfully",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_client(
|
||||
client_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a client.
|
||||
|
||||
- **client_id**: UUID of the client to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/clients/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Client deleted successfully",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return client_service.delete_client(db, client_id)
|
||||
312
api/routers/context_snippets.py
Normal file
312
api/routers/context_snippets.py
Normal file
@@ -0,0 +1,312 @@
|
||||
"""
|
||||
ContextSnippet API router for ClaudeTools.
|
||||
|
||||
Defines all REST API endpoints for managing context snippets,
|
||||
reusable pieces of knowledge for quick retrieval.
|
||||
"""
|
||||
|
||||
from typing import List
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.context_snippet import (
|
||||
ContextSnippetCreate,
|
||||
ContextSnippetResponse,
|
||||
ContextSnippetUpdate,
|
||||
)
|
||||
from api.services import context_snippet_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all context snippets",
|
||||
description="Retrieve a paginated list of all context snippets with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_context_snippets(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all context snippets with pagination.
|
||||
|
||||
Returns snippets ordered by relevance score and usage count.
|
||||
"""
|
||||
try:
|
||||
snippets, total = context_snippet_service.get_context_snippets(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"snippets": [ContextSnippetResponse.model_validate(snippet) for snippet in snippets]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve context snippets: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-tags",
|
||||
response_model=dict,
|
||||
summary="Get context snippets by tags",
|
||||
description="Retrieve context snippets filtered by tags",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_context_snippets_by_tags(
|
||||
tags: List[str] = Query(..., description="Tags to filter by (OR logic - any match)"),
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get context snippets filtered by tags.
|
||||
|
||||
Uses OR logic - snippets matching any of the provided tags will be returned.
|
||||
"""
|
||||
try:
|
||||
snippets, total = context_snippet_service.get_context_snippets_by_tags(
|
||||
db, tags, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"tags": tags,
|
||||
"snippets": [ContextSnippetResponse.model_validate(snippet) for snippet in snippets]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve context snippets: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/top-relevant",
|
||||
response_model=dict,
|
||||
summary="Get top relevant context snippets",
|
||||
description="Retrieve the most relevant context snippets by relevance score",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_top_relevant_snippets(
|
||||
limit: int = Query(
|
||||
default=10,
|
||||
ge=1,
|
||||
le=50,
|
||||
description="Maximum number of snippets to retrieve (max 50)"
|
||||
),
|
||||
min_relevance_score: float = Query(
|
||||
default=7.0,
|
||||
ge=0.0,
|
||||
le=10.0,
|
||||
description="Minimum relevance score threshold (0.0-10.0)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get the top most relevant context snippets.
|
||||
|
||||
Returns snippets ordered by relevance score (highest first).
|
||||
"""
|
||||
try:
|
||||
snippets = context_snippet_service.get_top_relevant_snippets(
|
||||
db, limit, min_relevance_score
|
||||
)
|
||||
|
||||
return {
|
||||
"total": len(snippets),
|
||||
"limit": limit,
|
||||
"min_relevance_score": min_relevance_score,
|
||||
"snippets": [ContextSnippetResponse.model_validate(snippet) for snippet in snippets]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve top relevant snippets: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-project/{project_id}",
|
||||
response_model=dict,
|
||||
summary="Get context snippets by project",
|
||||
description="Retrieve all context snippets for a specific project",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_context_snippets_by_project(
|
||||
project_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all context snippets for a specific project.
|
||||
"""
|
||||
try:
|
||||
snippets, total = context_snippet_service.get_context_snippets_by_project(
|
||||
db, project_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"project_id": str(project_id),
|
||||
"snippets": [ContextSnippetResponse.model_validate(snippet) for snippet in snippets]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve context snippets: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get context snippets by client",
|
||||
description="Retrieve all context snippets for a specific client",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_context_snippets_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all context snippets for a specific client.
|
||||
"""
|
||||
try:
|
||||
snippets, total = context_snippet_service.get_context_snippets_by_client(
|
||||
db, client_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"client_id": str(client_id),
|
||||
"snippets": [ContextSnippetResponse.model_validate(snippet) for snippet in snippets]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve context snippets: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{snippet_id}",
|
||||
response_model=ContextSnippetResponse,
|
||||
summary="Get context snippet by ID",
|
||||
description="Retrieve a single context snippet by its unique identifier (increments usage_count)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_context_snippet(
|
||||
snippet_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific context snippet by ID.
|
||||
|
||||
Note: This automatically increments the usage_count for tracking.
|
||||
"""
|
||||
snippet = context_snippet_service.get_context_snippet_by_id(db, snippet_id)
|
||||
return ContextSnippetResponse.model_validate(snippet)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ContextSnippetResponse,
|
||||
summary="Create new context snippet",
|
||||
description="Create a new context snippet with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_context_snippet(
|
||||
snippet_data: ContextSnippetCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new context snippet.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
"""
|
||||
snippet = context_snippet_service.create_context_snippet(db, snippet_data)
|
||||
return ContextSnippetResponse.model_validate(snippet)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{snippet_id}",
|
||||
response_model=ContextSnippetResponse,
|
||||
summary="Update context snippet",
|
||||
description="Update an existing context snippet's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_context_snippet(
|
||||
snippet_id: UUID,
|
||||
snippet_data: ContextSnippetUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing context snippet.
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
"""
|
||||
snippet = context_snippet_service.update_context_snippet(db, snippet_id, snippet_data)
|
||||
return ContextSnippetResponse.model_validate(snippet)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{snippet_id}",
|
||||
response_model=dict,
|
||||
summary="Delete context snippet",
|
||||
description="Delete a context snippet by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_context_snippet(
|
||||
snippet_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a context snippet.
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
"""
|
||||
return context_snippet_service.delete_context_snippet(db, snippet_id)
|
||||
287
api/routers/conversation_contexts.py
Normal file
287
api/routers/conversation_contexts.py
Normal file
@@ -0,0 +1,287 @@
|
||||
"""
|
||||
ConversationContext API router for ClaudeTools.
|
||||
|
||||
Defines all REST API endpoints for managing conversation contexts,
|
||||
including context recall functionality for Claude's memory system.
|
||||
"""
|
||||
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.conversation_context import (
|
||||
ConversationContextCreate,
|
||||
ConversationContextResponse,
|
||||
ConversationContextUpdate,
|
||||
)
|
||||
from api.services import conversation_context_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all conversation contexts",
|
||||
description="Retrieve a paginated list of all conversation contexts with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_conversation_contexts(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all conversation contexts with pagination.
|
||||
|
||||
Returns contexts ordered by relevance score and recency.
|
||||
"""
|
||||
try:
|
||||
contexts, total = conversation_context_service.get_conversation_contexts(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"contexts": [ConversationContextResponse.model_validate(ctx) for ctx in contexts]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve conversation contexts: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/recall",
|
||||
response_model=dict,
|
||||
summary="Retrieve relevant contexts for injection",
|
||||
description="Get token-efficient context formatted for Claude prompt injection",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def recall_context(
|
||||
project_id: Optional[UUID] = Query(None, description="Filter by project ID"),
|
||||
tags: Optional[List[str]] = Query(None, description="Filter by tags (OR logic)"),
|
||||
limit: int = Query(
|
||||
default=10,
|
||||
ge=1,
|
||||
le=50,
|
||||
description="Maximum number of contexts to retrieve (max 50)"
|
||||
),
|
||||
min_relevance_score: float = Query(
|
||||
default=5.0,
|
||||
ge=0.0,
|
||||
le=10.0,
|
||||
description="Minimum relevance score threshold (0.0-10.0)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Retrieve relevant contexts formatted for Claude prompt injection.
|
||||
|
||||
This endpoint returns a token-efficient markdown string ready for
|
||||
injection into Claude's prompt. It's the main context recall API.
|
||||
|
||||
Query Parameters:
|
||||
- project_id: Filter contexts by project
|
||||
- tags: Filter contexts by tags (any match)
|
||||
- limit: Maximum number of contexts to retrieve
|
||||
- min_relevance_score: Minimum relevance score threshold
|
||||
|
||||
Returns a formatted string ready for prompt injection.
|
||||
"""
|
||||
try:
|
||||
formatted_context = conversation_context_service.get_recall_context(
|
||||
db=db,
|
||||
project_id=project_id,
|
||||
tags=tags,
|
||||
limit=limit,
|
||||
min_relevance_score=min_relevance_score
|
||||
)
|
||||
|
||||
return {
|
||||
"context": formatted_context,
|
||||
"project_id": str(project_id) if project_id else None,
|
||||
"tags": tags,
|
||||
"limit": limit,
|
||||
"min_relevance_score": min_relevance_score
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve recall context: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-project/{project_id}",
|
||||
response_model=dict,
|
||||
summary="Get conversation contexts by project",
|
||||
description="Retrieve all conversation contexts for a specific project",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_conversation_contexts_by_project(
|
||||
project_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all conversation contexts for a specific project.
|
||||
"""
|
||||
try:
|
||||
contexts, total = conversation_context_service.get_conversation_contexts_by_project(
|
||||
db, project_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"project_id": str(project_id),
|
||||
"contexts": [ConversationContextResponse.model_validate(ctx) for ctx in contexts]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve conversation contexts: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-session/{session_id}",
|
||||
response_model=dict,
|
||||
summary="Get conversation contexts by session",
|
||||
description="Retrieve all conversation contexts for a specific session",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_conversation_contexts_by_session(
|
||||
session_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all conversation contexts for a specific session.
|
||||
"""
|
||||
try:
|
||||
contexts, total = conversation_context_service.get_conversation_contexts_by_session(
|
||||
db, session_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"session_id": str(session_id),
|
||||
"contexts": [ConversationContextResponse.model_validate(ctx) for ctx in contexts]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve conversation contexts: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{context_id}",
|
||||
response_model=ConversationContextResponse,
|
||||
summary="Get conversation context by ID",
|
||||
description="Retrieve a single conversation context by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_conversation_context(
|
||||
context_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific conversation context by ID.
|
||||
"""
|
||||
context = conversation_context_service.get_conversation_context_by_id(db, context_id)
|
||||
return ConversationContextResponse.model_validate(context)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ConversationContextResponse,
|
||||
summary="Create new conversation context",
|
||||
description="Create a new conversation context with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_conversation_context(
|
||||
context_data: ConversationContextCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new conversation context.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
"""
|
||||
context = conversation_context_service.create_conversation_context(db, context_data)
|
||||
return ConversationContextResponse.model_validate(context)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{context_id}",
|
||||
response_model=ConversationContextResponse,
|
||||
summary="Update conversation context",
|
||||
description="Update an existing conversation context's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_conversation_context(
|
||||
context_id: UUID,
|
||||
context_data: ConversationContextUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing conversation context.
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
"""
|
||||
context = conversation_context_service.update_conversation_context(db, context_id, context_data)
|
||||
return ConversationContextResponse.model_validate(context)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{context_id}",
|
||||
response_model=dict,
|
||||
summary="Delete conversation context",
|
||||
description="Delete a conversation context by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_conversation_context(
|
||||
context_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a conversation context.
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
"""
|
||||
return conversation_context_service.delete_conversation_context(db, context_id)
|
||||
179
api/routers/credential_audit_logs.py
Normal file
179
api/routers/credential_audit_logs.py
Normal file
@@ -0,0 +1,179 @@
|
||||
"""
|
||||
Credential Audit Logs API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for viewing credential audit logs (read-only).
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.credential_audit_log import CredentialAuditLogResponse
|
||||
from api.services import credential_audit_log_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all credential audit logs",
|
||||
description="Retrieve a paginated list of all credential audit log entries",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_credential_audit_logs(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all credential audit logs with pagination.
|
||||
|
||||
- **skip**: Number of logs to skip (default: 0)
|
||||
- **limit**: Maximum number of logs to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of audit log entries with pagination metadata.
|
||||
Logs are ordered by timestamp descending (most recent first).
|
||||
|
||||
**Note**: Audit logs are read-only and immutable.
|
||||
"""
|
||||
try:
|
||||
logs, total = credential_audit_log_service.get_credential_audit_logs(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"logs": [CredentialAuditLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve credential audit logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{log_id}",
|
||||
response_model=CredentialAuditLogResponse,
|
||||
summary="Get credential audit log by ID",
|
||||
description="Retrieve a single credential audit log entry by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_credential_audit_log(
|
||||
log_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific credential audit log entry by ID.
|
||||
|
||||
- **log_id**: UUID of the audit log entry to retrieve
|
||||
|
||||
Returns the complete audit log details including action, user, timestamp, and context.
|
||||
"""
|
||||
log = credential_audit_log_service.get_credential_audit_log_by_id(db, log_id)
|
||||
return CredentialAuditLogResponse.model_validate(log)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-credential/{credential_id}",
|
||||
response_model=dict,
|
||||
summary="Get audit logs for a credential",
|
||||
description="Retrieve all audit log entries for a specific credential",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_credential_audit_logs_by_credential(
|
||||
credential_id: UUID,
|
||||
skip: int = Query(default=0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records to return"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all audit log entries for a specific credential.
|
||||
|
||||
- **credential_id**: UUID of the credential
|
||||
- **skip**: Number of logs to skip (default: 0)
|
||||
- **limit**: Maximum number of logs to return (default: 100, max: 1000)
|
||||
|
||||
Returns all operations performed on this credential including views, updates,
|
||||
and deletions. Logs are ordered by timestamp descending (most recent first).
|
||||
"""
|
||||
try:
|
||||
logs, total = credential_audit_log_service.get_credential_audit_logs_by_credential(
|
||||
db, credential_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"credential_id": str(credential_id),
|
||||
"logs": [CredentialAuditLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve credential audit logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-user/{user_id}",
|
||||
response_model=dict,
|
||||
summary="Get audit logs for a user",
|
||||
description="Retrieve all audit log entries for a specific user",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_credential_audit_logs_by_user(
|
||||
user_id: str,
|
||||
skip: int = Query(default=0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records to return"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all audit log entries for a specific user.
|
||||
|
||||
- **user_id**: User ID to filter by (JWT sub claim)
|
||||
- **skip**: Number of logs to skip (default: 0)
|
||||
- **limit**: Maximum number of logs to return (default: 100, max: 1000)
|
||||
|
||||
Returns all credential operations performed by this user.
|
||||
Logs are ordered by timestamp descending (most recent first).
|
||||
"""
|
||||
try:
|
||||
logs, total = credential_audit_log_service.get_credential_audit_logs_by_user(
|
||||
db, user_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"user_id": user_id,
|
||||
"logs": [CredentialAuditLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve credential audit logs: {str(e)}"
|
||||
)
|
||||
429
api/routers/credentials.py
Normal file
429
api/routers/credentials.py
Normal file
@@ -0,0 +1,429 @@
|
||||
"""
|
||||
Credentials API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing credentials with encryption.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.credential import (
|
||||
CredentialCreate,
|
||||
CredentialResponse,
|
||||
CredentialUpdate,
|
||||
)
|
||||
from api.services import credential_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _get_user_context(request: Request, current_user: dict) -> dict:
|
||||
"""Extract user context for audit logging."""
|
||||
return {
|
||||
"user_id": current_user.get("sub", "unknown"),
|
||||
"ip_address": request.client.host if request.client else None,
|
||||
"user_agent": request.headers.get("user-agent"),
|
||||
}
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all credentials",
|
||||
description="Retrieve a paginated list of all credentials (decrypted for authenticated users)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_credentials(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all credentials with pagination.
|
||||
|
||||
- **skip**: Number of credentials to skip (default: 0)
|
||||
- **limit**: Maximum number of credentials to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of credentials with pagination metadata.
|
||||
Sensitive fields are decrypted and returned to authenticated users.
|
||||
|
||||
**Security Note**: This endpoint returns decrypted passwords and keys.
|
||||
Ensure proper authentication and authorization before calling.
|
||||
"""
|
||||
try:
|
||||
credentials, total = credential_service.get_credentials(db, skip, limit)
|
||||
|
||||
# Convert to response models with decryption
|
||||
response_credentials = []
|
||||
for cred in credentials:
|
||||
# Map encrypted fields to decrypted field names for the response schema
|
||||
cred_dict = {
|
||||
"id": cred.id,
|
||||
"client_id": cred.client_id,
|
||||
"service_id": cred.service_id,
|
||||
"infrastructure_id": cred.infrastructure_id,
|
||||
"credential_type": cred.credential_type,
|
||||
"service_name": cred.service_name,
|
||||
"username": cred.username,
|
||||
"password": cred.password_encrypted, # Will be decrypted by validator
|
||||
"api_key": cred.api_key_encrypted, # Will be decrypted by validator
|
||||
"client_id_oauth": cred.client_id_oauth,
|
||||
"client_secret": cred.client_secret_encrypted, # Will be decrypted by validator
|
||||
"tenant_id_oauth": cred.tenant_id_oauth,
|
||||
"public_key": cred.public_key,
|
||||
"token": cred.token_encrypted, # Will be decrypted by validator
|
||||
"connection_string": cred.connection_string_encrypted, # Will be decrypted by validator
|
||||
"integration_code": cred.integration_code,
|
||||
"external_url": cred.external_url,
|
||||
"internal_url": cred.internal_url,
|
||||
"custom_port": cred.custom_port,
|
||||
"role_description": cred.role_description,
|
||||
"requires_vpn": cred.requires_vpn,
|
||||
"requires_2fa": cred.requires_2fa,
|
||||
"ssh_key_auth_enabled": cred.ssh_key_auth_enabled,
|
||||
"access_level": cred.access_level,
|
||||
"expires_at": cred.expires_at,
|
||||
"last_rotated_at": cred.last_rotated_at,
|
||||
"is_active": cred.is_active,
|
||||
"created_at": cred.created_at,
|
||||
"updated_at": cred.updated_at,
|
||||
}
|
||||
response_credentials.append(CredentialResponse(**cred_dict))
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"credentials": response_credentials
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve credentials: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{credential_id}",
|
||||
response_model=CredentialResponse,
|
||||
summary="Get credential by ID",
|
||||
description="Retrieve a single credential by its unique identifier (decrypted)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_credential(
|
||||
credential_id: UUID,
|
||||
request: Request,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific credential by ID.
|
||||
|
||||
- **credential_id**: UUID of the credential to retrieve
|
||||
|
||||
Returns the complete credential details with decrypted sensitive fields.
|
||||
This action is logged in the audit trail.
|
||||
|
||||
**Security Note**: This endpoint returns decrypted passwords and keys.
|
||||
"""
|
||||
user_ctx = _get_user_context(request, current_user)
|
||||
credential = credential_service.get_credential_by_id(db, credential_id, user_id=user_ctx["user_id"])
|
||||
|
||||
# Map encrypted fields to decrypted field names
|
||||
cred_dict = {
|
||||
"id": credential.id,
|
||||
"client_id": credential.client_id,
|
||||
"service_id": credential.service_id,
|
||||
"infrastructure_id": credential.infrastructure_id,
|
||||
"credential_type": credential.credential_type,
|
||||
"service_name": credential.service_name,
|
||||
"username": credential.username,
|
||||
"password": credential.password_encrypted,
|
||||
"api_key": credential.api_key_encrypted,
|
||||
"client_id_oauth": credential.client_id_oauth,
|
||||
"client_secret": credential.client_secret_encrypted,
|
||||
"tenant_id_oauth": credential.tenant_id_oauth,
|
||||
"public_key": credential.public_key,
|
||||
"token": credential.token_encrypted,
|
||||
"connection_string": credential.connection_string_encrypted,
|
||||
"integration_code": credential.integration_code,
|
||||
"external_url": credential.external_url,
|
||||
"internal_url": credential.internal_url,
|
||||
"custom_port": credential.custom_port,
|
||||
"role_description": credential.role_description,
|
||||
"requires_vpn": credential.requires_vpn,
|
||||
"requires_2fa": credential.requires_2fa,
|
||||
"ssh_key_auth_enabled": credential.ssh_key_auth_enabled,
|
||||
"access_level": credential.access_level,
|
||||
"expires_at": credential.expires_at,
|
||||
"last_rotated_at": credential.last_rotated_at,
|
||||
"is_active": credential.is_active,
|
||||
"created_at": credential.created_at,
|
||||
"updated_at": credential.updated_at,
|
||||
}
|
||||
|
||||
return CredentialResponse(**cred_dict)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=CredentialResponse,
|
||||
summary="Create new credential",
|
||||
description="Create a new credential with encryption of sensitive fields",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_credential(
|
||||
credential_data: CredentialCreate,
|
||||
request: Request,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new credential.
|
||||
|
||||
Sensitive fields (password, api_key, client_secret, token, connection_string)
|
||||
are automatically encrypted before storage. This action is logged in the audit trail.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Security Note**: Plaintext credentials are never logged or stored unencrypted.
|
||||
"""
|
||||
user_ctx = _get_user_context(request, current_user)
|
||||
credential = credential_service.create_credential(
|
||||
db,
|
||||
credential_data,
|
||||
user_id=user_ctx["user_id"],
|
||||
ip_address=user_ctx["ip_address"],
|
||||
user_agent=user_ctx["user_agent"],
|
||||
)
|
||||
|
||||
# Map encrypted fields to decrypted field names
|
||||
cred_dict = {
|
||||
"id": credential.id,
|
||||
"client_id": credential.client_id,
|
||||
"service_id": credential.service_id,
|
||||
"infrastructure_id": credential.infrastructure_id,
|
||||
"credential_type": credential.credential_type,
|
||||
"service_name": credential.service_name,
|
||||
"username": credential.username,
|
||||
"password": credential.password_encrypted,
|
||||
"api_key": credential.api_key_encrypted,
|
||||
"client_id_oauth": credential.client_id_oauth,
|
||||
"client_secret": credential.client_secret_encrypted,
|
||||
"tenant_id_oauth": credential.tenant_id_oauth,
|
||||
"public_key": credential.public_key,
|
||||
"token": credential.token_encrypted,
|
||||
"connection_string": credential.connection_string_encrypted,
|
||||
"integration_code": credential.integration_code,
|
||||
"external_url": credential.external_url,
|
||||
"internal_url": credential.internal_url,
|
||||
"custom_port": credential.custom_port,
|
||||
"role_description": credential.role_description,
|
||||
"requires_vpn": credential.requires_vpn,
|
||||
"requires_2fa": credential.requires_2fa,
|
||||
"ssh_key_auth_enabled": credential.ssh_key_auth_enabled,
|
||||
"access_level": credential.access_level,
|
||||
"expires_at": credential.expires_at,
|
||||
"last_rotated_at": credential.last_rotated_at,
|
||||
"is_active": credential.is_active,
|
||||
"created_at": credential.created_at,
|
||||
"updated_at": credential.updated_at,
|
||||
}
|
||||
|
||||
return CredentialResponse(**cred_dict)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{credential_id}",
|
||||
response_model=CredentialResponse,
|
||||
summary="Update credential",
|
||||
description="Update an existing credential's details with re-encryption if needed",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_credential(
|
||||
credential_id: UUID,
|
||||
credential_data: CredentialUpdate,
|
||||
request: Request,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing credential.
|
||||
|
||||
- **credential_id**: UUID of the credential to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If sensitive fields are updated, they are re-encrypted. This action is logged.
|
||||
|
||||
**Security Note**: Updated credentials are re-encrypted before storage.
|
||||
"""
|
||||
user_ctx = _get_user_context(request, current_user)
|
||||
credential = credential_service.update_credential(
|
||||
db,
|
||||
credential_id,
|
||||
credential_data,
|
||||
user_id=user_ctx["user_id"],
|
||||
ip_address=user_ctx["ip_address"],
|
||||
user_agent=user_ctx["user_agent"],
|
||||
)
|
||||
|
||||
# Map encrypted fields to decrypted field names
|
||||
cred_dict = {
|
||||
"id": credential.id,
|
||||
"client_id": credential.client_id,
|
||||
"service_id": credential.service_id,
|
||||
"infrastructure_id": credential.infrastructure_id,
|
||||
"credential_type": credential.credential_type,
|
||||
"service_name": credential.service_name,
|
||||
"username": credential.username,
|
||||
"password": credential.password_encrypted,
|
||||
"api_key": credential.api_key_encrypted,
|
||||
"client_id_oauth": credential.client_id_oauth,
|
||||
"client_secret": credential.client_secret_encrypted,
|
||||
"tenant_id_oauth": credential.tenant_id_oauth,
|
||||
"public_key": credential.public_key,
|
||||
"token": credential.token_encrypted,
|
||||
"connection_string": credential.connection_string_encrypted,
|
||||
"integration_code": credential.integration_code,
|
||||
"external_url": credential.external_url,
|
||||
"internal_url": credential.internal_url,
|
||||
"custom_port": credential.custom_port,
|
||||
"role_description": credential.role_description,
|
||||
"requires_vpn": credential.requires_vpn,
|
||||
"requires_2fa": credential.requires_2fa,
|
||||
"ssh_key_auth_enabled": credential.ssh_key_auth_enabled,
|
||||
"access_level": credential.access_level,
|
||||
"expires_at": credential.expires_at,
|
||||
"last_rotated_at": credential.last_rotated_at,
|
||||
"is_active": credential.is_active,
|
||||
"created_at": credential.created_at,
|
||||
"updated_at": credential.updated_at,
|
||||
}
|
||||
|
||||
return CredentialResponse(**cred_dict)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{credential_id}",
|
||||
response_model=dict,
|
||||
summary="Delete credential",
|
||||
description="Delete a credential by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_credential(
|
||||
credential_id: UUID,
|
||||
request: Request,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a credential.
|
||||
|
||||
- **credential_id**: UUID of the credential to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
The deletion is logged in the audit trail.
|
||||
|
||||
**Security Note**: Audit logs are retained after credential deletion.
|
||||
"""
|
||||
user_ctx = _get_user_context(request, current_user)
|
||||
return credential_service.delete_credential(
|
||||
db,
|
||||
credential_id,
|
||||
user_id=user_ctx["user_id"],
|
||||
ip_address=user_ctx["ip_address"],
|
||||
user_agent=user_ctx["user_agent"],
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get credentials by client",
|
||||
description="Retrieve all credentials for a specific client",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_credentials_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(default=0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records to return"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all credentials associated with a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of credentials to skip (default: 0)
|
||||
- **limit**: Maximum number of credentials to return (default: 100, max: 1000)
|
||||
|
||||
Returns credentials with decrypted sensitive fields.
|
||||
"""
|
||||
try:
|
||||
credentials, total = credential_service.get_credentials_by_client(db, client_id, skip, limit)
|
||||
|
||||
# Convert to response models with decryption
|
||||
response_credentials = []
|
||||
for cred in credentials:
|
||||
cred_dict = {
|
||||
"id": cred.id,
|
||||
"client_id": cred.client_id,
|
||||
"service_id": cred.service_id,
|
||||
"infrastructure_id": cred.infrastructure_id,
|
||||
"credential_type": cred.credential_type,
|
||||
"service_name": cred.service_name,
|
||||
"username": cred.username,
|
||||
"password": cred.password_encrypted,
|
||||
"api_key": cred.api_key_encrypted,
|
||||
"client_id_oauth": cred.client_id_oauth,
|
||||
"client_secret": cred.client_secret_encrypted,
|
||||
"tenant_id_oauth": cred.tenant_id_oauth,
|
||||
"public_key": cred.public_key,
|
||||
"token": cred.token_encrypted,
|
||||
"connection_string": cred.connection_string_encrypted,
|
||||
"integration_code": cred.integration_code,
|
||||
"external_url": cred.external_url,
|
||||
"internal_url": cred.internal_url,
|
||||
"custom_port": cred.custom_port,
|
||||
"role_description": cred.role_description,
|
||||
"requires_vpn": cred.requires_vpn,
|
||||
"requires_2fa": cred.requires_2fa,
|
||||
"ssh_key_auth_enabled": cred.ssh_key_auth_enabled,
|
||||
"access_level": cred.access_level,
|
||||
"expires_at": cred.expires_at,
|
||||
"last_rotated_at": cred.last_rotated_at,
|
||||
"is_active": cred.is_active,
|
||||
"created_at": cred.created_at,
|
||||
"updated_at": cred.updated_at,
|
||||
}
|
||||
response_credentials.append(CredentialResponse(**cred_dict))
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"client_id": str(client_id),
|
||||
"credentials": response_credentials
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve credentials for client: {str(e)}"
|
||||
)
|
||||
264
api/routers/decision_logs.py
Normal file
264
api/routers/decision_logs.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""
|
||||
DecisionLog API router for ClaudeTools.
|
||||
|
||||
Defines all REST API endpoints for managing decision logs,
|
||||
tracking important decisions made during work.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.decision_log import (
|
||||
DecisionLogCreate,
|
||||
DecisionLogResponse,
|
||||
DecisionLogUpdate,
|
||||
)
|
||||
from api.services import decision_log_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all decision logs",
|
||||
description="Retrieve a paginated list of all decision logs",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_decision_logs(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all decision logs with pagination.
|
||||
|
||||
Returns decision logs ordered by most recent first.
|
||||
"""
|
||||
try:
|
||||
logs, total = decision_log_service.get_decision_logs(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"logs": [DecisionLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve decision logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-impact/{impact}",
|
||||
response_model=dict,
|
||||
summary="Get decision logs by impact level",
|
||||
description="Retrieve decision logs filtered by impact level",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_decision_logs_by_impact(
|
||||
impact: str,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get decision logs filtered by impact level.
|
||||
|
||||
Valid impact levels: low, medium, high, critical
|
||||
"""
|
||||
try:
|
||||
logs, total = decision_log_service.get_decision_logs_by_impact(
|
||||
db, impact, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"impact": impact,
|
||||
"logs": [DecisionLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve decision logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-project/{project_id}",
|
||||
response_model=dict,
|
||||
summary="Get decision logs by project",
|
||||
description="Retrieve all decision logs for a specific project",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_decision_logs_by_project(
|
||||
project_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all decision logs for a specific project.
|
||||
"""
|
||||
try:
|
||||
logs, total = decision_log_service.get_decision_logs_by_project(
|
||||
db, project_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"project_id": str(project_id),
|
||||
"logs": [DecisionLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve decision logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-session/{session_id}",
|
||||
response_model=dict,
|
||||
summary="Get decision logs by session",
|
||||
description="Retrieve all decision logs for a specific session",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_decision_logs_by_session(
|
||||
session_id: UUID,
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=100, ge=1, le=1000),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all decision logs for a specific session.
|
||||
"""
|
||||
try:
|
||||
logs, total = decision_log_service.get_decision_logs_by_session(
|
||||
db, session_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"session_id": str(session_id),
|
||||
"logs": [DecisionLogResponse.model_validate(log) for log in logs]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve decision logs: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{log_id}",
|
||||
response_model=DecisionLogResponse,
|
||||
summary="Get decision log by ID",
|
||||
description="Retrieve a single decision log by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_decision_log(
|
||||
log_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific decision log by ID.
|
||||
"""
|
||||
log = decision_log_service.get_decision_log_by_id(db, log_id)
|
||||
return DecisionLogResponse.model_validate(log)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=DecisionLogResponse,
|
||||
summary="Create new decision log",
|
||||
description="Create a new decision log with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_decision_log(
|
||||
log_data: DecisionLogCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new decision log.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
"""
|
||||
log = decision_log_service.create_decision_log(db, log_data)
|
||||
return DecisionLogResponse.model_validate(log)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{log_id}",
|
||||
response_model=DecisionLogResponse,
|
||||
summary="Update decision log",
|
||||
description="Update an existing decision log's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_decision_log(
|
||||
log_id: UUID,
|
||||
log_data: DecisionLogUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing decision log.
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
"""
|
||||
log = decision_log_service.update_decision_log(db, log_id, log_data)
|
||||
return DecisionLogResponse.model_validate(log)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{log_id}",
|
||||
response_model=dict,
|
||||
summary="Delete decision log",
|
||||
description="Delete a decision log by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_decision_log(
|
||||
log_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a decision log.
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
"""
|
||||
return decision_log_service.delete_decision_log(db, log_id)
|
||||
469
api/routers/firewall_rules.py
Normal file
469
api/routers/firewall_rules.py
Normal file
@@ -0,0 +1,469 @@
|
||||
"""
|
||||
Firewall Rule API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing firewall rules, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.firewall_rule import (
|
||||
FirewallRuleCreate,
|
||||
FirewallRuleResponse,
|
||||
FirewallRuleUpdate,
|
||||
)
|
||||
from api.services import firewall_rule_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all firewall rules",
|
||||
description="Retrieve a paginated list of all firewall rules with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_firewall_rules(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all firewall rules with pagination.
|
||||
|
||||
- **skip**: Number of firewall rules to skip (default: 0)
|
||||
- **limit**: Maximum number of firewall rules to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of firewall rules with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/firewall-rules?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"firewall_rules": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "allow",
|
||||
"rule_order": 1,
|
||||
"notes": "Allow SSH from internal network",
|
||||
"created_by": "admin@example.com",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
rules, total = firewall_rule_service.get_firewall_rules(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"firewall_rules": [FirewallRuleResponse.model_validate(rule) for rule in rules]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve firewall rules: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-infrastructure/{infrastructure_id}",
|
||||
response_model=dict,
|
||||
summary="Get firewall rules by infrastructure",
|
||||
description="Retrieve all firewall rules for a specific infrastructure with pagination",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Firewall rules found and returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"firewall_rules": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "allow",
|
||||
"rule_order": 1,
|
||||
"notes": "Allow SSH from internal network",
|
||||
"created_by": "admin@example.com",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID abc12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_firewall_rules_by_infrastructure(
|
||||
infrastructure_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all firewall rules for a specific infrastructure.
|
||||
|
||||
- **infrastructure_id**: UUID of the infrastructure
|
||||
- **skip**: Number of firewall rules to skip (default: 0)
|
||||
- **limit**: Maximum number of firewall rules to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of firewall rules for the specified infrastructure with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/firewall-rules/by-infrastructure/abc12345-6789-0def-1234-56789abcdef0?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
"""
|
||||
rules, total = firewall_rule_service.get_firewall_rules_by_infrastructure(db, infrastructure_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"firewall_rules": [FirewallRuleResponse.model_validate(rule) for rule in rules]
|
||||
}
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{firewall_rule_id}",
|
||||
response_model=FirewallRuleResponse,
|
||||
summary="Get firewall rule by ID",
|
||||
description="Retrieve a single firewall rule by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Firewall rule found and returned",
|
||||
"model": FirewallRuleResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Firewall rule not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Firewall rule with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_firewall_rule(
|
||||
firewall_rule_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific firewall rule by ID.
|
||||
|
||||
- **firewall_rule_id**: UUID of the firewall rule to retrieve
|
||||
|
||||
Returns the complete firewall rule details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/firewall-rules/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "allow",
|
||||
"rule_order": 1,
|
||||
"notes": "Allow SSH from internal network",
|
||||
"created_by": "admin@example.com",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
rule = firewall_rule_service.get_firewall_rule_by_id(db, firewall_rule_id)
|
||||
return FirewallRuleResponse.model_validate(rule)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=FirewallRuleResponse,
|
||||
summary="Create new firewall rule",
|
||||
description="Create a new firewall rule with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Firewall rule created successfully",
|
||||
"model": FirewallRuleResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID abc12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "rule_name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_firewall_rule(
|
||||
firewall_rule_data: FirewallRuleCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new firewall rule.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The infrastructure_id must reference an existing infrastructure if provided.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/firewall-rules
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "allow",
|
||||
"rule_order": 1,
|
||||
"notes": "Allow SSH from internal network",
|
||||
"created_by": "admin@example.com"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "allow",
|
||||
"rule_order": 1,
|
||||
"notes": "Allow SSH from internal network",
|
||||
"created_by": "admin@example.com",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
rule = firewall_rule_service.create_firewall_rule(db, firewall_rule_data)
|
||||
return FirewallRuleResponse.model_validate(rule)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{firewall_rule_id}",
|
||||
response_model=FirewallRuleResponse,
|
||||
summary="Update firewall rule",
|
||||
description="Update an existing firewall rule's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Firewall rule updated successfully",
|
||||
"model": FirewallRuleResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Firewall rule or infrastructure not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Firewall rule with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_firewall_rule(
|
||||
firewall_rule_id: UUID,
|
||||
firewall_rule_data: FirewallRuleUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing firewall rule.
|
||||
|
||||
- **firewall_rule_id**: UUID of the firewall rule to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating infrastructure_id, the new infrastructure must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/firewall-rules/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"action": "deny",
|
||||
"notes": "Changed to deny SSH access"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"rule_name": "Allow SSH",
|
||||
"source_cidr": "10.0.0.0/8",
|
||||
"destination_cidr": "192.168.1.0/24",
|
||||
"port": 22,
|
||||
"protocol": "tcp",
|
||||
"action": "deny",
|
||||
"rule_order": 1,
|
||||
"notes": "Changed to deny SSH access",
|
||||
"created_by": "admin@example.com",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
rule = firewall_rule_service.update_firewall_rule(db, firewall_rule_id, firewall_rule_data)
|
||||
return FirewallRuleResponse.model_validate(rule)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{firewall_rule_id}",
|
||||
response_model=dict,
|
||||
summary="Delete firewall rule",
|
||||
description="Delete a firewall rule by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Firewall rule deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Firewall rule deleted successfully",
|
||||
"firewall_rule_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Firewall rule not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Firewall rule with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_firewall_rule(
|
||||
firewall_rule_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a firewall rule.
|
||||
|
||||
- **firewall_rule_id**: UUID of the firewall rule to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/firewall-rules/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Firewall rule deleted successfully",
|
||||
"firewall_rule_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return firewall_rule_service.delete_firewall_rule(db, firewall_rule_id)
|
||||
556
api/routers/infrastructure.py
Normal file
556
api/routers/infrastructure.py
Normal file
@@ -0,0 +1,556 @@
|
||||
"""
|
||||
Infrastructure API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing infrastructure assets,
|
||||
including CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.infrastructure import (
|
||||
InfrastructureCreate,
|
||||
InfrastructureResponse,
|
||||
InfrastructureUpdate,
|
||||
)
|
||||
from api.services import infrastructure_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all infrastructure items",
|
||||
description="Retrieve a paginated list of all infrastructure items with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_infrastructure(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all infrastructure items with pagination.
|
||||
|
||||
- **skip**: Number of items to skip (default: 0)
|
||||
- **limit**: Maximum number of items to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of infrastructure items with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/infrastructure?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 10,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"infrastructure": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"client_id": "client-uuid",
|
||||
"site_id": "site-uuid",
|
||||
"status": "active",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
items, total = infrastructure_service.get_infrastructure_items(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"infrastructure": [InfrastructureResponse.model_validate(item) for item in items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve infrastructure items: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{infrastructure_id}",
|
||||
response_model=InfrastructureResponse,
|
||||
summary="Get infrastructure by ID",
|
||||
description="Retrieve a single infrastructure item by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Infrastructure item found and returned",
|
||||
"model": InfrastructureResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_infrastructure(
|
||||
infrastructure_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific infrastructure item by ID.
|
||||
|
||||
- **infrastructure_id**: UUID of the infrastructure item to retrieve
|
||||
|
||||
Returns the complete infrastructure item details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/infrastructure/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"client_id": "client-uuid",
|
||||
"site_id": "site-uuid",
|
||||
"ip_address": "192.168.1.10",
|
||||
"mac_address": "00:1A:2B:3C:4D:5E",
|
||||
"os": "Windows Server 2022",
|
||||
"os_version": "21H2",
|
||||
"role_description": "Primary domain controller for the network",
|
||||
"status": "active",
|
||||
"has_gui": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
item = infrastructure_service.get_infrastructure_by_id(db, infrastructure_id)
|
||||
return InfrastructureResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=InfrastructureResponse,
|
||||
summary="Create new infrastructure item",
|
||||
description="Create a new infrastructure item with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Infrastructure item created successfully",
|
||||
"model": InfrastructureResponse,
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error or invalid foreign key",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "hostname"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_infrastructure(
|
||||
infrastructure_data: InfrastructureCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new infrastructure item.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
Validates foreign keys (client_id, site_id, parent_host_id) before creation.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/infrastructure
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"client_id": "client-uuid",
|
||||
"site_id": "site-uuid",
|
||||
"ip_address": "192.168.1.10",
|
||||
"mac_address": "00:1A:2B:3C:4D:5E",
|
||||
"os": "Windows Server 2022",
|
||||
"os_version": "21H2",
|
||||
"role_description": "Primary domain controller",
|
||||
"status": "active",
|
||||
"powershell_version": "5.1",
|
||||
"shell_type": "powershell",
|
||||
"has_gui": true
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"status": "active",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
item = infrastructure_service.create_infrastructure(db, infrastructure_data)
|
||||
return InfrastructureResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{infrastructure_id}",
|
||||
response_model=InfrastructureResponse,
|
||||
summary="Update infrastructure item",
|
||||
description="Update an existing infrastructure item's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Infrastructure item updated successfully",
|
||||
"model": InfrastructureResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error or invalid foreign key",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID client-uuid not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_infrastructure(
|
||||
infrastructure_id: UUID,
|
||||
infrastructure_data: InfrastructureUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing infrastructure item.
|
||||
|
||||
- **infrastructure_id**: UUID of the infrastructure item to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
Validates foreign keys (client_id, site_id, parent_host_id) before updating.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/infrastructure/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "decommissioned",
|
||||
"notes": "Server retired and replaced with new hardware"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"status": "decommissioned",
|
||||
"notes": "Server retired and replaced with new hardware",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
item = infrastructure_service.update_infrastructure(db, infrastructure_id, infrastructure_data)
|
||||
return InfrastructureResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{infrastructure_id}",
|
||||
response_model=dict,
|
||||
summary="Delete infrastructure item",
|
||||
description="Delete an infrastructure item by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Infrastructure item deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Infrastructure deleted successfully",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_infrastructure(
|
||||
infrastructure_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete an infrastructure item.
|
||||
|
||||
- **infrastructure_id**: UUID of the infrastructure item to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/infrastructure/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Infrastructure deleted successfully",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return infrastructure_service.delete_infrastructure(db, infrastructure_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-site/{site_id}",
|
||||
response_model=dict,
|
||||
summary="Get infrastructure by site",
|
||||
description="Retrieve all infrastructure items for a specific site",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Infrastructure items for site returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"infrastructure": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_infrastructure_by_site(
|
||||
site_id: str,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all infrastructure items for a specific site.
|
||||
|
||||
- **site_id**: UUID of the site
|
||||
- **skip**: Number of items to skip (default: 0)
|
||||
- **limit**: Maximum number of items to return (default: 100, max: 1000)
|
||||
|
||||
Returns infrastructure items associated with the specified site.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/infrastructure/by-site/site-uuid-here?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"infrastructure": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"site_id": "site-uuid-here",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
items, total = infrastructure_service.get_infrastructure_by_site(db, site_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"infrastructure": [InfrastructureResponse.model_validate(item) for item in items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve infrastructure items for site: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get infrastructure by client",
|
||||
description="Retrieve all infrastructure items for a specific client",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Infrastructure items for client returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"infrastructure": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_infrastructure_by_client(
|
||||
client_id: str,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all infrastructure items for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of items to skip (default: 0)
|
||||
- **limit**: Maximum number of items to return (default: 100, max: 1000)
|
||||
|
||||
Returns infrastructure items associated with the specified client.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/infrastructure/by-client/client-uuid-here?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"infrastructure": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "server-dc-01",
|
||||
"asset_type": "domain_controller",
|
||||
"client_id": "client-uuid-here",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
items, total = infrastructure_service.get_infrastructure_by_client(db, client_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"infrastructure": [InfrastructureResponse.model_validate(item) for item in items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve infrastructure items for client: {str(e)}"
|
||||
)
|
||||
467
api/routers/m365_tenants.py
Normal file
467
api/routers/m365_tenants.py
Normal file
@@ -0,0 +1,467 @@
|
||||
"""
|
||||
M365 Tenant API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing M365 tenants, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.m365_tenant import (
|
||||
M365TenantCreate,
|
||||
M365TenantResponse,
|
||||
M365TenantUpdate,
|
||||
)
|
||||
from api.services import m365_tenant_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all M365 tenants",
|
||||
description="Retrieve a paginated list of all M365 tenants with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_m365_tenants(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all M365 tenants with pagination.
|
||||
|
||||
- **skip**: Number of M365 tenants to skip (default: 0)
|
||||
- **limit**: Maximum number of M365 tenants to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of M365 tenants with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/m365-tenants?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 3,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"m365_tenants": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "admin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Primary M365 tenant",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
tenants, total = m365_tenant_service.get_m365_tenants(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"m365_tenants": [M365TenantResponse.model_validate(tenant) for tenant in tenants]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve M365 tenants: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{tenant_id}",
|
||||
response_model=M365TenantResponse,
|
||||
summary="Get M365 tenant by ID",
|
||||
description="Retrieve a single M365 tenant by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "M365 tenant found and returned",
|
||||
"model": M365TenantResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "M365 tenant not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "M365 tenant with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_m365_tenant(
|
||||
tenant_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific M365 tenant by ID.
|
||||
|
||||
- **tenant_id**: UUID of the M365 tenant to retrieve
|
||||
|
||||
Returns the complete M365 tenant details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/m365-tenants/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "admin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Primary M365 tenant",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tenant = m365_tenant_service.get_m365_tenant_by_id(db, tenant_id)
|
||||
return M365TenantResponse.model_validate(tenant)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get M365 tenants by client",
|
||||
description="Retrieve all M365 tenants for a specific client",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "M365 tenants found and returned",
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_m365_tenants_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all M365 tenants for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of M365 tenants to skip (default: 0)
|
||||
- **limit**: Maximum number of M365 tenants to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of M365 tenants for the specified client.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/m365-tenants/by-client/abc12345-6789-0def-1234-56789abcdef0?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 2,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"m365_tenants": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "admin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Primary M365 tenant",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
tenants, total = m365_tenant_service.get_m365_tenants_by_client(db, client_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"client_id": str(client_id),
|
||||
"m365_tenants": [M365TenantResponse.model_validate(tenant) for tenant in tenants]
|
||||
}
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=M365TenantResponse,
|
||||
summary="Create new M365 tenant",
|
||||
description="Create a new M365 tenant with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "M365 tenant created successfully",
|
||||
"model": M365TenantResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID abc12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "M365 tenant with tenant_id already exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "M365 tenant with tenant_id 'def45678-9abc-0123-4567-89abcdef0123' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "tenant_id"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_m365_tenant(
|
||||
tenant_data: M365TenantCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new M365 tenant.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/m365-tenants
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "admin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Primary M365 tenant"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "admin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Primary M365 tenant",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tenant = m365_tenant_service.create_m365_tenant(db, tenant_data)
|
||||
return M365TenantResponse.model_validate(tenant)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{tenant_id}",
|
||||
response_model=M365TenantResponse,
|
||||
summary="Update M365 tenant",
|
||||
description="Update an existing M365 tenant's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "M365 tenant updated successfully",
|
||||
"model": M365TenantResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "M365 tenant or client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "M365 tenant with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Conflict with existing M365 tenant",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "M365 tenant with tenant_id 'def45678-9abc-0123-4567-89abcdef0123' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_m365_tenant(
|
||||
tenant_id: UUID,
|
||||
tenant_data: M365TenantUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing M365 tenant.
|
||||
|
||||
- **tenant_id**: UUID of the M365 tenant to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/m365-tenants/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"admin_email": "newadmin@dataforth.com",
|
||||
"notes": "Updated administrator contact"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"tenant_id": "def45678-9abc-0123-4567-89abcdef0123",
|
||||
"tenant_name": "dataforth.com",
|
||||
"default_domain": "dataforthcorp.onmicrosoft.com",
|
||||
"admin_email": "newadmin@dataforth.com",
|
||||
"cipp_name": "Dataforth Corp",
|
||||
"notes": "Updated administrator contact",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tenant = m365_tenant_service.update_m365_tenant(db, tenant_id, tenant_data)
|
||||
return M365TenantResponse.model_validate(tenant)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{tenant_id}",
|
||||
response_model=dict,
|
||||
summary="Delete M365 tenant",
|
||||
description="Delete an M365 tenant by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "M365 tenant deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "M365 tenant deleted successfully",
|
||||
"tenant_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "M365 tenant not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "M365 tenant with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_m365_tenant(
|
||||
tenant_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete an M365 tenant.
|
||||
|
||||
- **tenant_id**: UUID of the M365 tenant to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/m365-tenants/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "M365 tenant deleted successfully",
|
||||
"tenant_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return m365_tenant_service.delete_m365_tenant(db, tenant_id)
|
||||
457
api/routers/machines.py
Normal file
457
api/routers/machines.py
Normal file
@@ -0,0 +1,457 @@
|
||||
"""
|
||||
Machine API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing machines, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.machine import (
|
||||
MachineCreate,
|
||||
MachineResponse,
|
||||
MachineUpdate,
|
||||
)
|
||||
from api.services import machine_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all machines",
|
||||
description="Retrieve a paginated list of all machines with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_machines(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
active_only: bool = Query(
|
||||
default=False,
|
||||
description="If true, only return active machines"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all machines with pagination.
|
||||
|
||||
- **skip**: Number of machines to skip (default: 0)
|
||||
- **limit**: Maximum number of machines to return (default: 100, max: 1000)
|
||||
- **active_only**: Filter to only active machines (default: false)
|
||||
|
||||
Returns a list of machines with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/machines?skip=0&limit=50&active_only=true
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"machines": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Main Development Laptop",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"is_primary": true,
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
if active_only:
|
||||
machines, total = machine_service.get_active_machines(db, skip, limit)
|
||||
else:
|
||||
machines, total = machine_service.get_machines(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"machines": [MachineResponse.model_validate(machine) for machine in machines]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve machines: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{machine_id}",
|
||||
response_model=MachineResponse,
|
||||
summary="Get machine by ID",
|
||||
description="Retrieve a single machine by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Machine found and returned",
|
||||
"model": MachineResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Machine not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Machine with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_machine(
|
||||
machine_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific machine by ID.
|
||||
|
||||
- **machine_id**: UUID of the machine to retrieve
|
||||
|
||||
Returns the complete machine details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/machines/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Main Development Laptop",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"os_version": "Windows 11 Pro",
|
||||
"username": "technician",
|
||||
"home_directory": "C:\\\\Users\\\\technician",
|
||||
"has_vpn_access": true,
|
||||
"has_docker": true,
|
||||
"has_powershell": true,
|
||||
"powershell_version": "7.4.0",
|
||||
"is_primary": true,
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
machine = machine_service.get_machine_by_id(db, machine_id)
|
||||
return MachineResponse.model_validate(machine)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=MachineResponse,
|
||||
summary="Create new machine",
|
||||
description="Create a new machine with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Machine created successfully",
|
||||
"model": MachineResponse,
|
||||
},
|
||||
409: {
|
||||
"description": "Machine with hostname already exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Machine with hostname 'laptop-dev-01' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "hostname"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_machine(
|
||||
machine_data: MachineCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new machine.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/machines
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Main Development Laptop",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"os_version": "Windows 11 Pro",
|
||||
"username": "technician",
|
||||
"home_directory": "C:\\\\Users\\\\technician",
|
||||
"has_vpn_access": true,
|
||||
"has_docker": true,
|
||||
"has_powershell": true,
|
||||
"powershell_version": "7.4.0",
|
||||
"has_ssh": true,
|
||||
"has_git": true,
|
||||
"claude_working_directory": "D:\\\\Projects",
|
||||
"preferred_shell": "powershell",
|
||||
"is_primary": true,
|
||||
"is_active": true
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Main Development Laptop",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"is_primary": true,
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
machine = machine_service.create_machine(db, machine_data)
|
||||
return MachineResponse.model_validate(machine)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{machine_id}",
|
||||
response_model=MachineResponse,
|
||||
summary="Update machine",
|
||||
description="Update an existing machine's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Machine updated successfully",
|
||||
"model": MachineResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Machine not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Machine with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Conflict with existing machine",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Machine with hostname 'laptop-dev-01' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_machine(
|
||||
machine_id: UUID,
|
||||
machine_data: MachineUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing machine.
|
||||
|
||||
- **machine_id**: UUID of the machine to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/machines/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"friendly_name": "Updated Laptop Name",
|
||||
"is_active": false,
|
||||
"notes": "Machine being retired"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Updated Laptop Name",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"is_active": false,
|
||||
"notes": "Machine being retired",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
machine = machine_service.update_machine(db, machine_id, machine_data)
|
||||
return MachineResponse.model_validate(machine)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{machine_id}",
|
||||
response_model=dict,
|
||||
summary="Delete machine",
|
||||
description="Delete a machine by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Machine deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Machine deleted successfully",
|
||||
"machine_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Machine not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Machine with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_machine(
|
||||
machine_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a machine.
|
||||
|
||||
- **machine_id**: UUID of the machine to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/machines/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Machine deleted successfully",
|
||||
"machine_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return machine_service.delete_machine(db, machine_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/primary/info",
|
||||
response_model=MachineResponse,
|
||||
summary="Get primary machine",
|
||||
description="Retrieve the machine marked as primary",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Primary machine found",
|
||||
"model": MachineResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "No primary machine configured",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "No primary machine is configured"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_primary_machine(
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get the primary machine.
|
||||
|
||||
Returns the machine that is marked as the primary machine for MSP work.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/machines/primary/info
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"hostname": "laptop-dev-01",
|
||||
"friendly_name": "Main Development Laptop",
|
||||
"machine_type": "laptop",
|
||||
"platform": "win32",
|
||||
"is_primary": true,
|
||||
"is_active": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
primary_machine = machine_service.get_primary_machine(db)
|
||||
|
||||
if not primary_machine:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="No primary machine is configured"
|
||||
)
|
||||
|
||||
return MachineResponse.model_validate(primary_machine)
|
||||
457
api/routers/networks.py
Normal file
457
api/routers/networks.py
Normal file
@@ -0,0 +1,457 @@
|
||||
"""
|
||||
Network API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing networks, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.network import (
|
||||
NetworkCreate,
|
||||
NetworkResponse,
|
||||
NetworkUpdate,
|
||||
)
|
||||
from api.services import network_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all networks",
|
||||
description="Retrieve a paginated list of all networks with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_networks(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all networks with pagination.
|
||||
|
||||
- **skip**: Number of networks to skip (default: 0)
|
||||
- **limit**: Maximum number of networks to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of networks with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/networks?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"networks": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.1",
|
||||
"vlan_id": null,
|
||||
"notes": "Primary office network",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
networks, total = network_service.get_networks(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"networks": [NetworkResponse.model_validate(network) for network in networks]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve networks: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-site/{site_id}",
|
||||
response_model=dict,
|
||||
summary="Get networks by site",
|
||||
description="Retrieve all networks for a specific site with pagination",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Networks found and returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 3,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"networks": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.1",
|
||||
"vlan_id": None,
|
||||
"notes": "Primary office network",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
404: {
|
||||
"description": "Site not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Site with ID def12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_networks_by_site(
|
||||
site_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all networks for a specific site.
|
||||
|
||||
- **site_id**: UUID of the site
|
||||
- **skip**: Number of networks to skip (default: 0)
|
||||
- **limit**: Maximum number of networks to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of networks for the specified site with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/networks/by-site/def12345-6789-0def-1234-56789abcdef0?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
"""
|
||||
networks, total = network_service.get_networks_by_site(db, site_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"networks": [NetworkResponse.model_validate(network) for network in networks]
|
||||
}
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{network_id}",
|
||||
response_model=NetworkResponse,
|
||||
summary="Get network by ID",
|
||||
description="Retrieve a single network by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Network found and returned",
|
||||
"model": NetworkResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Network not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Network with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_network(
|
||||
network_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific network by ID.
|
||||
|
||||
- **network_id**: UUID of the network to retrieve
|
||||
|
||||
Returns the complete network details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/networks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.1",
|
||||
"vlan_id": null,
|
||||
"notes": "Primary office network",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
network = network_service.get_network_by_id(db, network_id)
|
||||
return NetworkResponse.model_validate(network)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=NetworkResponse,
|
||||
summary="Create new network",
|
||||
description="Create a new network with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Network created successfully",
|
||||
"model": NetworkResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Site not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Site with ID def12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "network_name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_network(
|
||||
network_data: NetworkCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new network.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The site_id must reference an existing site if provided.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/networks
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.1",
|
||||
"vlan_id": null,
|
||||
"notes": "Primary office network"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.1",
|
||||
"vlan_id": null,
|
||||
"notes": "Primary office network",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
network = network_service.create_network(db, network_data)
|
||||
return NetworkResponse.model_validate(network)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{network_id}",
|
||||
response_model=NetworkResponse,
|
||||
summary="Update network",
|
||||
description="Update an existing network's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Network updated successfully",
|
||||
"model": NetworkResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Network or site not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Network with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_network(
|
||||
network_id: UUID,
|
||||
network_data: NetworkUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing network.
|
||||
|
||||
- **network_id**: UUID of the network to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating site_id, the new site must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/networks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"gateway_ip": "192.168.1.254",
|
||||
"notes": "Gateway IP updated for redundancy"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"site_id": "def12345-6789-0def-1234-56789abcdef0",
|
||||
"network_name": "Main LAN",
|
||||
"network_type": "lan",
|
||||
"cidr": "192.168.1.0/24",
|
||||
"gateway_ip": "192.168.1.254",
|
||||
"vlan_id": null,
|
||||
"notes": "Gateway IP updated for redundancy",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
network = network_service.update_network(db, network_id, network_data)
|
||||
return NetworkResponse.model_validate(network)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{network_id}",
|
||||
response_model=dict,
|
||||
summary="Delete network",
|
||||
description="Delete a network by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Network deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Network deleted successfully",
|
||||
"network_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Network not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Network with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_network(
|
||||
network_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a network.
|
||||
|
||||
- **network_id**: UUID of the network to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/networks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Network deleted successfully",
|
||||
"network_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return network_service.delete_network(db, network_id)
|
||||
202
api/routers/project_states.py
Normal file
202
api/routers/project_states.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
ProjectState API router for ClaudeTools.
|
||||
|
||||
Defines all REST API endpoints for managing project states,
|
||||
tracking the current state of projects for context retrieval.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.project_state import (
|
||||
ProjectStateCreate,
|
||||
ProjectStateResponse,
|
||||
ProjectStateUpdate,
|
||||
)
|
||||
from api.services import project_state_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all project states",
|
||||
description="Retrieve a paginated list of all project states",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_project_states(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all project states with pagination.
|
||||
|
||||
Returns project states ordered by most recently updated.
|
||||
"""
|
||||
try:
|
||||
states, total = project_state_service.get_project_states(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"states": [ProjectStateResponse.model_validate(state) for state in states]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve project states: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-project/{project_id}",
|
||||
response_model=ProjectStateResponse,
|
||||
summary="Get project state by project ID",
|
||||
description="Retrieve the project state for a specific project (unique per project)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_project_state_by_project(
|
||||
project_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get the project state for a specific project.
|
||||
|
||||
Each project has exactly one project state.
|
||||
"""
|
||||
state = project_state_service.get_project_state_by_project(db, project_id)
|
||||
|
||||
if not state:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"ProjectState for project ID {project_id} not found"
|
||||
)
|
||||
|
||||
return ProjectStateResponse.model_validate(state)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{state_id}",
|
||||
response_model=ProjectStateResponse,
|
||||
summary="Get project state by ID",
|
||||
description="Retrieve a single project state by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_project_state(
|
||||
state_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific project state by ID.
|
||||
"""
|
||||
state = project_state_service.get_project_state_by_id(db, state_id)
|
||||
return ProjectStateResponse.model_validate(state)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ProjectStateResponse,
|
||||
summary="Create new project state",
|
||||
description="Create a new project state with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_project_state(
|
||||
state_data: ProjectStateCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new project state.
|
||||
|
||||
Each project can only have one project state (enforced by unique constraint).
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
"""
|
||||
state = project_state_service.create_project_state(db, state_data)
|
||||
return ProjectStateResponse.model_validate(state)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{state_id}",
|
||||
response_model=ProjectStateResponse,
|
||||
summary="Update project state",
|
||||
description="Update an existing project state's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_project_state(
|
||||
state_id: UUID,
|
||||
state_data: ProjectStateUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing project state.
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
Uses compression utilities when updating to maintain efficient storage.
|
||||
"""
|
||||
state = project_state_service.update_project_state(db, state_id, state_data)
|
||||
return ProjectStateResponse.model_validate(state)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/by-project/{project_id}",
|
||||
response_model=ProjectStateResponse,
|
||||
summary="Update project state by project ID",
|
||||
description="Update project state by project ID (creates if doesn't exist)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_project_state_by_project(
|
||||
project_id: UUID,
|
||||
state_data: ProjectStateUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update project state by project ID.
|
||||
|
||||
Convenience method that creates a new project state if it doesn't exist,
|
||||
or updates the existing one if it does.
|
||||
"""
|
||||
state = project_state_service.update_project_state_by_project(db, project_id, state_data)
|
||||
return ProjectStateResponse.model_validate(state)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{state_id}",
|
||||
response_model=dict,
|
||||
summary="Delete project state",
|
||||
description="Delete a project state by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_project_state(
|
||||
state_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a project state.
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
"""
|
||||
return project_state_service.delete_project_state(db, state_id)
|
||||
413
api/routers/projects.py
Normal file
413
api/routers/projects.py
Normal file
@@ -0,0 +1,413 @@
|
||||
"""
|
||||
Project API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing projects, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.project import (
|
||||
ProjectCreate,
|
||||
ProjectResponse,
|
||||
ProjectUpdate,
|
||||
)
|
||||
from api.services import project_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all projects",
|
||||
description="Retrieve a paginated list of all projects with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_projects(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
client_id: str = Query(
|
||||
default=None,
|
||||
description="Filter projects by client ID"
|
||||
),
|
||||
status_filter: str = Query(
|
||||
default=None,
|
||||
description="Filter projects by status (complete, working, blocked, pending, critical, deferred)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all projects with pagination and optional filtering.
|
||||
|
||||
- **skip**: Number of projects to skip (default: 0)
|
||||
- **limit**: Maximum number of projects to return (default: 100, max: 1000)
|
||||
- **client_id**: Filter by client ID (optional)
|
||||
- **status_filter**: Filter by status (optional)
|
||||
|
||||
Returns a list of projects with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/projects?skip=0&limit=50&status_filter=working
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"projects": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "Website Redesign",
|
||||
"slug": "website-redesign",
|
||||
"category": "client_project",
|
||||
"status": "working",
|
||||
"priority": "high",
|
||||
"description": "Complete website overhaul",
|
||||
"started_date": "2024-01-15",
|
||||
"target_completion_date": "2024-03-15",
|
||||
"estimated_hours": 120.00,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
if client_id:
|
||||
projects, total = project_service.get_projects_by_client(db, client_id, skip, limit)
|
||||
elif status_filter:
|
||||
projects, total = project_service.get_projects_by_status(db, status_filter, skip, limit)
|
||||
else:
|
||||
projects, total = project_service.get_projects(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"projects": [ProjectResponse.model_validate(project) for project in projects]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve projects: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{project_id}",
|
||||
response_model=ProjectResponse,
|
||||
summary="Get project by ID",
|
||||
description="Retrieve a single project by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Project found and returned",
|
||||
"model": ProjectResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Project not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_project(
|
||||
project_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific project by ID.
|
||||
|
||||
- **project_id**: UUID of the project to retrieve
|
||||
|
||||
Returns the complete project details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/projects/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "Website Redesign",
|
||||
"slug": "website-redesign",
|
||||
"category": "client_project",
|
||||
"status": "working",
|
||||
"priority": "high",
|
||||
"description": "Complete website overhaul with new branding",
|
||||
"started_date": "2024-01-15",
|
||||
"target_completion_date": "2024-03-15",
|
||||
"completed_date": null,
|
||||
"estimated_hours": 120.00,
|
||||
"actual_hours": 45.50,
|
||||
"gitea_repo_url": "https://gitea.example.com/client/website",
|
||||
"notes": "Client requested mobile-first approach",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-20T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
project = project_service.get_project_by_id(db, project_id)
|
||||
return ProjectResponse.model_validate(project)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ProjectResponse,
|
||||
summary="Create new project",
|
||||
description="Create a new project with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Project created successfully",
|
||||
"model": ProjectResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Project with slug already exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with slug 'website-redesign' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_project(
|
||||
project_data: ProjectCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new project.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The client_id must reference an existing client.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/projects
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "Website Redesign",
|
||||
"slug": "website-redesign",
|
||||
"category": "client_project",
|
||||
"status": "working",
|
||||
"priority": "high",
|
||||
"description": "Complete website overhaul with new branding",
|
||||
"started_date": "2024-01-15",
|
||||
"target_completion_date": "2024-03-15",
|
||||
"estimated_hours": 120.00,
|
||||
"gitea_repo_url": "https://gitea.example.com/client/website",
|
||||
"notes": "Client requested mobile-first approach"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "Website Redesign",
|
||||
"slug": "website-redesign",
|
||||
"status": "working",
|
||||
"priority": "high",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
project = project_service.create_project(db, project_data)
|
||||
return ProjectResponse.model_validate(project)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{project_id}",
|
||||
response_model=ProjectResponse,
|
||||
summary="Update project",
|
||||
description="Update an existing project's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Project updated successfully",
|
||||
"model": ProjectResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Project or client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Conflict with existing project",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with slug 'website-redesign' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_project(
|
||||
project_id: UUID,
|
||||
project_data: ProjectUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing project.
|
||||
|
||||
- **project_id**: UUID of the project to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating client_id, the new client must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/projects/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "completed",
|
||||
"completed_date": "2024-03-10",
|
||||
"actual_hours": 118.50,
|
||||
"notes": "Project completed ahead of schedule"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "Website Redesign",
|
||||
"slug": "website-redesign",
|
||||
"status": "completed",
|
||||
"completed_date": "2024-03-10",
|
||||
"actual_hours": 118.50,
|
||||
"notes": "Project completed ahead of schedule",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-03-10T16:45:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
project = project_service.update_project(db, project_id, project_data)
|
||||
return ProjectResponse.model_validate(project)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{project_id}",
|
||||
response_model=dict,
|
||||
summary="Delete project",
|
||||
description="Delete a project by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Project deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Project deleted successfully",
|
||||
"project_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Project not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_project(
|
||||
project_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a project.
|
||||
|
||||
- **project_id**: UUID of the project to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/projects/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Project deleted successfully",
|
||||
"project_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return project_service.delete_project(db, project_id)
|
||||
253
api/routers/security_incidents.py
Normal file
253
api/routers/security_incidents.py
Normal file
@@ -0,0 +1,253 @@
|
||||
"""
|
||||
Security Incidents API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing security incidents.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.security_incident import (
|
||||
SecurityIncidentCreate,
|
||||
SecurityIncidentResponse,
|
||||
SecurityIncidentUpdate,
|
||||
)
|
||||
from api.services import security_incident_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all security incidents",
|
||||
description="Retrieve a paginated list of all security incidents",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_security_incidents(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all security incidents with pagination.
|
||||
|
||||
- **skip**: Number of incidents to skip (default: 0)
|
||||
- **limit**: Maximum number of incidents to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of security incidents with pagination metadata.
|
||||
Incidents are ordered by incident_date descending (most recent first).
|
||||
"""
|
||||
try:
|
||||
incidents, total = security_incident_service.get_security_incidents(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"incidents": [SecurityIncidentResponse.model_validate(incident) for incident in incidents]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve security incidents: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{incident_id}",
|
||||
response_model=SecurityIncidentResponse,
|
||||
summary="Get security incident by ID",
|
||||
description="Retrieve a single security incident by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_security_incident(
|
||||
incident_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific security incident by ID.
|
||||
|
||||
- **incident_id**: UUID of the security incident to retrieve
|
||||
|
||||
Returns the complete security incident details including investigation
|
||||
findings, remediation steps, and current status.
|
||||
"""
|
||||
incident = security_incident_service.get_security_incident_by_id(db, incident_id)
|
||||
return SecurityIncidentResponse.model_validate(incident)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=SecurityIncidentResponse,
|
||||
summary="Create new security incident",
|
||||
description="Create a new security incident record",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
)
|
||||
def create_security_incident(
|
||||
incident_data: SecurityIncidentCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new security incident.
|
||||
|
||||
Records a new security incident including the incident type, severity,
|
||||
affected resources, and initial description. Status defaults to 'investigating'.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
"""
|
||||
incident = security_incident_service.create_security_incident(db, incident_data)
|
||||
return SecurityIncidentResponse.model_validate(incident)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{incident_id}",
|
||||
response_model=SecurityIncidentResponse,
|
||||
summary="Update security incident",
|
||||
description="Update an existing security incident's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def update_security_incident(
|
||||
incident_id: UUID,
|
||||
incident_data: SecurityIncidentUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing security incident.
|
||||
|
||||
- **incident_id**: UUID of the security incident to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
Commonly updated fields include status, findings, remediation_steps,
|
||||
and resolved_at timestamp.
|
||||
"""
|
||||
incident = security_incident_service.update_security_incident(db, incident_id, incident_data)
|
||||
return SecurityIncidentResponse.model_validate(incident)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{incident_id}",
|
||||
response_model=dict,
|
||||
summary="Delete security incident",
|
||||
description="Delete a security incident by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def delete_security_incident(
|
||||
incident_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a security incident.
|
||||
|
||||
- **incident_id**: UUID of the security incident to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
Consider setting status to 'resolved' instead of deleting for audit purposes.
|
||||
"""
|
||||
return security_incident_service.delete_security_incident(db, incident_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get security incidents by client",
|
||||
description="Retrieve all security incidents for a specific client",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_security_incidents_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(default=0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records to return"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all security incidents for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of incidents to skip (default: 0)
|
||||
- **limit**: Maximum number of incidents to return (default: 100, max: 1000)
|
||||
|
||||
Returns incidents ordered by incident_date descending (most recent first).
|
||||
"""
|
||||
try:
|
||||
incidents, total = security_incident_service.get_security_incidents_by_client(
|
||||
db, client_id, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"client_id": str(client_id),
|
||||
"incidents": [SecurityIncidentResponse.model_validate(incident) for incident in incidents]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve security incidents for client: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-status/{status_filter}",
|
||||
response_model=dict,
|
||||
summary="Get security incidents by status",
|
||||
description="Retrieve all security incidents with a specific status",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_security_incidents_by_status(
|
||||
status_filter: str = Path(..., description="Status: investigating, contained, resolved, monitoring"),
|
||||
skip: int = Query(default=0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records to return"),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all security incidents with a specific status.
|
||||
|
||||
- **status_filter**: Status to filter by (investigating, contained, resolved, monitoring)
|
||||
- **skip**: Number of incidents to skip (default: 0)
|
||||
- **limit**: Maximum number of incidents to return (default: 100, max: 1000)
|
||||
|
||||
Returns incidents ordered by incident_date descending (most recent first).
|
||||
"""
|
||||
try:
|
||||
incidents, total = security_incident_service.get_security_incidents_by_status(
|
||||
db, status_filter, skip, limit
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"status": status_filter,
|
||||
"incidents": [SecurityIncidentResponse.model_validate(incident) for incident in incidents]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve security incidents by status: {str(e)}"
|
||||
)
|
||||
490
api/routers/services.py
Normal file
490
api/routers/services.py
Normal file
@@ -0,0 +1,490 @@
|
||||
"""
|
||||
Service API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing services, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.service import (
|
||||
ServiceCreate,
|
||||
ServiceResponse,
|
||||
ServiceUpdate,
|
||||
)
|
||||
from api.services import service_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all services",
|
||||
description="Retrieve a paginated list of all services with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_services(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
client_id: str = Query(
|
||||
default=None,
|
||||
description="Filter services by client ID (via infrastructure)"
|
||||
),
|
||||
service_type: str = Query(
|
||||
default=None,
|
||||
description="Filter services by type (e.g., 'git_hosting', 'database', 'web_server')"
|
||||
),
|
||||
status_filter: str = Query(
|
||||
default=None,
|
||||
description="Filter services by status (running, stopped, error, maintenance)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all services with pagination and optional filtering.
|
||||
|
||||
- **skip**: Number of services to skip (default: 0)
|
||||
- **limit**: Maximum number of services to return (default: 100, max: 1000)
|
||||
- **client_id**: Filter by client ID (optional)
|
||||
- **service_type**: Filter by service type (optional)
|
||||
- **status_filter**: Filter by status (optional)
|
||||
|
||||
Returns a list of services with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/services?skip=0&limit=50&status_filter=running
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 25,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"services": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"external_url": "https://gitea.example.com",
|
||||
"port": 3000,
|
||||
"protocol": "https",
|
||||
"status": "running",
|
||||
"version": "1.21.0",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
if client_id:
|
||||
services, total = service_service.get_services_by_client(db, client_id, skip, limit)
|
||||
elif service_type:
|
||||
services, total = service_service.get_services_by_type(db, service_type, skip, limit)
|
||||
elif status_filter:
|
||||
services, total = service_service.get_services_by_status(db, status_filter, skip, limit)
|
||||
else:
|
||||
services, total = service_service.get_services(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"services": [ServiceResponse.model_validate(service) for service in services]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve services: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{service_id}",
|
||||
response_model=ServiceResponse,
|
||||
summary="Get service by ID",
|
||||
description="Retrieve a single service by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Service found and returned",
|
||||
"model": ServiceResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Service not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Service with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_service(
|
||||
service_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific service by ID.
|
||||
|
||||
- **service_id**: UUID of the service to retrieve
|
||||
|
||||
Returns the complete service details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/services/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"external_url": "https://gitea.example.com",
|
||||
"internal_url": "http://192.168.1.10:3000",
|
||||
"port": 3000,
|
||||
"protocol": "https",
|
||||
"status": "running",
|
||||
"version": "1.21.0",
|
||||
"notes": "Primary Git server for code repositories",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-20T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
service = service_service.get_service_by_id(db, service_id)
|
||||
return ServiceResponse.model_validate(service)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=ServiceResponse,
|
||||
summary="Create new service",
|
||||
description="Create a new service with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Service created successfully",
|
||||
"model": ServiceResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Infrastructure not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Infrastructure with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "service_name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_service(
|
||||
service_data: ServiceCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new service.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The infrastructure_id must reference an existing infrastructure if provided.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/services
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"external_url": "https://gitea.example.com",
|
||||
"internal_url": "http://192.168.1.10:3000",
|
||||
"port": 3000,
|
||||
"protocol": "https",
|
||||
"status": "running",
|
||||
"version": "1.21.0",
|
||||
"notes": "Primary Git server for code repositories"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"status": "running",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
service = service_service.create_service(db, service_data)
|
||||
return ServiceResponse.model_validate(service)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{service_id}",
|
||||
response_model=ServiceResponse,
|
||||
summary="Update service",
|
||||
description="Update an existing service's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Service updated successfully",
|
||||
"model": ServiceResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Service or infrastructure not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Service with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_service(
|
||||
service_id: UUID,
|
||||
service_data: ServiceUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing service.
|
||||
|
||||
- **service_id**: UUID of the service to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating infrastructure_id, the new infrastructure must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/services/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "maintenance",
|
||||
"version": "1.22.0",
|
||||
"notes": "Upgraded to latest version, temporarily in maintenance mode"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"status": "maintenance",
|
||||
"version": "1.22.0",
|
||||
"notes": "Upgraded to latest version, temporarily in maintenance mode",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-03-10T16:45:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
service = service_service.update_service(db, service_id, service_data)
|
||||
return ServiceResponse.model_validate(service)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{service_id}",
|
||||
response_model=dict,
|
||||
summary="Delete service",
|
||||
description="Delete a service by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Service deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Service deleted successfully",
|
||||
"service_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Service not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Service with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_service(
|
||||
service_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a service.
|
||||
|
||||
- **service_id**: UUID of the service to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/services/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Service deleted successfully",
|
||||
"service_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return service_service.delete_service(db, service_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get services by client",
|
||||
description="Retrieve all services for a specific client (via infrastructure)",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Services found and returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"services": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"status": "running"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_services_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all services for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of services to skip (default: 0)
|
||||
- **limit**: Maximum number of services to return (default: 100, max: 1000)
|
||||
|
||||
This endpoint retrieves services associated with a client's infrastructure.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/services/by-client/123e4567-e89b-12d3-a456-426614174001?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"services": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"infrastructure_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"service_name": "Gitea",
|
||||
"service_type": "git_hosting",
|
||||
"status": "running",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
services, total = service_service.get_services_by_client(db, str(client_id), skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"services": [ServiceResponse.model_validate(service) for service in services]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve services for client: {str(e)}"
|
||||
)
|
||||
400
api/routers/sessions.py
Normal file
400
api/routers/sessions.py
Normal file
@@ -0,0 +1,400 @@
|
||||
"""
|
||||
Session API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing sessions, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.session import (
|
||||
SessionCreate,
|
||||
SessionResponse,
|
||||
SessionUpdate,
|
||||
)
|
||||
from api.services import session_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all sessions",
|
||||
description="Retrieve a paginated list of all sessions with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_sessions(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
project_id: UUID | None = Query(
|
||||
default=None,
|
||||
description="Filter sessions by project ID"
|
||||
),
|
||||
machine_id: UUID | None = Query(
|
||||
default=None,
|
||||
description="Filter sessions by machine ID"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all sessions with pagination.
|
||||
|
||||
- **skip**: Number of sessions to skip (default: 0)
|
||||
- **limit**: Maximum number of sessions to return (default: 100, max: 1000)
|
||||
- **project_id**: Optional filter by project ID
|
||||
- **machine_id**: Optional filter by machine ID
|
||||
|
||||
Returns a list of sessions with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/sessions?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"sessions": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_title": "Database migration work",
|
||||
"session_date": "2024-01-15",
|
||||
"status": "completed",
|
||||
"duration_minutes": 120,
|
||||
"is_billable": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
# Filter by project if specified
|
||||
if project_id:
|
||||
sessions, total = session_service.get_sessions_by_project(db, project_id, skip, limit)
|
||||
# Filter by machine if specified
|
||||
elif machine_id:
|
||||
sessions, total = session_service.get_sessions_by_machine(db, machine_id, skip, limit)
|
||||
# Otherwise get all sessions
|
||||
else:
|
||||
sessions, total = session_service.get_sessions(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"sessions": [SessionResponse.model_validate(session) for session in sessions]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve sessions: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{session_id}",
|
||||
response_model=SessionResponse,
|
||||
summary="Get session by ID",
|
||||
description="Retrieve a single session by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Session found and returned",
|
||||
"model": SessionResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Session not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Session with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_session(
|
||||
session_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific session by ID.
|
||||
|
||||
- **session_id**: UUID of the session to retrieve
|
||||
|
||||
Returns the complete session details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/sessions/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"project_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"machine_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"session_date": "2024-01-15",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"end_time": "2024-01-15T11:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"status": "completed",
|
||||
"session_title": "Database migration work",
|
||||
"summary": "Migrated customer database to new schema version",
|
||||
"is_billable": true,
|
||||
"billable_hours": 2.0,
|
||||
"technician": "John Doe",
|
||||
"session_log_file": "/logs/2024-01-15-db-migration.md",
|
||||
"notes": "Successful migration with no issues",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
session = session_service.get_session_by_id(db, session_id)
|
||||
return SessionResponse.model_validate(session)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=SessionResponse,
|
||||
summary="Create new session",
|
||||
description="Create a new session with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Session created successfully",
|
||||
"model": SessionResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Referenced project or machine not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Project with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "session_title"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_session(
|
||||
session_data: SessionCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new session.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/sessions
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"session_title": "Database migration work",
|
||||
"session_date": "2024-01-15",
|
||||
"project_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"machine_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"start_time": "2024-01-15T09:00:00Z",
|
||||
"end_time": "2024-01-15T11:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"status": "completed",
|
||||
"summary": "Migrated customer database to new schema version",
|
||||
"is_billable": true,
|
||||
"billable_hours": 2.0,
|
||||
"technician": "John Doe"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_title": "Database migration work",
|
||||
"session_date": "2024-01-15",
|
||||
"status": "completed",
|
||||
"duration_minutes": 120,
|
||||
"is_billable": true,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
session = session_service.create_session(db, session_data)
|
||||
return SessionResponse.model_validate(session)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{session_id}",
|
||||
response_model=SessionResponse,
|
||||
summary="Update session",
|
||||
description="Update an existing session's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Session updated successfully",
|
||||
"model": SessionResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Session, project, or machine not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Session with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Invalid project_id"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_session(
|
||||
session_id: UUID,
|
||||
session_data: SessionUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing session.
|
||||
|
||||
- **session_id**: UUID of the session to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/sessions/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "completed",
|
||||
"end_time": "2024-01-15T11:00:00Z",
|
||||
"duration_minutes": 120,
|
||||
"summary": "Successfully completed database migration"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_title": "Database migration work",
|
||||
"session_date": "2024-01-15",
|
||||
"status": "completed",
|
||||
"duration_minutes": 120,
|
||||
"summary": "Successfully completed database migration",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
session = session_service.update_session(db, session_id, session_data)
|
||||
return SessionResponse.model_validate(session)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{session_id}",
|
||||
response_model=dict,
|
||||
summary="Delete session",
|
||||
description="Delete a session by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Session deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Session deleted successfully",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Session not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Session with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_session(
|
||||
session_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a session.
|
||||
|
||||
- **session_id**: UUID of the session to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/sessions/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Session deleted successfully",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return session_service.delete_session(db, session_id)
|
||||
457
api/routers/sites.py
Normal file
457
api/routers/sites.py
Normal file
@@ -0,0 +1,457 @@
|
||||
"""
|
||||
Site API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing sites, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.site import (
|
||||
SiteCreate,
|
||||
SiteResponse,
|
||||
SiteUpdate,
|
||||
)
|
||||
from api.services import site_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all sites",
|
||||
description="Retrieve a paginated list of all sites with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_sites(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all sites with pagination.
|
||||
|
||||
- **skip**: Number of sites to skip (default: 0)
|
||||
- **limit**: Maximum number of sites to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of sites with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/sites?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 5,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"sites": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": true,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "Primary office location",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
sites, total = site_service.get_sites(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"sites": [SiteResponse.model_validate(site) for site in sites]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve sites: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get sites by client",
|
||||
description="Retrieve all sites for a specific client with pagination",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Sites found and returned",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"total": 3,
|
||||
"skip": 0,
|
||||
"limit": 100,
|
||||
"sites": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": True,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "Primary office location",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID abc12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_sites_by_client(
|
||||
client_id: UUID,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all sites for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of sites to skip (default: 0)
|
||||
- **limit**: Maximum number of sites to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of sites for the specified client with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/sites/by-client/abc12345-6789-0def-1234-56789abcdef0?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
"""
|
||||
sites, total = site_service.get_sites_by_client(db, client_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"sites": [SiteResponse.model_validate(site) for site in sites]
|
||||
}
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{site_id}",
|
||||
response_model=SiteResponse,
|
||||
summary="Get site by ID",
|
||||
description="Retrieve a single site by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Site found and returned",
|
||||
"model": SiteResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Site not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Site with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_site(
|
||||
site_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific site by ID.
|
||||
|
||||
- **site_id**: UUID of the site to retrieve
|
||||
|
||||
Returns the complete site details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/sites/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": true,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "Primary office location",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
site = site_service.get_site_by_id(db, site_id)
|
||||
return SiteResponse.model_validate(site)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=SiteResponse,
|
||||
summary="Create new site",
|
||||
description="Create a new site with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Site created successfully",
|
||||
"model": SiteResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Client with ID abc12345-6789-0def-1234-56789abcdef0 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_site(
|
||||
site_data: SiteCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new site.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The client_id must reference an existing client.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/sites
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": true,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "Primary office location"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": true,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "Primary office location",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
site = site_service.create_site(db, site_data)
|
||||
return SiteResponse.model_validate(site)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{site_id}",
|
||||
response_model=SiteResponse,
|
||||
summary="Update site",
|
||||
description="Update an existing site's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Site updated successfully",
|
||||
"model": SiteResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Site or client not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Site with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_site(
|
||||
site_id: UUID,
|
||||
site_data: SiteUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing site.
|
||||
|
||||
- **site_id**: UUID of the site to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating client_id, the new client must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/sites/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"vpn_required": false,
|
||||
"notes": "VPN decommissioned"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"client_id": "abc12345-6789-0def-1234-56789abcdef0",
|
||||
"name": "Main Office",
|
||||
"network_subnet": "172.16.9.0/24",
|
||||
"vpn_required": false,
|
||||
"vpn_subnet": "192.168.1.0/24",
|
||||
"gateway_ip": "172.16.9.1",
|
||||
"dns_servers": "[\"8.8.8.8\", \"8.8.4.4\"]",
|
||||
"notes": "VPN decommissioned",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
site = site_service.update_site(db, site_id, site_data)
|
||||
return SiteResponse.model_validate(site)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{site_id}",
|
||||
response_model=dict,
|
||||
summary="Delete site",
|
||||
description="Delete a site by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Site deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Site deleted successfully",
|
||||
"site_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Site not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Site with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_site(
|
||||
site_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a site.
|
||||
|
||||
- **site_id**: UUID of the site to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/sites/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Site deleted successfully",
|
||||
"site_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return site_service.delete_site(db, site_id)
|
||||
365
api/routers/tags.py
Normal file
365
api/routers/tags.py
Normal file
@@ -0,0 +1,365 @@
|
||||
"""
|
||||
Tag API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing tags, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.tag import (
|
||||
TagCreate,
|
||||
TagResponse,
|
||||
TagUpdate,
|
||||
)
|
||||
from api.services import tag_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all tags",
|
||||
description="Retrieve a paginated list of all tags with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_tags(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
category: str = Query(
|
||||
default=None,
|
||||
description="Filter by category (technology, client, infrastructure, problem_type, action, service)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all tags with pagination.
|
||||
|
||||
- **skip**: Number of tags to skip (default: 0)
|
||||
- **limit**: Maximum number of tags to return (default: 100, max: 1000)
|
||||
- **category**: Filter by category (optional)
|
||||
|
||||
Returns a list of tags with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/tags?skip=0&limit=50&category=technology
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"tags": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Windows",
|
||||
"category": "technology",
|
||||
"description": "Microsoft Windows operating system",
|
||||
"usage_count": 42,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
if category:
|
||||
tags, total = tag_service.get_tags_by_category(db, category, skip, limit)
|
||||
else:
|
||||
tags, total = tag_service.get_tags(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"tags": [TagResponse.model_validate(tag) for tag in tags]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve tags: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{tag_id}",
|
||||
response_model=TagResponse,
|
||||
summary="Get tag by ID",
|
||||
description="Retrieve a single tag by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Tag found and returned",
|
||||
"model": TagResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Tag not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Tag with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_tag(
|
||||
tag_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific tag by ID.
|
||||
|
||||
- **tag_id**: UUID of the tag to retrieve
|
||||
|
||||
Returns the complete tag details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/tags/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Windows",
|
||||
"category": "technology",
|
||||
"description": "Microsoft Windows operating system",
|
||||
"usage_count": 42,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tag = tag_service.get_tag_by_id(db, tag_id)
|
||||
return TagResponse.model_validate(tag)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=TagResponse,
|
||||
summary="Create new tag",
|
||||
description="Create a new tag with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Tag created successfully",
|
||||
"model": TagResponse,
|
||||
},
|
||||
409: {
|
||||
"description": "Tag with name already exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Tag with name 'Windows' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_tag(
|
||||
tag_data: TagCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new tag.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/tags
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "Windows",
|
||||
"category": "technology",
|
||||
"description": "Microsoft Windows operating system"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Windows",
|
||||
"category": "technology",
|
||||
"description": "Microsoft Windows operating system",
|
||||
"usage_count": 0,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tag = tag_service.create_tag(db, tag_data)
|
||||
return TagResponse.model_validate(tag)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{tag_id}",
|
||||
response_model=TagResponse,
|
||||
summary="Update tag",
|
||||
description="Update an existing tag's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Tag updated successfully",
|
||||
"model": TagResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Tag not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Tag with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
409: {
|
||||
"description": "Conflict with existing tag",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Tag with name 'Windows' already exists"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_tag(
|
||||
tag_id: UUID,
|
||||
tag_data: TagUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing tag.
|
||||
|
||||
- **tag_id**: UUID of the tag to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/tags/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"description": "Updated description for Windows",
|
||||
"category": "infrastructure"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Windows",
|
||||
"category": "infrastructure",
|
||||
"description": "Updated description for Windows",
|
||||
"usage_count": 42,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T14:20:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
tag = tag_service.update_tag(db, tag_id, tag_data)
|
||||
return TagResponse.model_validate(tag)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{tag_id}",
|
||||
response_model=dict,
|
||||
summary="Delete tag",
|
||||
description="Delete a tag by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Tag deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Tag deleted successfully",
|
||||
"tag_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Tag not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Tag with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_tag(
|
||||
tag_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a tag.
|
||||
|
||||
- **tag_id**: UUID of the tag to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/tags/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Tag deleted successfully",
|
||||
"tag_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return tag_service.delete_tag(db, tag_id)
|
||||
395
api/routers/tasks.py
Normal file
395
api/routers/tasks.py
Normal file
@@ -0,0 +1,395 @@
|
||||
"""
|
||||
Task API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing tasks, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.task import (
|
||||
TaskCreate,
|
||||
TaskResponse,
|
||||
TaskUpdate,
|
||||
)
|
||||
from api.services import task_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all tasks",
|
||||
description="Retrieve a paginated list of all tasks with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_tasks(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
session_id: UUID | None = Query(
|
||||
default=None,
|
||||
description="Filter tasks by session ID"
|
||||
),
|
||||
status_filter: str | None = Query(
|
||||
default=None,
|
||||
description="Filter tasks by status (pending, in_progress, blocked, completed, cancelled)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all tasks with pagination.
|
||||
|
||||
- **skip**: Number of tasks to skip (default: 0)
|
||||
- **limit**: Maximum number of tasks to return (default: 100, max: 1000)
|
||||
- **session_id**: Optional filter by session ID
|
||||
- **status_filter**: Optional filter by status
|
||||
|
||||
Returns a list of tasks with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/tasks?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 25,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"tasks": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"title": "Implement authentication",
|
||||
"task_order": 1,
|
||||
"status": "in_progress",
|
||||
"task_type": "implementation",
|
||||
"estimated_complexity": "moderate",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
# Filter by session if specified
|
||||
if session_id:
|
||||
tasks, total = task_service.get_tasks_by_session(db, session_id, skip, limit)
|
||||
# Filter by status if specified
|
||||
elif status_filter:
|
||||
tasks, total = task_service.get_tasks_by_status(db, status_filter, skip, limit)
|
||||
# Otherwise get all tasks
|
||||
else:
|
||||
tasks, total = task_service.get_tasks(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"tasks": [TaskResponse.model_validate(task) for task in tasks]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve tasks: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{task_id}",
|
||||
response_model=TaskResponse,
|
||||
summary="Get task by ID",
|
||||
description="Retrieve a single task by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Task found and returned",
|
||||
"model": TaskResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Task not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Task with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_task(
|
||||
task_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific task by ID.
|
||||
|
||||
- **task_id**: UUID of the task to retrieve
|
||||
|
||||
Returns the complete task details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/tasks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"parent_task_id": null,
|
||||
"task_order": 1,
|
||||
"title": "Implement authentication",
|
||||
"description": "Add JWT-based authentication to the API",
|
||||
"task_type": "implementation",
|
||||
"status": "in_progress",
|
||||
"blocking_reason": null,
|
||||
"session_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"client_id": "789e0123-e89b-12d3-a456-426614174002",
|
||||
"project_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"assigned_agent": "agent-1",
|
||||
"estimated_complexity": "moderate",
|
||||
"started_at": "2024-01-15T09:00:00Z",
|
||||
"completed_at": null,
|
||||
"task_context": null,
|
||||
"dependencies": null,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
task = task_service.get_task_by_id(db, task_id)
|
||||
return TaskResponse.model_validate(task)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=TaskResponse,
|
||||
summary="Create new task",
|
||||
description="Create a new task with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Task created successfully",
|
||||
"model": TaskResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Referenced session, client, project, or parent task not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Session with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "title"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_task(
|
||||
task_data: TaskCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new task.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/tasks
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"title": "Implement authentication",
|
||||
"task_order": 1,
|
||||
"description": "Add JWT-based authentication to the API",
|
||||
"task_type": "implementation",
|
||||
"status": "pending",
|
||||
"session_id": "456e7890-e89b-12d3-a456-426614174001",
|
||||
"project_id": "012e3456-e89b-12d3-a456-426614174003",
|
||||
"assigned_agent": "agent-1",
|
||||
"estimated_complexity": "moderate"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"title": "Implement authentication",
|
||||
"task_order": 1,
|
||||
"status": "pending",
|
||||
"task_type": "implementation",
|
||||
"estimated_complexity": "moderate",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
task = task_service.create_task(db, task_data)
|
||||
return TaskResponse.model_validate(task)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{task_id}",
|
||||
response_model=TaskResponse,
|
||||
summary="Update task",
|
||||
description="Update an existing task's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Task updated successfully",
|
||||
"model": TaskResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Task, session, client, project, or parent task not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Task with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Invalid session_id"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_task(
|
||||
task_id: UUID,
|
||||
task_data: TaskUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing task.
|
||||
|
||||
- **task_id**: UUID of the task to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/tasks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "completed",
|
||||
"completed_at": "2024-01-15T15:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"title": "Implement authentication",
|
||||
"task_order": 1,
|
||||
"status": "completed",
|
||||
"completed_at": "2024-01-15T15:00:00Z",
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"updated_at": "2024-01-15T15:00:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
task = task_service.update_task(db, task_id, task_data)
|
||||
return TaskResponse.model_validate(task)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{task_id}",
|
||||
response_model=dict,
|
||||
summary="Delete task",
|
||||
description="Delete a task by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Task deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Task deleted successfully",
|
||||
"task_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Task not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Task with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_task(
|
||||
task_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a task.
|
||||
|
||||
- **task_id**: UUID of the task to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/tasks/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Task deleted successfully",
|
||||
"task_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return task_service.delete_task(db, task_id)
|
||||
555
api/routers/work_items.py
Normal file
555
api/routers/work_items.py
Normal file
@@ -0,0 +1,555 @@
|
||||
"""
|
||||
Work Item API router for ClaudeTools.
|
||||
|
||||
This module defines all REST API endpoints for managing work items, including
|
||||
CRUD operations with proper authentication, validation, and error handling.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.database import get_db
|
||||
from api.middleware.auth import get_current_user
|
||||
from api.schemas.work_item import (
|
||||
WorkItemCreate,
|
||||
WorkItemResponse,
|
||||
WorkItemUpdate,
|
||||
)
|
||||
from api.services import work_item_service
|
||||
|
||||
# Create router with prefix and tags
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=dict,
|
||||
summary="List all work items",
|
||||
description="Retrieve a paginated list of all work items with optional filtering",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def list_work_items(
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
session_id: str = Query(
|
||||
default=None,
|
||||
description="Filter work items by session ID"
|
||||
),
|
||||
status_filter: str = Query(
|
||||
default=None,
|
||||
description="Filter work items by status (completed, in_progress, blocked, pending, deferred)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
List all work items with pagination and optional filtering.
|
||||
|
||||
- **skip**: Number of work items to skip (default: 0)
|
||||
- **limit**: Maximum number of work items to return (default: 100, max: 1000)
|
||||
- **session_id**: Filter by session ID (optional)
|
||||
- **status_filter**: Filter by status (optional)
|
||||
|
||||
Returns a list of work items with pagination metadata.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/work-items?skip=0&limit=50&status_filter=in_progress
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 25,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"work_items": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"description": "Updated firewall rules for new server",
|
||||
"status": "completed",
|
||||
"priority": "high",
|
||||
"is_billable": true,
|
||||
"estimated_minutes": 30,
|
||||
"actual_minutes": 25,
|
||||
"affected_systems": "[\"jupiter\", \"172.16.3.20\"]",
|
||||
"technologies_used": "[\"iptables\", \"ufw\"]",
|
||||
"item_order": 1,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"completed_at": "2024-01-15T11:00:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
if session_id:
|
||||
work_items, total = work_item_service.get_work_items_by_session(db, session_id, skip, limit)
|
||||
elif status_filter:
|
||||
work_items, total = work_item_service.get_work_items_by_status(db, status_filter, skip, limit)
|
||||
else:
|
||||
work_items, total = work_item_service.get_work_items(db, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"work_items": [WorkItemResponse.model_validate(work_item) for work_item in work_items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve work items: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/{work_item_id}",
|
||||
response_model=WorkItemResponse,
|
||||
summary="Get work item by ID",
|
||||
description="Retrieve a single work item by its unique identifier",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Work item found and returned",
|
||||
"model": WorkItemResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Work item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Work item with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_work_item(
|
||||
work_item_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get a specific work item by ID.
|
||||
|
||||
- **work_item_id**: UUID of the work item to retrieve
|
||||
|
||||
Returns the complete work item details.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/work-items/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"description": "Updated firewall rules for new server to allow web traffic",
|
||||
"status": "completed",
|
||||
"priority": "high",
|
||||
"is_billable": true,
|
||||
"estimated_minutes": 30,
|
||||
"actual_minutes": 25,
|
||||
"affected_systems": "[\"jupiter\", \"172.16.3.20\"]",
|
||||
"technologies_used": "[\"iptables\", \"ufw\"]",
|
||||
"item_order": 1,
|
||||
"created_at": "2024-01-15T10:30:00Z",
|
||||
"completed_at": "2024-01-15T11:00:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
work_item = work_item_service.get_work_item_by_id(db, work_item_id)
|
||||
return WorkItemResponse.model_validate(work_item)
|
||||
|
||||
|
||||
@router.post(
|
||||
"",
|
||||
response_model=WorkItemResponse,
|
||||
summary="Create new work item",
|
||||
description="Create a new work item with the provided details",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
responses={
|
||||
201: {
|
||||
"description": "Work item created successfully",
|
||||
"model": WorkItemResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Session not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Session with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": [
|
||||
{
|
||||
"loc": ["body", "title"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def create_work_item(
|
||||
work_item_data: WorkItemCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Create a new work item.
|
||||
|
||||
Requires a valid JWT token with appropriate permissions.
|
||||
The session_id must reference an existing session.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/work-items
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"description": "Updated firewall rules for new server to allow web traffic",
|
||||
"status": "completed",
|
||||
"priority": "high",
|
||||
"is_billable": true,
|
||||
"estimated_minutes": 30,
|
||||
"actual_minutes": 25,
|
||||
"affected_systems": "[\"jupiter\", \"172.16.3.20\"]",
|
||||
"technologies_used": "[\"iptables\", \"ufw\"]",
|
||||
"item_order": 1,
|
||||
"completed_at": "2024-01-15T11:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"status": "completed",
|
||||
"priority": "high",
|
||||
"is_billable": true,
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
work_item = work_item_service.create_work_item(db, work_item_data)
|
||||
return WorkItemResponse.model_validate(work_item)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/{work_item_id}",
|
||||
response_model=WorkItemResponse,
|
||||
summary="Update work item",
|
||||
description="Update an existing work item's details",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Work item updated successfully",
|
||||
"model": WorkItemResponse,
|
||||
},
|
||||
404: {
|
||||
"description": "Work item or session not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Work item with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
422: {
|
||||
"description": "Validation error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Invalid status. Must be one of: completed, in_progress, blocked, pending, deferred"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def update_work_item(
|
||||
work_item_id: UUID,
|
||||
work_item_data: WorkItemUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Update an existing work item.
|
||||
|
||||
- **work_item_id**: UUID of the work item to update
|
||||
|
||||
Only provided fields will be updated. All fields are optional.
|
||||
If updating session_id, the new session must exist.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
PUT /api/work-items/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "completed",
|
||||
"actual_minutes": 30,
|
||||
"completed_at": "2024-01-15T11:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"status": "completed",
|
||||
"actual_minutes": 30,
|
||||
"completed_at": "2024-01-15T11:00:00Z",
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
"""
|
||||
work_item = work_item_service.update_work_item(db, work_item_id, work_item_data)
|
||||
return WorkItemResponse.model_validate(work_item)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/{work_item_id}",
|
||||
response_model=dict,
|
||||
summary="Delete work item",
|
||||
description="Delete a work item by its ID",
|
||||
status_code=status.HTTP_200_OK,
|
||||
responses={
|
||||
200: {
|
||||
"description": "Work item deleted successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"message": "Work item deleted successfully",
|
||||
"work_item_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
404: {
|
||||
"description": "Work item not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Work item with ID 123e4567-e89b-12d3-a456-426614174000 not found"}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
def delete_work_item(
|
||||
work_item_id: UUID,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Delete a work item.
|
||||
|
||||
- **work_item_id**: UUID of the work item to delete
|
||||
|
||||
This is a permanent operation and cannot be undone.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
DELETE /api/work-items/123e4567-e89b-12d3-a456-426614174000
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Work item deleted successfully",
|
||||
"work_item_id": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
```
|
||||
"""
|
||||
return work_item_service.delete_work_item(db, work_item_id)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-project/{project_id}",
|
||||
response_model=dict,
|
||||
summary="Get work items by project",
|
||||
description="Retrieve all work items associated with a specific project through sessions",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_work_items_by_project(
|
||||
project_id: str,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all work items for a specific project.
|
||||
|
||||
- **project_id**: UUID of the project
|
||||
- **skip**: Number of work items to skip (default: 0)
|
||||
- **limit**: Maximum number of work items to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of work items associated with the project through sessions.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/work-items/by-project/123e4567-e89b-12d3-a456-426614174000?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 15,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"project_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"work_items": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"status": "completed",
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
work_items, total = work_item_service.get_work_items_by_project(db, project_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"project_id": project_id,
|
||||
"work_items": [WorkItemResponse.model_validate(work_item) for work_item in work_items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve work items for project: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/by-client/{client_id}",
|
||||
response_model=dict,
|
||||
summary="Get work items by client",
|
||||
description="Retrieve all work items associated with a specific client through sessions",
|
||||
status_code=status.HTTP_200_OK,
|
||||
)
|
||||
def get_work_items_by_client(
|
||||
client_id: str,
|
||||
skip: int = Query(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Number of records to skip for pagination"
|
||||
),
|
||||
limit: int = Query(
|
||||
default=100,
|
||||
ge=1,
|
||||
le=1000,
|
||||
description="Maximum number of records to return (max 1000)"
|
||||
),
|
||||
db: Session = Depends(get_db),
|
||||
current_user: dict = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
Get all work items for a specific client.
|
||||
|
||||
- **client_id**: UUID of the client
|
||||
- **skip**: Number of work items to skip (default: 0)
|
||||
- **limit**: Maximum number of work items to return (default: 100, max: 1000)
|
||||
|
||||
Returns a list of work items associated with the client through sessions.
|
||||
|
||||
**Example Request:**
|
||||
```
|
||||
GET /api/work-items/by-client/123e4567-e89b-12d3-a456-426614174000?skip=0&limit=50
|
||||
Authorization: Bearer <token>
|
||||
```
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"total": 42,
|
||||
"skip": 0,
|
||||
"limit": 50,
|
||||
"client_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"work_items": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"session_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"category": "infrastructure",
|
||||
"title": "Configure firewall rules",
|
||||
"status": "completed",
|
||||
"created_at": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
"""
|
||||
try:
|
||||
work_items, total = work_item_service.get_work_items_by_client(db, client_id, skip, limit)
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"client_id": client_id,
|
||||
"work_items": [WorkItemResponse.model_validate(work_item) for work_item in work_items]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve work items for client: {str(e)}"
|
||||
)
|
||||
141
api/schemas/__init__.py
Normal file
141
api/schemas/__init__.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""Pydantic schemas for request/response validation"""
|
||||
|
||||
from .billable_time import BillableTimeBase, BillableTimeCreate, BillableTimeResponse, BillableTimeUpdate
|
||||
from .client import ClientBase, ClientCreate, ClientResponse, ClientUpdate
|
||||
from .context_snippet import ContextSnippetBase, ContextSnippetCreate, ContextSnippetResponse, ContextSnippetUpdate
|
||||
from .conversation_context import (
|
||||
ConversationContextBase,
|
||||
ConversationContextCreate,
|
||||
ConversationContextResponse,
|
||||
ConversationContextUpdate,
|
||||
)
|
||||
from .credential import CredentialBase, CredentialCreate, CredentialResponse, CredentialUpdate
|
||||
from .credential_audit_log import (
|
||||
CredentialAuditLogBase,
|
||||
CredentialAuditLogCreate,
|
||||
CredentialAuditLogResponse,
|
||||
CredentialAuditLogUpdate,
|
||||
)
|
||||
from .decision_log import DecisionLogBase, DecisionLogCreate, DecisionLogResponse, DecisionLogUpdate
|
||||
from .firewall_rule import FirewallRuleBase, FirewallRuleCreate, FirewallRuleResponse, FirewallRuleUpdate
|
||||
from .infrastructure import InfrastructureBase, InfrastructureCreate, InfrastructureResponse, InfrastructureUpdate
|
||||
from .m365_tenant import M365TenantBase, M365TenantCreate, M365TenantResponse, M365TenantUpdate
|
||||
from .machine import MachineBase, MachineCreate, MachineResponse, MachineUpdate
|
||||
from .network import NetworkBase, NetworkCreate, NetworkResponse, NetworkUpdate
|
||||
from .project import ProjectBase, ProjectCreate, ProjectResponse, ProjectUpdate
|
||||
from .project_state import ProjectStateBase, ProjectStateCreate, ProjectStateResponse, ProjectStateUpdate
|
||||
from .security_incident import SecurityIncidentBase, SecurityIncidentCreate, SecurityIncidentResponse, SecurityIncidentUpdate
|
||||
from .service import ServiceBase, ServiceCreate, ServiceResponse, ServiceUpdate
|
||||
from .session import SessionBase, SessionCreate, SessionResponse, SessionUpdate
|
||||
from .site import SiteBase, SiteCreate, SiteResponse, SiteUpdate
|
||||
from .tag import TagBase, TagCreate, TagResponse, TagUpdate
|
||||
from .task import TaskBase, TaskCreate, TaskResponse, TaskUpdate
|
||||
from .work_item import WorkItemBase, WorkItemCreate, WorkItemResponse, WorkItemUpdate
|
||||
|
||||
__all__ = [
|
||||
# Machine schemas
|
||||
"MachineBase",
|
||||
"MachineCreate",
|
||||
"MachineUpdate",
|
||||
"MachineResponse",
|
||||
# Client schemas
|
||||
"ClientBase",
|
||||
"ClientCreate",
|
||||
"ClientUpdate",
|
||||
"ClientResponse",
|
||||
# Project schemas
|
||||
"ProjectBase",
|
||||
"ProjectCreate",
|
||||
"ProjectUpdate",
|
||||
"ProjectResponse",
|
||||
# Session schemas
|
||||
"SessionBase",
|
||||
"SessionCreate",
|
||||
"SessionUpdate",
|
||||
"SessionResponse",
|
||||
# Tag schemas
|
||||
"TagBase",
|
||||
"TagCreate",
|
||||
"TagUpdate",
|
||||
"TagResponse",
|
||||
# WorkItem schemas
|
||||
"WorkItemBase",
|
||||
"WorkItemCreate",
|
||||
"WorkItemUpdate",
|
||||
"WorkItemResponse",
|
||||
# Task schemas
|
||||
"TaskBase",
|
||||
"TaskCreate",
|
||||
"TaskUpdate",
|
||||
"TaskResponse",
|
||||
# BillableTime schemas
|
||||
"BillableTimeBase",
|
||||
"BillableTimeCreate",
|
||||
"BillableTimeUpdate",
|
||||
"BillableTimeResponse",
|
||||
# Site schemas
|
||||
"SiteBase",
|
||||
"SiteCreate",
|
||||
"SiteUpdate",
|
||||
"SiteResponse",
|
||||
# Infrastructure schemas
|
||||
"InfrastructureBase",
|
||||
"InfrastructureCreate",
|
||||
"InfrastructureUpdate",
|
||||
"InfrastructureResponse",
|
||||
# Service schemas
|
||||
"ServiceBase",
|
||||
"ServiceCreate",
|
||||
"ServiceUpdate",
|
||||
"ServiceResponse",
|
||||
# Network schemas
|
||||
"NetworkBase",
|
||||
"NetworkCreate",
|
||||
"NetworkUpdate",
|
||||
"NetworkResponse",
|
||||
# FirewallRule schemas
|
||||
"FirewallRuleBase",
|
||||
"FirewallRuleCreate",
|
||||
"FirewallRuleUpdate",
|
||||
"FirewallRuleResponse",
|
||||
# M365Tenant schemas
|
||||
"M365TenantBase",
|
||||
"M365TenantCreate",
|
||||
"M365TenantUpdate",
|
||||
"M365TenantResponse",
|
||||
# Credential schemas
|
||||
"CredentialBase",
|
||||
"CredentialCreate",
|
||||
"CredentialUpdate",
|
||||
"CredentialResponse",
|
||||
# CredentialAuditLog schemas
|
||||
"CredentialAuditLogBase",
|
||||
"CredentialAuditLogCreate",
|
||||
"CredentialAuditLogUpdate",
|
||||
"CredentialAuditLogResponse",
|
||||
# SecurityIncident schemas
|
||||
"SecurityIncidentBase",
|
||||
"SecurityIncidentCreate",
|
||||
"SecurityIncidentUpdate",
|
||||
"SecurityIncidentResponse",
|
||||
# ConversationContext schemas
|
||||
"ConversationContextBase",
|
||||
"ConversationContextCreate",
|
||||
"ConversationContextUpdate",
|
||||
"ConversationContextResponse",
|
||||
# ContextSnippet schemas
|
||||
"ContextSnippetBase",
|
||||
"ContextSnippetCreate",
|
||||
"ContextSnippetUpdate",
|
||||
"ContextSnippetResponse",
|
||||
# ProjectState schemas
|
||||
"ProjectStateBase",
|
||||
"ProjectStateCreate",
|
||||
"ProjectStateUpdate",
|
||||
"ProjectStateResponse",
|
||||
# DecisionLog schemas
|
||||
"DecisionLogBase",
|
||||
"DecisionLogCreate",
|
||||
"DecisionLogUpdate",
|
||||
"DecisionLogResponse",
|
||||
]
|
||||
99
api/schemas/billable_time.py
Normal file
99
api/schemas/billable_time.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
Pydantic schemas for BillableTime model.
|
||||
|
||||
Request and response schemas for billable time entries with billing information.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
class BillableTimeBase(BaseModel):
|
||||
"""Base schema with shared BillableTime fields."""
|
||||
|
||||
work_item_id: Optional[str] = Field(None, description="Foreign key to work_items table (UUID)")
|
||||
session_id: Optional[str] = Field(None, description="Foreign key to sessions table (UUID)")
|
||||
client_id: str = Field(..., description="Foreign key to clients table (UUID)")
|
||||
start_time: datetime = Field(..., description="When the billable time started")
|
||||
end_time: Optional[datetime] = Field(None, description="When the billable time ended")
|
||||
duration_minutes: int = Field(..., description="Duration in minutes (auto-calculated or manual)", gt=0)
|
||||
hourly_rate: float = Field(..., description="Hourly rate applied to this time entry", ge=0)
|
||||
total_amount: float = Field(..., description="Total billable amount (calculated)", ge=0)
|
||||
is_billable: bool = Field(True, description="Whether this time entry is actually billable")
|
||||
description: str = Field(..., description="Description of the work performed")
|
||||
category: str = Field(..., description="Category: consulting, development, support, maintenance, troubleshooting, project_work, training, documentation")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about this time entry")
|
||||
invoiced_at: Optional[datetime] = Field(None, description="When this time entry was invoiced")
|
||||
invoice_id: Optional[str] = Field(None, description="Reference to invoice if applicable")
|
||||
|
||||
@field_validator('category')
|
||||
@classmethod
|
||||
def validate_category(cls, v: str) -> str:
|
||||
"""Validate that category is one of the allowed values."""
|
||||
allowed_categories = {
|
||||
'consulting', 'development', 'support', 'maintenance',
|
||||
'troubleshooting', 'project_work', 'training', 'documentation'
|
||||
}
|
||||
if v not in allowed_categories:
|
||||
raise ValueError(f"Category must be one of: {', '.join(allowed_categories)}")
|
||||
return v
|
||||
|
||||
@field_validator('end_time')
|
||||
@classmethod
|
||||
def validate_end_time(cls, v: Optional[datetime], info) -> Optional[datetime]:
|
||||
"""Validate that end_time is after start_time if provided."""
|
||||
if v is not None and 'start_time' in info.data:
|
||||
start_time = info.data['start_time']
|
||||
if v < start_time:
|
||||
raise ValueError("end_time must be after start_time")
|
||||
return v
|
||||
|
||||
|
||||
class BillableTimeCreate(BillableTimeBase):
|
||||
"""Schema for creating a new BillableTime entry."""
|
||||
pass
|
||||
|
||||
|
||||
class BillableTimeUpdate(BaseModel):
|
||||
"""Schema for updating an existing BillableTime entry. All fields are optional."""
|
||||
|
||||
work_item_id: Optional[str] = Field(None, description="Foreign key to work_items table (UUID)")
|
||||
session_id: Optional[str] = Field(None, description="Foreign key to sessions table (UUID)")
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
start_time: Optional[datetime] = Field(None, description="When the billable time started")
|
||||
end_time: Optional[datetime] = Field(None, description="When the billable time ended")
|
||||
duration_minutes: Optional[int] = Field(None, description="Duration in minutes (auto-calculated or manual)", gt=0)
|
||||
hourly_rate: Optional[float] = Field(None, description="Hourly rate applied to this time entry", ge=0)
|
||||
total_amount: Optional[float] = Field(None, description="Total billable amount (calculated)", ge=0)
|
||||
is_billable: Optional[bool] = Field(None, description="Whether this time entry is actually billable")
|
||||
description: Optional[str] = Field(None, description="Description of the work performed")
|
||||
category: Optional[str] = Field(None, description="Category: consulting, development, support, maintenance, troubleshooting, project_work, training, documentation")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about this time entry")
|
||||
invoiced_at: Optional[datetime] = Field(None, description="When this time entry was invoiced")
|
||||
invoice_id: Optional[str] = Field(None, description="Reference to invoice if applicable")
|
||||
|
||||
@field_validator('category')
|
||||
@classmethod
|
||||
def validate_category(cls, v: Optional[str]) -> Optional[str]:
|
||||
"""Validate that category is one of the allowed values."""
|
||||
if v is not None:
|
||||
allowed_categories = {
|
||||
'consulting', 'development', 'support', 'maintenance',
|
||||
'troubleshooting', 'project_work', 'training', 'documentation'
|
||||
}
|
||||
if v not in allowed_categories:
|
||||
raise ValueError(f"Category must be one of: {', '.join(allowed_categories)}")
|
||||
return v
|
||||
|
||||
|
||||
class BillableTimeResponse(BillableTimeBase):
|
||||
"""Schema for BillableTime responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the billable time entry")
|
||||
created_at: datetime = Field(..., description="Timestamp when the entry was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the entry was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
52
api/schemas/client.py
Normal file
52
api/schemas/client.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
Pydantic schemas for Client model.
|
||||
|
||||
Request and response schemas for client organizations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ClientBase(BaseModel):
|
||||
"""Base schema with shared Client fields."""
|
||||
|
||||
name: str = Field(..., description="Client name (unique)")
|
||||
type: str = Field(..., description="Client type: msp_client, internal, project")
|
||||
network_subnet: Optional[str] = Field(None, description="Client network subnet (e.g., '192.168.0.0/24')")
|
||||
domain_name: Optional[str] = Field(None, description="Active Directory domain or primary domain")
|
||||
m365_tenant_id: Optional[str] = Field(None, description="Microsoft 365 tenant ID (UUID format)")
|
||||
primary_contact: Optional[str] = Field(None, description="Primary contact person")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the client")
|
||||
is_active: bool = Field(True, description="Whether client is currently active")
|
||||
|
||||
|
||||
class ClientCreate(ClientBase):
|
||||
"""Schema for creating a new Client."""
|
||||
pass
|
||||
|
||||
|
||||
class ClientUpdate(BaseModel):
|
||||
"""Schema for updating an existing Client. All fields are optional."""
|
||||
|
||||
name: Optional[str] = Field(None, description="Client name (unique)")
|
||||
type: Optional[str] = Field(None, description="Client type: msp_client, internal, project")
|
||||
network_subnet: Optional[str] = Field(None, description="Client network subnet (e.g., '192.168.0.0/24')")
|
||||
domain_name: Optional[str] = Field(None, description="Active Directory domain or primary domain")
|
||||
m365_tenant_id: Optional[str] = Field(None, description="Microsoft 365 tenant ID (UUID format)")
|
||||
primary_contact: Optional[str] = Field(None, description="Primary contact person")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the client")
|
||||
is_active: Optional[bool] = Field(None, description="Whether client is currently active")
|
||||
|
||||
|
||||
class ClientResponse(ClientBase):
|
||||
"""Schema for Client responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the client")
|
||||
created_at: datetime = Field(..., description="Timestamp when the client was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the client was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
54
api/schemas/context_snippet.py
Normal file
54
api/schemas/context_snippet.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""
|
||||
Pydantic schemas for ContextSnippet model.
|
||||
|
||||
Request and response schemas for reusable context snippets.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ContextSnippetBase(BaseModel):
|
||||
"""Base schema with shared ContextSnippet fields."""
|
||||
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
client_id: Optional[UUID] = Field(None, description="Client ID (optional)")
|
||||
category: str = Field(..., description="Category: tech_decision, configuration, pattern, lesson_learned")
|
||||
title: str = Field(..., description="Brief title describing the snippet")
|
||||
dense_content: str = Field(..., description="Highly compressed information content")
|
||||
structured_data: Optional[str] = Field(None, description="JSON object for optional structured representation")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
relevance_score: float = Field(1.0, ge=0.0, le=10.0, description="Float score for ranking relevance (0.0-10.0)")
|
||||
usage_count: int = Field(0, ge=0, description="Integer count of how many times this snippet was retrieved")
|
||||
|
||||
|
||||
class ContextSnippetCreate(ContextSnippetBase):
|
||||
"""Schema for creating a new ContextSnippet."""
|
||||
pass
|
||||
|
||||
|
||||
class ContextSnippetUpdate(BaseModel):
|
||||
"""Schema for updating an existing ContextSnippet. All fields are optional."""
|
||||
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
client_id: Optional[UUID] = Field(None, description="Client ID (optional)")
|
||||
category: Optional[str] = Field(None, description="Category: tech_decision, configuration, pattern, lesson_learned")
|
||||
title: Optional[str] = Field(None, description="Brief title describing the snippet")
|
||||
dense_content: Optional[str] = Field(None, description="Highly compressed information content")
|
||||
structured_data: Optional[str] = Field(None, description="JSON object for optional structured representation")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
relevance_score: Optional[float] = Field(None, ge=0.0, le=10.0, description="Float score for ranking relevance (0.0-10.0)")
|
||||
usage_count: Optional[int] = Field(None, ge=0, description="Integer count of how many times this snippet was retrieved")
|
||||
|
||||
|
||||
class ContextSnippetResponse(ContextSnippetBase):
|
||||
"""Schema for ContextSnippet responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the context snippet")
|
||||
created_at: datetime = Field(..., description="Timestamp when the snippet was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the snippet was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
56
api/schemas/conversation_context.py
Normal file
56
api/schemas/conversation_context.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Pydantic schemas for ConversationContext model.
|
||||
|
||||
Request and response schemas for conversation context storage and recall.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ConversationContextBase(BaseModel):
|
||||
"""Base schema with shared ConversationContext fields."""
|
||||
|
||||
session_id: Optional[UUID] = Field(None, description="Session ID (optional)")
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
machine_id: Optional[UUID] = Field(None, description="Machine ID that created this context")
|
||||
context_type: str = Field(..., description="Type of context: session_summary, project_state, general_context")
|
||||
title: str = Field(..., description="Brief title describing the context")
|
||||
dense_summary: Optional[str] = Field(None, description="Compressed, structured summary (JSON or dense text)")
|
||||
key_decisions: Optional[str] = Field(None, description="JSON array of important decisions made")
|
||||
current_state: Optional[str] = Field(None, description="JSON object describing what's currently in progress")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
relevance_score: float = Field(1.0, ge=0.0, le=10.0, description="Float score for ranking relevance (0.0-10.0)")
|
||||
|
||||
|
||||
class ConversationContextCreate(ConversationContextBase):
|
||||
"""Schema for creating a new ConversationContext."""
|
||||
pass
|
||||
|
||||
|
||||
class ConversationContextUpdate(BaseModel):
|
||||
"""Schema for updating an existing ConversationContext. All fields are optional."""
|
||||
|
||||
session_id: Optional[UUID] = Field(None, description="Session ID (optional)")
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
machine_id: Optional[UUID] = Field(None, description="Machine ID that created this context")
|
||||
context_type: Optional[str] = Field(None, description="Type of context: session_summary, project_state, general_context")
|
||||
title: Optional[str] = Field(None, description="Brief title describing the context")
|
||||
dense_summary: Optional[str] = Field(None, description="Compressed, structured summary (JSON or dense text)")
|
||||
key_decisions: Optional[str] = Field(None, description="JSON array of important decisions made")
|
||||
current_state: Optional[str] = Field(None, description="JSON object describing what's currently in progress")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
relevance_score: Optional[float] = Field(None, ge=0.0, le=10.0, description="Float score for ranking relevance (0.0-10.0)")
|
||||
|
||||
|
||||
class ConversationContextResponse(ConversationContextBase):
|
||||
"""Schema for ConversationContext responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the conversation context")
|
||||
created_at: datetime = Field(..., description="Timestamp when the context was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the context was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
176
api/schemas/credential.py
Normal file
176
api/schemas/credential.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
Pydantic schemas for Credential model.
|
||||
|
||||
Request and response schemas for secure credential storage.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
from api.utils.crypto import decrypt_string
|
||||
|
||||
|
||||
class CredentialBase(BaseModel):
|
||||
"""Base schema with shared Credential fields."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to client")
|
||||
service_id: Optional[UUID] = Field(None, description="Reference to service")
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to infrastructure component")
|
||||
credential_type: str = Field(..., description="Type of credential: password, api_key, oauth, ssh_key, shared_secret, jwt, connection_string, certificate")
|
||||
service_name: str = Field(..., description="Display name for the service (e.g., 'Gitea Admin')")
|
||||
username: Optional[str] = Field(None, description="Username for authentication")
|
||||
client_id_oauth: Optional[str] = Field(None, description="OAuth client ID")
|
||||
tenant_id_oauth: Optional[str] = Field(None, description="OAuth tenant ID")
|
||||
public_key: Optional[str] = Field(None, description="SSH public key (not encrypted)")
|
||||
integration_code: Optional[str] = Field(None, description="Integration code for services like Autotask")
|
||||
external_url: Optional[str] = Field(None, description="External URL for the service")
|
||||
internal_url: Optional[str] = Field(None, description="Internal URL for the service")
|
||||
custom_port: Optional[int] = Field(None, description="Custom port number if applicable")
|
||||
role_description: Optional[str] = Field(None, description="Description of access level/role")
|
||||
requires_vpn: bool = Field(False, description="Whether VPN is required for access")
|
||||
requires_2fa: bool = Field(False, description="Whether 2FA is required")
|
||||
ssh_key_auth_enabled: bool = Field(False, description="Whether SSH key authentication is enabled")
|
||||
access_level: Optional[str] = Field(None, description="Description of access level")
|
||||
expires_at: Optional[datetime] = Field(None, description="When the credential expires")
|
||||
last_rotated_at: Optional[datetime] = Field(None, description="When the credential was last rotated")
|
||||
is_active: bool = Field(True, description="Whether the credential is currently active")
|
||||
|
||||
|
||||
class CredentialCreate(CredentialBase):
|
||||
"""Schema for creating a new Credential."""
|
||||
|
||||
password: Optional[str] = Field(None, description="Plain text password (will be encrypted before storage)")
|
||||
api_key: Optional[str] = Field(None, description="Plain text API key (will be encrypted before storage)")
|
||||
client_secret: Optional[str] = Field(None, description="Plain text OAuth client secret (will be encrypted before storage)")
|
||||
token: Optional[str] = Field(None, description="Plain text bearer/access token (will be encrypted before storage)")
|
||||
connection_string: Optional[str] = Field(None, description="Plain text connection string (will be encrypted before storage)")
|
||||
|
||||
|
||||
class CredentialUpdate(BaseModel):
|
||||
"""Schema for updating an existing Credential. All fields are optional."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to client")
|
||||
service_id: Optional[UUID] = Field(None, description="Reference to service")
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to infrastructure component")
|
||||
credential_type: Optional[str] = Field(None, description="Type of credential")
|
||||
service_name: Optional[str] = Field(None, description="Display name for the service")
|
||||
username: Optional[str] = Field(None, description="Username for authentication")
|
||||
password: Optional[str] = Field(None, description="Plain text password (will be encrypted before storage)")
|
||||
api_key: Optional[str] = Field(None, description="Plain text API key (will be encrypted before storage)")
|
||||
client_id_oauth: Optional[str] = Field(None, description="OAuth client ID")
|
||||
client_secret: Optional[str] = Field(None, description="Plain text OAuth client secret (will be encrypted before storage)")
|
||||
tenant_id_oauth: Optional[str] = Field(None, description="OAuth tenant ID")
|
||||
public_key: Optional[str] = Field(None, description="SSH public key")
|
||||
token: Optional[str] = Field(None, description="Plain text bearer/access token (will be encrypted before storage)")
|
||||
connection_string: Optional[str] = Field(None, description="Plain text connection string (will be encrypted before storage)")
|
||||
integration_code: Optional[str] = Field(None, description="Integration code")
|
||||
external_url: Optional[str] = Field(None, description="External URL for the service")
|
||||
internal_url: Optional[str] = Field(None, description="Internal URL for the service")
|
||||
custom_port: Optional[int] = Field(None, description="Custom port number")
|
||||
role_description: Optional[str] = Field(None, description="Description of access level/role")
|
||||
requires_vpn: Optional[bool] = Field(None, description="Whether VPN is required")
|
||||
requires_2fa: Optional[bool] = Field(None, description="Whether 2FA is required")
|
||||
ssh_key_auth_enabled: Optional[bool] = Field(None, description="Whether SSH key authentication is enabled")
|
||||
access_level: Optional[str] = Field(None, description="Description of access level")
|
||||
expires_at: Optional[datetime] = Field(None, description="When the credential expires")
|
||||
last_rotated_at: Optional[datetime] = Field(None, description="When the credential was last rotated")
|
||||
is_active: Optional[bool] = Field(None, description="Whether the credential is active")
|
||||
|
||||
|
||||
class CredentialResponse(BaseModel):
|
||||
"""Schema for Credential responses with ID and timestamps. Includes decrypted values."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the credential")
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to client")
|
||||
service_id: Optional[UUID] = Field(None, description="Reference to service")
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to infrastructure component")
|
||||
credential_type: str = Field(..., description="Type of credential")
|
||||
service_name: str = Field(..., description="Display name for the service")
|
||||
username: Optional[str] = Field(None, description="Username for authentication")
|
||||
|
||||
# Decrypted sensitive fields (computed from encrypted database fields)
|
||||
password: Optional[str] = Field(None, description="Decrypted password")
|
||||
api_key: Optional[str] = Field(None, description="Decrypted API key")
|
||||
client_secret: Optional[str] = Field(None, description="Decrypted OAuth client secret")
|
||||
token: Optional[str] = Field(None, description="Decrypted bearer/access token")
|
||||
connection_string: Optional[str] = Field(None, description="Decrypted connection string")
|
||||
|
||||
# OAuth and other non-encrypted fields
|
||||
client_id_oauth: Optional[str] = Field(None, description="OAuth client ID")
|
||||
tenant_id_oauth: Optional[str] = Field(None, description="OAuth tenant ID")
|
||||
public_key: Optional[str] = Field(None, description="SSH public key")
|
||||
integration_code: Optional[str] = Field(None, description="Integration code")
|
||||
external_url: Optional[str] = Field(None, description="External URL for the service")
|
||||
internal_url: Optional[str] = Field(None, description="Internal URL for the service")
|
||||
custom_port: Optional[int] = Field(None, description="Custom port number")
|
||||
role_description: Optional[str] = Field(None, description="Description of access level/role")
|
||||
requires_vpn: bool = Field(..., description="Whether VPN is required")
|
||||
requires_2fa: bool = Field(..., description="Whether 2FA is required")
|
||||
ssh_key_auth_enabled: bool = Field(..., description="Whether SSH key authentication is enabled")
|
||||
access_level: Optional[str] = Field(None, description="Description of access level")
|
||||
expires_at: Optional[datetime] = Field(None, description="When the credential expires")
|
||||
last_rotated_at: Optional[datetime] = Field(None, description="When the credential was last rotated")
|
||||
is_active: bool = Field(..., description="Whether the credential is active")
|
||||
created_at: datetime = Field(..., description="Timestamp when the credential was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the credential was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
@field_validator("password", mode="before")
|
||||
@classmethod
|
||||
def decrypt_password(cls, v):
|
||||
"""Decrypt password_encrypted field from database."""
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bytes):
|
||||
# This is the encrypted bytes from password_encrypted field
|
||||
encrypted_str = v.decode('utf-8')
|
||||
return decrypt_string(encrypted_str, default=None)
|
||||
return v
|
||||
|
||||
@field_validator("api_key", mode="before")
|
||||
@classmethod
|
||||
def decrypt_api_key(cls, v):
|
||||
"""Decrypt api_key_encrypted field from database."""
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bytes):
|
||||
encrypted_str = v.decode('utf-8')
|
||||
return decrypt_string(encrypted_str, default=None)
|
||||
return v
|
||||
|
||||
@field_validator("client_secret", mode="before")
|
||||
@classmethod
|
||||
def decrypt_client_secret(cls, v):
|
||||
"""Decrypt client_secret_encrypted field from database."""
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bytes):
|
||||
encrypted_str = v.decode('utf-8')
|
||||
return decrypt_string(encrypted_str, default=None)
|
||||
return v
|
||||
|
||||
@field_validator("token", mode="before")
|
||||
@classmethod
|
||||
def decrypt_token(cls, v):
|
||||
"""Decrypt token_encrypted field from database."""
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bytes):
|
||||
encrypted_str = v.decode('utf-8')
|
||||
return decrypt_string(encrypted_str, default=None)
|
||||
return v
|
||||
|
||||
@field_validator("connection_string", mode="before")
|
||||
@classmethod
|
||||
def decrypt_connection_string(cls, v):
|
||||
"""Decrypt connection_string_encrypted field from database."""
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bytes):
|
||||
encrypted_str = v.decode('utf-8')
|
||||
return decrypt_string(encrypted_str, default=None)
|
||||
return v
|
||||
47
api/schemas/credential_audit_log.py
Normal file
47
api/schemas/credential_audit_log.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Pydantic schemas for CredentialAuditLog model.
|
||||
|
||||
Request and response schemas for credential audit logging.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class CredentialAuditLogBase(BaseModel):
|
||||
"""Base schema with shared CredentialAuditLog fields."""
|
||||
|
||||
credential_id: UUID = Field(..., description="Reference to the credential")
|
||||
action: str = Field(..., description="Type of action: view, create, update, delete, rotate, decrypt")
|
||||
user_id: str = Field(..., description="User who performed the action (JWT sub claim)")
|
||||
ip_address: Optional[str] = Field(None, description="IP address (IPv4 or IPv6)")
|
||||
user_agent: Optional[str] = Field(None, description="Browser/client user agent string")
|
||||
details: Optional[str] = Field(None, description="JSON string with additional context (what changed, why, etc.)")
|
||||
|
||||
|
||||
class CredentialAuditLogCreate(CredentialAuditLogBase):
|
||||
"""Schema for creating a new CredentialAuditLog entry."""
|
||||
pass
|
||||
|
||||
|
||||
class CredentialAuditLogUpdate(BaseModel):
|
||||
"""
|
||||
Schema for updating an existing CredentialAuditLog.
|
||||
|
||||
NOTE: Audit logs should be immutable in most cases. This schema is provided
|
||||
for completeness but should rarely be used.
|
||||
"""
|
||||
|
||||
details: Optional[str] = Field(None, description="JSON string with additional context")
|
||||
|
||||
|
||||
class CredentialAuditLogResponse(CredentialAuditLogBase):
|
||||
"""Schema for CredentialAuditLog responses with ID and timestamp."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the audit log entry")
|
||||
timestamp: datetime = Field(..., description="When the action was performed")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
52
api/schemas/decision_log.py
Normal file
52
api/schemas/decision_log.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
Pydantic schemas for DecisionLog model.
|
||||
|
||||
Request and response schemas for tracking important decisions made during work.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class DecisionLogBase(BaseModel):
|
||||
"""Base schema with shared DecisionLog fields."""
|
||||
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
session_id: Optional[UUID] = Field(None, description="Session ID (optional)")
|
||||
decision_type: str = Field(..., description="Type of decision: technical, architectural, process, security")
|
||||
decision_text: str = Field(..., description="What was decided (the actual decision)")
|
||||
rationale: Optional[str] = Field(None, description="Why this decision was made")
|
||||
alternatives_considered: Optional[str] = Field(None, description="JSON array of other options that were considered")
|
||||
impact: str = Field("medium", description="Impact level: low, medium, high, critical")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
|
||||
|
||||
class DecisionLogCreate(DecisionLogBase):
|
||||
"""Schema for creating a new DecisionLog."""
|
||||
pass
|
||||
|
||||
|
||||
class DecisionLogUpdate(BaseModel):
|
||||
"""Schema for updating an existing DecisionLog. All fields are optional."""
|
||||
|
||||
project_id: Optional[UUID] = Field(None, description="Project ID (optional)")
|
||||
session_id: Optional[UUID] = Field(None, description="Session ID (optional)")
|
||||
decision_type: Optional[str] = Field(None, description="Type of decision: technical, architectural, process, security")
|
||||
decision_text: Optional[str] = Field(None, description="What was decided (the actual decision)")
|
||||
rationale: Optional[str] = Field(None, description="Why this decision was made")
|
||||
alternatives_considered: Optional[str] = Field(None, description="JSON array of other options that were considered")
|
||||
impact: Optional[str] = Field(None, description="Impact level: low, medium, high, critical")
|
||||
tags: Optional[str] = Field(None, description="JSON array of tags for retrieval and categorization")
|
||||
|
||||
|
||||
class DecisionLogResponse(DecisionLogBase):
|
||||
"""Schema for DecisionLog responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the decision log")
|
||||
created_at: datetime = Field(..., description="Timestamp when the decision was logged")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the decision log was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
56
api/schemas/firewall_rule.py
Normal file
56
api/schemas/firewall_rule.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Pydantic schemas for FirewallRule model.
|
||||
|
||||
Request and response schemas for network security rules.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class FirewallRuleBase(BaseModel):
|
||||
"""Base schema with shared FirewallRule fields."""
|
||||
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to the infrastructure this rule applies to")
|
||||
rule_name: Optional[str] = Field(None, description="Name of the firewall rule")
|
||||
source_cidr: Optional[str] = Field(None, description="Source CIDR notation")
|
||||
destination_cidr: Optional[str] = Field(None, description="Destination CIDR notation")
|
||||
port: Optional[int] = Field(None, description="Port number")
|
||||
protocol: Optional[str] = Field(None, description="Protocol: tcp, udp, icmp")
|
||||
action: Optional[str] = Field(None, description="Action: allow, deny, drop")
|
||||
rule_order: Optional[int] = Field(None, description="Order of the rule in the firewall")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
created_by: Optional[str] = Field(None, description="Who created the rule")
|
||||
|
||||
|
||||
class FirewallRuleCreate(FirewallRuleBase):
|
||||
"""Schema for creating a new FirewallRule."""
|
||||
pass
|
||||
|
||||
|
||||
class FirewallRuleUpdate(BaseModel):
|
||||
"""Schema for updating an existing FirewallRule. All fields are optional."""
|
||||
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to the infrastructure this rule applies to")
|
||||
rule_name: Optional[str] = Field(None, description="Name of the firewall rule")
|
||||
source_cidr: Optional[str] = Field(None, description="Source CIDR notation")
|
||||
destination_cidr: Optional[str] = Field(None, description="Destination CIDR notation")
|
||||
port: Optional[int] = Field(None, description="Port number")
|
||||
protocol: Optional[str] = Field(None, description="Protocol: tcp, udp, icmp")
|
||||
action: Optional[str] = Field(None, description="Action: allow, deny, drop")
|
||||
rule_order: Optional[int] = Field(None, description="Order of the rule in the firewall")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
created_by: Optional[str] = Field(None, description="Who created the rule")
|
||||
|
||||
|
||||
class FirewallRuleResponse(FirewallRuleBase):
|
||||
"""Schema for FirewallRule responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the firewall rule")
|
||||
created_at: datetime = Field(..., description="Timestamp when the firewall rule was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the firewall rule was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
73
api/schemas/infrastructure.py
Normal file
73
api/schemas/infrastructure.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Pydantic schemas for Infrastructure model.
|
||||
|
||||
Request and response schemas for infrastructure assets including servers,
|
||||
network devices, workstations, and other IT infrastructure.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class InfrastructureBase(BaseModel):
|
||||
"""Base schema with shared Infrastructure fields."""
|
||||
|
||||
client_id: Optional[str] = Field(None, description="Reference to the client")
|
||||
site_id: Optional[str] = Field(None, description="Reference to the site this infrastructure is located at")
|
||||
asset_type: str = Field(..., description="Type: physical_server, virtual_machine, container, network_device, nas_storage, workstation, firewall, domain_controller")
|
||||
hostname: str = Field(..., description="Hostname of the infrastructure")
|
||||
ip_address: Optional[str] = Field(None, description="IP address (IPv4 or IPv6)")
|
||||
mac_address: Optional[str] = Field(None, description="MAC address")
|
||||
os: Optional[str] = Field(None, description="Operating system name (e.g., 'Ubuntu 22.04', 'Windows Server 2022')")
|
||||
os_version: Optional[str] = Field(None, description="Operating system version (e.g., '6.22', '2008 R2', '22.04')")
|
||||
role_description: Optional[str] = Field(None, description="Description of the infrastructure's role")
|
||||
parent_host_id: Optional[str] = Field(None, description="Reference to parent host for VMs/containers")
|
||||
status: str = Field("active", description="Status: active, migration_source, migration_destination, decommissioned")
|
||||
environmental_notes: Optional[str] = Field(None, description="Special environmental constraints or notes")
|
||||
powershell_version: Optional[str] = Field(None, description="PowerShell version (e.g., '2.0', '5.1', '7.4')")
|
||||
shell_type: Optional[str] = Field(None, description="Shell type: bash, cmd, powershell, sh")
|
||||
package_manager: Optional[str] = Field(None, description="Package manager: apt, yum, chocolatey, none")
|
||||
has_gui: bool = Field(True, description="Whether the system has a GUI")
|
||||
limitations: Optional[str] = Field(None, description='JSON array of limitations (e.g., ["no_ps7", "smb1_only", "dos_6.22_commands"])')
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class InfrastructureCreate(InfrastructureBase):
|
||||
"""Schema for creating a new Infrastructure item."""
|
||||
pass
|
||||
|
||||
|
||||
class InfrastructureUpdate(BaseModel):
|
||||
"""Schema for updating an existing Infrastructure item. All fields are optional."""
|
||||
|
||||
client_id: Optional[str] = Field(None, description="Reference to the client")
|
||||
site_id: Optional[str] = Field(None, description="Reference to the site this infrastructure is located at")
|
||||
asset_type: Optional[str] = Field(None, description="Type: physical_server, virtual_machine, container, network_device, nas_storage, workstation, firewall, domain_controller")
|
||||
hostname: Optional[str] = Field(None, description="Hostname of the infrastructure")
|
||||
ip_address: Optional[str] = Field(None, description="IP address (IPv4 or IPv6)")
|
||||
mac_address: Optional[str] = Field(None, description="MAC address")
|
||||
os: Optional[str] = Field(None, description="Operating system name (e.g., 'Ubuntu 22.04', 'Windows Server 2022')")
|
||||
os_version: Optional[str] = Field(None, description="Operating system version (e.g., '6.22', '2008 R2', '22.04')")
|
||||
role_description: Optional[str] = Field(None, description="Description of the infrastructure's role")
|
||||
parent_host_id: Optional[str] = Field(None, description="Reference to parent host for VMs/containers")
|
||||
status: Optional[str] = Field(None, description="Status: active, migration_source, migration_destination, decommissioned")
|
||||
environmental_notes: Optional[str] = Field(None, description="Special environmental constraints or notes")
|
||||
powershell_version: Optional[str] = Field(None, description="PowerShell version (e.g., '2.0', '5.1', '7.4')")
|
||||
shell_type: Optional[str] = Field(None, description="Shell type: bash, cmd, powershell, sh")
|
||||
package_manager: Optional[str] = Field(None, description="Package manager: apt, yum, chocolatey, none")
|
||||
has_gui: Optional[bool] = Field(None, description="Whether the system has a GUI")
|
||||
limitations: Optional[str] = Field(None, description='JSON array of limitations (e.g., ["no_ps7", "smb1_only", "dos_6.22_commands"])')
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class InfrastructureResponse(InfrastructureBase):
|
||||
"""Schema for Infrastructure responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the infrastructure item")
|
||||
created_at: datetime = Field(..., description="Timestamp when the infrastructure was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the infrastructure was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
50
api/schemas/m365_tenant.py
Normal file
50
api/schemas/m365_tenant.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Pydantic schemas for M365Tenant model.
|
||||
|
||||
Request and response schemas for Microsoft 365 tenant configurations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class M365TenantBase(BaseModel):
|
||||
"""Base schema with shared M365Tenant fields."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to the client")
|
||||
tenant_id: str = Field(..., description="Microsoft tenant ID (UUID)")
|
||||
tenant_name: Optional[str] = Field(None, description="Tenant name (e.g., 'dataforth.com')")
|
||||
default_domain: Optional[str] = Field(None, description="Default domain (e.g., 'dataforthcorp.onmicrosoft.com')")
|
||||
admin_email: Optional[str] = Field(None, description="Administrator email address")
|
||||
cipp_name: Optional[str] = Field(None, description="Name in CIPP portal")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class M365TenantCreate(M365TenantBase):
|
||||
"""Schema for creating a new M365Tenant."""
|
||||
pass
|
||||
|
||||
|
||||
class M365TenantUpdate(BaseModel):
|
||||
"""Schema for updating an existing M365Tenant. All fields are optional."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to the client")
|
||||
tenant_id: Optional[str] = Field(None, description="Microsoft tenant ID (UUID)")
|
||||
tenant_name: Optional[str] = Field(None, description="Tenant name (e.g., 'dataforth.com')")
|
||||
default_domain: Optional[str] = Field(None, description="Default domain (e.g., 'dataforthcorp.onmicrosoft.com')")
|
||||
admin_email: Optional[str] = Field(None, description="Administrator email address")
|
||||
cipp_name: Optional[str] = Field(None, description="Name in CIPP portal")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class M365TenantResponse(M365TenantBase):
|
||||
"""Schema for M365Tenant responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the M365 tenant")
|
||||
created_at: datetime = Field(..., description="Timestamp when the M365 tenant was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the M365 tenant was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
98
api/schemas/machine.py
Normal file
98
api/schemas/machine.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""
|
||||
Pydantic schemas for Machine model.
|
||||
|
||||
Request and response schemas for technician's machines used for MSP work.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class MachineBase(BaseModel):
|
||||
"""Base schema with shared Machine fields."""
|
||||
|
||||
hostname: str = Field(..., description="Machine hostname from `hostname` command")
|
||||
machine_fingerprint: Optional[str] = Field(None, description="SHA256 hash: hostname + username + platform + home_directory")
|
||||
friendly_name: Optional[str] = Field(None, description="Human-readable name like 'Main Laptop' or 'Home Desktop'")
|
||||
machine_type: Optional[str] = Field(None, description="Type of machine: laptop, desktop, workstation, vm")
|
||||
platform: Optional[str] = Field(None, description="Operating system platform: win32, darwin, linux")
|
||||
os_version: Optional[str] = Field(None, description="Operating system version")
|
||||
username: Optional[str] = Field(None, description="Username from `whoami` command")
|
||||
home_directory: Optional[str] = Field(None, description="User home directory path")
|
||||
has_vpn_access: bool = Field(False, description="Whether machine can connect to client networks")
|
||||
vpn_profiles: Optional[str] = Field(None, description="JSON array of available VPN profiles")
|
||||
has_docker: bool = Field(False, description="Whether Docker is installed")
|
||||
has_powershell: bool = Field(False, description="Whether PowerShell is installed")
|
||||
powershell_version: Optional[str] = Field(None, description="PowerShell version if installed")
|
||||
has_ssh: bool = Field(True, description="Whether SSH is available")
|
||||
has_git: bool = Field(True, description="Whether Git is installed")
|
||||
typical_network_location: Optional[str] = Field(None, description="Typical network location: home, office, mobile")
|
||||
static_ip: Optional[str] = Field(None, description="Static IP address if applicable (supports IPv4/IPv6)")
|
||||
claude_working_directory: Optional[str] = Field(None, description="Primary working directory for Claude Code")
|
||||
additional_working_dirs: Optional[str] = Field(None, description="JSON array of additional working directories")
|
||||
installed_tools: Optional[str] = Field(None, description='JSON object with tool versions like {"git": "2.40", "docker": "24.0"}')
|
||||
available_mcps: Optional[str] = Field(None, description="JSON array of available MCP servers")
|
||||
mcp_capabilities: Optional[str] = Field(None, description="JSON object with MCP capabilities")
|
||||
available_skills: Optional[str] = Field(None, description="JSON array of available skills")
|
||||
skill_paths: Optional[str] = Field(None, description="JSON object mapping skill names to paths")
|
||||
preferred_shell: Optional[str] = Field(None, description="Preferred shell: powershell, bash, zsh, cmd")
|
||||
package_manager_commands: Optional[str] = Field(None, description="JSON object with package manager commands")
|
||||
is_primary: bool = Field(False, description="Whether this is the primary machine")
|
||||
is_active: bool = Field(True, description="Whether machine is currently active")
|
||||
last_seen: Optional[datetime] = Field(None, description="Last time machine was seen")
|
||||
last_session_id: Optional[str] = Field(None, description="UUID of last session from this machine")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the machine")
|
||||
|
||||
|
||||
class MachineCreate(MachineBase):
|
||||
"""Schema for creating a new Machine."""
|
||||
pass
|
||||
|
||||
|
||||
class MachineUpdate(BaseModel):
|
||||
"""Schema for updating an existing Machine. All fields are optional."""
|
||||
|
||||
hostname: Optional[str] = Field(None, description="Machine hostname from `hostname` command")
|
||||
machine_fingerprint: Optional[str] = Field(None, description="SHA256 hash: hostname + username + platform + home_directory")
|
||||
friendly_name: Optional[str] = Field(None, description="Human-readable name like 'Main Laptop' or 'Home Desktop'")
|
||||
machine_type: Optional[str] = Field(None, description="Type of machine: laptop, desktop, workstation, vm")
|
||||
platform: Optional[str] = Field(None, description="Operating system platform: win32, darwin, linux")
|
||||
os_version: Optional[str] = Field(None, description="Operating system version")
|
||||
username: Optional[str] = Field(None, description="Username from `whoami` command")
|
||||
home_directory: Optional[str] = Field(None, description="User home directory path")
|
||||
has_vpn_access: Optional[bool] = Field(None, description="Whether machine can connect to client networks")
|
||||
vpn_profiles: Optional[str] = Field(None, description="JSON array of available VPN profiles")
|
||||
has_docker: Optional[bool] = Field(None, description="Whether Docker is installed")
|
||||
has_powershell: Optional[bool] = Field(None, description="Whether PowerShell is installed")
|
||||
powershell_version: Optional[str] = Field(None, description="PowerShell version if installed")
|
||||
has_ssh: Optional[bool] = Field(None, description="Whether SSH is available")
|
||||
has_git: Optional[bool] = Field(None, description="Whether Git is installed")
|
||||
typical_network_location: Optional[str] = Field(None, description="Typical network location: home, office, mobile")
|
||||
static_ip: Optional[str] = Field(None, description="Static IP address if applicable (supports IPv4/IPv6)")
|
||||
claude_working_directory: Optional[str] = Field(None, description="Primary working directory for Claude Code")
|
||||
additional_working_dirs: Optional[str] = Field(None, description="JSON array of additional working directories")
|
||||
installed_tools: Optional[str] = Field(None, description='JSON object with tool versions like {"git": "2.40", "docker": "24.0"}')
|
||||
available_mcps: Optional[str] = Field(None, description="JSON array of available MCP servers")
|
||||
mcp_capabilities: Optional[str] = Field(None, description="JSON object with MCP capabilities")
|
||||
available_skills: Optional[str] = Field(None, description="JSON array of available skills")
|
||||
skill_paths: Optional[str] = Field(None, description="JSON object mapping skill names to paths")
|
||||
preferred_shell: Optional[str] = Field(None, description="Preferred shell: powershell, bash, zsh, cmd")
|
||||
package_manager_commands: Optional[str] = Field(None, description="JSON object with package manager commands")
|
||||
is_primary: Optional[bool] = Field(None, description="Whether this is the primary machine")
|
||||
is_active: Optional[bool] = Field(None, description="Whether machine is currently active")
|
||||
last_seen: Optional[datetime] = Field(None, description="Last time machine was seen")
|
||||
last_session_id: Optional[str] = Field(None, description="UUID of last session from this machine")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the machine")
|
||||
|
||||
|
||||
class MachineResponse(MachineBase):
|
||||
"""Schema for Machine responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the machine")
|
||||
created_at: datetime = Field(..., description="Timestamp when the machine was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the machine was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
52
api/schemas/network.py
Normal file
52
api/schemas/network.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
Pydantic schemas for Network model.
|
||||
|
||||
Request and response schemas for network segments and VLANs.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class NetworkBase(BaseModel):
|
||||
"""Base schema with shared Network fields."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to the client")
|
||||
site_id: Optional[UUID] = Field(None, description="Reference to the site")
|
||||
network_name: str = Field(..., description="Name of the network")
|
||||
network_type: Optional[str] = Field(None, description="Type: lan, vpn, vlan, isolated, dmz")
|
||||
cidr: str = Field(..., description="Network CIDR notation (e.g., '192.168.0.0/24')")
|
||||
gateway_ip: Optional[str] = Field(None, description="Gateway IP address")
|
||||
vlan_id: Optional[int] = Field(None, description="VLAN ID if applicable")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class NetworkCreate(NetworkBase):
|
||||
"""Schema for creating a new Network."""
|
||||
pass
|
||||
|
||||
|
||||
class NetworkUpdate(BaseModel):
|
||||
"""Schema for updating an existing Network. All fields are optional."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to the client")
|
||||
site_id: Optional[UUID] = Field(None, description="Reference to the site")
|
||||
network_name: Optional[str] = Field(None, description="Name of the network")
|
||||
network_type: Optional[str] = Field(None, description="Type: lan, vpn, vlan, isolated, dmz")
|
||||
cidr: Optional[str] = Field(None, description="Network CIDR notation (e.g., '192.168.0.0/24')")
|
||||
gateway_ip: Optional[str] = Field(None, description="Gateway IP address")
|
||||
vlan_id: Optional[int] = Field(None, description="VLAN ID if applicable")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class NetworkResponse(NetworkBase):
|
||||
"""Schema for Network responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the network")
|
||||
created_at: datetime = Field(..., description="Timestamp when the network was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the network was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
64
api/schemas/project.py
Normal file
64
api/schemas/project.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""
|
||||
Pydantic schemas for Project model.
|
||||
|
||||
Request and response schemas for individual projects and engagements.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ProjectBase(BaseModel):
|
||||
"""Base schema with shared Project fields."""
|
||||
|
||||
client_id: str = Field(..., description="Foreign key to clients table (UUID)")
|
||||
name: str = Field(..., description="Project name")
|
||||
slug: Optional[str] = Field(None, description="URL-safe slug (directory name like 'dataforth-dos')")
|
||||
category: Optional[str] = Field(None, description="Project category: client_project, internal_product, infrastructure, website, development_tool, documentation")
|
||||
status: str = Field("working", description="Status: complete, working, blocked, pending, critical, deferred")
|
||||
priority: Optional[str] = Field(None, description="Priority level: critical, high, medium, low")
|
||||
description: Optional[str] = Field(None, description="Project description")
|
||||
started_date: Optional[date] = Field(None, description="Date project started")
|
||||
target_completion_date: Optional[date] = Field(None, description="Target completion date")
|
||||
completed_date: Optional[date] = Field(None, description="Actual completion date")
|
||||
estimated_hours: Optional[float] = Field(None, description="Estimated hours for completion")
|
||||
actual_hours: Optional[float] = Field(None, description="Actual hours spent")
|
||||
gitea_repo_url: Optional[str] = Field(None, description="Gitea repository URL if applicable")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the project")
|
||||
|
||||
|
||||
class ProjectCreate(ProjectBase):
|
||||
"""Schema for creating a new Project."""
|
||||
pass
|
||||
|
||||
|
||||
class ProjectUpdate(BaseModel):
|
||||
"""Schema for updating an existing Project. All fields are optional."""
|
||||
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
name: Optional[str] = Field(None, description="Project name")
|
||||
slug: Optional[str] = Field(None, description="URL-safe slug (directory name like 'dataforth-dos')")
|
||||
category: Optional[str] = Field(None, description="Project category: client_project, internal_product, infrastructure, website, development_tool, documentation")
|
||||
status: Optional[str] = Field(None, description="Status: complete, working, blocked, pending, critical, deferred")
|
||||
priority: Optional[str] = Field(None, description="Priority level: critical, high, medium, low")
|
||||
description: Optional[str] = Field(None, description="Project description")
|
||||
started_date: Optional[date] = Field(None, description="Date project started")
|
||||
target_completion_date: Optional[date] = Field(None, description="Target completion date")
|
||||
completed_date: Optional[date] = Field(None, description="Actual completion date")
|
||||
estimated_hours: Optional[float] = Field(None, description="Estimated hours for completion")
|
||||
actual_hours: Optional[float] = Field(None, description="Actual hours spent")
|
||||
gitea_repo_url: Optional[str] = Field(None, description="Gitea repository URL if applicable")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the project")
|
||||
|
||||
|
||||
class ProjectResponse(ProjectBase):
|
||||
"""Schema for Project responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the project")
|
||||
created_at: datetime = Field(..., description="Timestamp when the project was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the project was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
53
api/schemas/project_state.py
Normal file
53
api/schemas/project_state.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""
|
||||
Pydantic schemas for ProjectState model.
|
||||
|
||||
Request and response schemas for tracking current state of projects.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ProjectStateBase(BaseModel):
|
||||
"""Base schema with shared ProjectState fields."""
|
||||
|
||||
project_id: UUID = Field(..., description="Project ID (required, unique - one state per project)")
|
||||
last_session_id: Optional[UUID] = Field(None, description="Last session ID that updated this state")
|
||||
current_phase: Optional[str] = Field(None, description="Current phase or stage of the project")
|
||||
progress_percentage: int = Field(0, ge=0, le=100, description="Integer percentage of completion (0-100)")
|
||||
blockers: Optional[str] = Field(None, description="JSON array of current blockers preventing progress")
|
||||
next_actions: Optional[str] = Field(None, description="JSON array of next steps to take")
|
||||
context_summary: Optional[str] = Field(None, description="Dense overview text of where the project currently stands")
|
||||
key_files: Optional[str] = Field(None, description="JSON array of important file paths for this project")
|
||||
important_decisions: Optional[str] = Field(None, description="JSON array of key decisions made for this project")
|
||||
|
||||
|
||||
class ProjectStateCreate(ProjectStateBase):
|
||||
"""Schema for creating a new ProjectState."""
|
||||
pass
|
||||
|
||||
|
||||
class ProjectStateUpdate(BaseModel):
|
||||
"""Schema for updating an existing ProjectState. All fields are optional except project_id."""
|
||||
|
||||
last_session_id: Optional[UUID] = Field(None, description="Last session ID that updated this state")
|
||||
current_phase: Optional[str] = Field(None, description="Current phase or stage of the project")
|
||||
progress_percentage: Optional[int] = Field(None, ge=0, le=100, description="Integer percentage of completion (0-100)")
|
||||
blockers: Optional[str] = Field(None, description="JSON array of current blockers preventing progress")
|
||||
next_actions: Optional[str] = Field(None, description="JSON array of next steps to take")
|
||||
context_summary: Optional[str] = Field(None, description="Dense overview text of where the project currently stands")
|
||||
key_files: Optional[str] = Field(None, description="JSON array of important file paths for this project")
|
||||
important_decisions: Optional[str] = Field(None, description="JSON array of key decisions made for this project")
|
||||
|
||||
|
||||
class ProjectStateResponse(ProjectStateBase):
|
||||
"""Schema for ProjectState responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the project state")
|
||||
created_at: datetime = Field(..., description="Timestamp when the state was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the state was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
60
api/schemas/security_incident.py
Normal file
60
api/schemas/security_incident.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""
|
||||
Pydantic schemas for SecurityIncident model.
|
||||
|
||||
Request and response schemas for security incident tracking.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SecurityIncidentBase(BaseModel):
|
||||
"""Base schema with shared SecurityIncident fields."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to affected client")
|
||||
service_id: Optional[UUID] = Field(None, description="Reference to affected service")
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to affected infrastructure")
|
||||
incident_type: Optional[str] = Field(None, description="Type of incident: bec, backdoor, malware, unauthorized_access, data_breach, phishing, ransomware, brute_force")
|
||||
incident_date: datetime = Field(..., description="When the incident occurred")
|
||||
severity: Optional[str] = Field(None, description="Severity level: critical, high, medium, low")
|
||||
description: str = Field(..., description="Detailed description of the incident")
|
||||
findings: Optional[str] = Field(None, description="Investigation results and findings")
|
||||
remediation_steps: Optional[str] = Field(None, description="Steps taken to remediate the incident")
|
||||
status: str = Field("investigating", description="Status: investigating, contained, resolved, monitoring")
|
||||
resolved_at: Optional[datetime] = Field(None, description="When the incident was resolved")
|
||||
notes: Optional[str] = Field(None, description="Additional notes and context")
|
||||
|
||||
|
||||
class SecurityIncidentCreate(SecurityIncidentBase):
|
||||
"""Schema for creating a new SecurityIncident."""
|
||||
pass
|
||||
|
||||
|
||||
class SecurityIncidentUpdate(BaseModel):
|
||||
"""Schema for updating an existing SecurityIncident. All fields are optional."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to affected client")
|
||||
service_id: Optional[UUID] = Field(None, description="Reference to affected service")
|
||||
infrastructure_id: Optional[UUID] = Field(None, description="Reference to affected infrastructure")
|
||||
incident_type: Optional[str] = Field(None, description="Type of incident")
|
||||
incident_date: Optional[datetime] = Field(None, description="When the incident occurred")
|
||||
severity: Optional[str] = Field(None, description="Severity level")
|
||||
description: Optional[str] = Field(None, description="Detailed description of the incident")
|
||||
findings: Optional[str] = Field(None, description="Investigation results and findings")
|
||||
remediation_steps: Optional[str] = Field(None, description="Steps taken to remediate the incident")
|
||||
status: Optional[str] = Field(None, description="Status of incident handling")
|
||||
resolved_at: Optional[datetime] = Field(None, description="When the incident was resolved")
|
||||
notes: Optional[str] = Field(None, description="Additional notes and context")
|
||||
|
||||
|
||||
class SecurityIncidentResponse(SecurityIncidentBase):
|
||||
"""Schema for SecurityIncident responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the security incident")
|
||||
created_at: datetime = Field(..., description="Timestamp when the incident was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the incident was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
56
api/schemas/service.py
Normal file
56
api/schemas/service.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Pydantic schemas for Service model.
|
||||
|
||||
Request and response schemas for services running on infrastructure.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ServiceBase(BaseModel):
|
||||
"""Base schema with shared Service fields."""
|
||||
|
||||
infrastructure_id: Optional[str] = Field(None, description="Foreign key to infrastructure table (UUID)")
|
||||
service_name: str = Field(..., description="Name of the service (e.g., 'Gitea', 'PostgreSQL', 'Apache')")
|
||||
service_type: Optional[str] = Field(None, description="Type of service (e.g., 'git_hosting', 'database', 'web_server')")
|
||||
external_url: Optional[str] = Field(None, description="External URL for accessing the service")
|
||||
internal_url: Optional[str] = Field(None, description="Internal URL for accessing the service")
|
||||
port: Optional[int] = Field(None, description="Port number the service runs on")
|
||||
protocol: Optional[str] = Field(None, description="Protocol used (https, ssh, smb, etc.)")
|
||||
status: str = Field("running", description="Status: running, stopped, error, maintenance")
|
||||
version: Optional[str] = Field(None, description="Version of the service")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class ServiceCreate(ServiceBase):
|
||||
"""Schema for creating a new Service."""
|
||||
pass
|
||||
|
||||
|
||||
class ServiceUpdate(BaseModel):
|
||||
"""Schema for updating an existing Service. All fields are optional."""
|
||||
|
||||
infrastructure_id: Optional[str] = Field(None, description="Foreign key to infrastructure table (UUID)")
|
||||
service_name: Optional[str] = Field(None, description="Name of the service (e.g., 'Gitea', 'PostgreSQL', 'Apache')")
|
||||
service_type: Optional[str] = Field(None, description="Type of service (e.g., 'git_hosting', 'database', 'web_server')")
|
||||
external_url: Optional[str] = Field(None, description="External URL for accessing the service")
|
||||
internal_url: Optional[str] = Field(None, description="Internal URL for accessing the service")
|
||||
port: Optional[int] = Field(None, description="Port number the service runs on")
|
||||
protocol: Optional[str] = Field(None, description="Protocol used (https, ssh, smb, etc.)")
|
||||
status: Optional[str] = Field(None, description="Status: running, stopped, error, maintenance")
|
||||
version: Optional[str] = Field(None, description="Version of the service")
|
||||
notes: Optional[str] = Field(None, description="Additional notes")
|
||||
|
||||
|
||||
class ServiceResponse(ServiceBase):
|
||||
"""Schema for Service responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the service")
|
||||
created_at: datetime = Field(..., description="Timestamp when the service was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the service was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
66
api/schemas/session.py
Normal file
66
api/schemas/session.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""
|
||||
Pydantic schemas for Session model.
|
||||
|
||||
Request and response schemas for work sessions with time tracking.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SessionBase(BaseModel):
|
||||
"""Base schema with shared Session fields."""
|
||||
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
project_id: Optional[str] = Field(None, description="Foreign key to projects table (UUID)")
|
||||
machine_id: Optional[str] = Field(None, description="Foreign key to machines table (UUID)")
|
||||
session_date: date = Field(..., description="Date of the session")
|
||||
start_time: Optional[datetime] = Field(None, description="Session start timestamp")
|
||||
end_time: Optional[datetime] = Field(None, description="Session end timestamp")
|
||||
duration_minutes: Optional[int] = Field(None, description="Duration in minutes (auto-calculated or manual)")
|
||||
status: str = Field("completed", description="Session status: completed, in_progress, blocked, pending")
|
||||
session_title: str = Field(..., description="Brief title describing the session")
|
||||
summary: Optional[str] = Field(None, description="Markdown summary of the session")
|
||||
is_billable: bool = Field(False, description="Whether this session is billable")
|
||||
billable_hours: Optional[float] = Field(None, description="Billable hours if applicable")
|
||||
technician: Optional[str] = Field(None, description="Name of technician who performed the work")
|
||||
session_log_file: Optional[str] = Field(None, description="Path to markdown session log file")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the session")
|
||||
|
||||
|
||||
class SessionCreate(SessionBase):
|
||||
"""Schema for creating a new Session."""
|
||||
pass
|
||||
|
||||
|
||||
class SessionUpdate(BaseModel):
|
||||
"""Schema for updating an existing Session. All fields are optional."""
|
||||
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
project_id: Optional[str] = Field(None, description="Foreign key to projects table (UUID)")
|
||||
machine_id: Optional[str] = Field(None, description="Foreign key to machines table (UUID)")
|
||||
session_date: Optional[date] = Field(None, description="Date of the session")
|
||||
start_time: Optional[datetime] = Field(None, description="Session start timestamp")
|
||||
end_time: Optional[datetime] = Field(None, description="Session end timestamp")
|
||||
duration_minutes: Optional[int] = Field(None, description="Duration in minutes (auto-calculated or manual)")
|
||||
status: Optional[str] = Field(None, description="Session status: completed, in_progress, blocked, pending")
|
||||
session_title: Optional[str] = Field(None, description="Brief title describing the session")
|
||||
summary: Optional[str] = Field(None, description="Markdown summary of the session")
|
||||
is_billable: Optional[bool] = Field(None, description="Whether this session is billable")
|
||||
billable_hours: Optional[float] = Field(None, description="Billable hours if applicable")
|
||||
technician: Optional[str] = Field(None, description="Name of technician who performed the work")
|
||||
session_log_file: Optional[str] = Field(None, description="Path to markdown session log file")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the session")
|
||||
|
||||
|
||||
class SessionResponse(SessionBase):
|
||||
"""Schema for Session responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the session")
|
||||
created_at: datetime = Field(..., description="Timestamp when the session was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the session was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
52
api/schemas/site.py
Normal file
52
api/schemas/site.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
Pydantic schemas for Site model.
|
||||
|
||||
Request and response schemas for client physical locations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SiteBase(BaseModel):
|
||||
"""Base schema with shared Site fields."""
|
||||
|
||||
client_id: UUID = Field(..., description="Reference to the client this site belongs to")
|
||||
name: str = Field(..., description="Site name (e.g., 'Main Office', 'SLC - Salt Lake City')")
|
||||
network_subnet: Optional[str] = Field(None, description="Network subnet for the site (e.g., '172.16.9.0/24')")
|
||||
vpn_required: bool = Field(False, description="Whether VPN is required to access this site")
|
||||
vpn_subnet: Optional[str] = Field(None, description="VPN subnet if applicable (e.g., '192.168.1.0/24')")
|
||||
gateway_ip: Optional[str] = Field(None, description="Gateway IP address (IPv4 or IPv6)")
|
||||
dns_servers: Optional[str] = Field(None, description="JSON array of DNS server addresses")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the site")
|
||||
|
||||
|
||||
class SiteCreate(SiteBase):
|
||||
"""Schema for creating a new Site."""
|
||||
pass
|
||||
|
||||
|
||||
class SiteUpdate(BaseModel):
|
||||
"""Schema for updating an existing Site. All fields are optional."""
|
||||
|
||||
client_id: Optional[UUID] = Field(None, description="Reference to the client this site belongs to")
|
||||
name: Optional[str] = Field(None, description="Site name (e.g., 'Main Office', 'SLC - Salt Lake City')")
|
||||
network_subnet: Optional[str] = Field(None, description="Network subnet for the site (e.g., '172.16.9.0/24')")
|
||||
vpn_required: Optional[bool] = Field(None, description="Whether VPN is required to access this site")
|
||||
vpn_subnet: Optional[str] = Field(None, description="VPN subnet if applicable (e.g., '192.168.1.0/24')")
|
||||
gateway_ip: Optional[str] = Field(None, description="Gateway IP address (IPv4 or IPv6)")
|
||||
dns_servers: Optional[str] = Field(None, description="JSON array of DNS server addresses")
|
||||
notes: Optional[str] = Field(None, description="Additional notes about the site")
|
||||
|
||||
|
||||
class SiteResponse(SiteBase):
|
||||
"""Schema for Site responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the site")
|
||||
created_at: datetime = Field(..., description="Timestamp when the site was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the site was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
47
api/schemas/tag.py
Normal file
47
api/schemas/tag.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Pydantic schemas for Tag model.
|
||||
|
||||
Request and response schemas for categorizing and organizing work items.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class TagBase(BaseModel):
|
||||
"""Base schema with shared Tag fields."""
|
||||
|
||||
name: str = Field(..., description="Tag name (unique)")
|
||||
category: Optional[str] = Field(None, description="Tag category: technology, client, infrastructure, problem_type, action, service")
|
||||
description: Optional[str] = Field(None, description="Description of the tag")
|
||||
usage_count: int = Field(0, description="Number of times this tag has been used (auto-incremented)")
|
||||
|
||||
|
||||
class TagCreate(BaseModel):
|
||||
"""Schema for creating a new Tag. usage_count is not user-provided."""
|
||||
|
||||
name: str = Field(..., description="Tag name (unique)")
|
||||
category: Optional[str] = Field(None, description="Tag category: technology, client, infrastructure, problem_type, action, service")
|
||||
description: Optional[str] = Field(None, description="Description of the tag")
|
||||
|
||||
|
||||
class TagUpdate(BaseModel):
|
||||
"""Schema for updating an existing Tag. All fields are optional."""
|
||||
|
||||
name: Optional[str] = Field(None, description="Tag name (unique)")
|
||||
category: Optional[str] = Field(None, description="Tag category: technology, client, infrastructure, problem_type, action, service")
|
||||
description: Optional[str] = Field(None, description="Description of the tag")
|
||||
usage_count: Optional[int] = Field(None, description="Number of times this tag has been used (auto-incremented)")
|
||||
|
||||
|
||||
class TagResponse(TagBase):
|
||||
"""Schema for Tag responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the tag")
|
||||
created_at: datetime = Field(..., description="Timestamp when the tag was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the tag was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
86
api/schemas/task.py
Normal file
86
api/schemas/task.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Pydantic schemas for Task model.
|
||||
|
||||
Request and response schemas for hierarchical task tracking.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class TaskBase(BaseModel):
|
||||
"""Base schema with shared Task fields."""
|
||||
|
||||
parent_task_id: Optional[str] = Field(None, description="Reference to parent task for hierarchical structure (UUID)")
|
||||
task_order: int = Field(..., description="Order of this task relative to siblings")
|
||||
title: str = Field(..., description="Task title", max_length=500)
|
||||
description: Optional[str] = Field(None, description="Detailed task description")
|
||||
task_type: Optional[str] = Field(
|
||||
None,
|
||||
description="Type: implementation, research, review, deployment, testing, documentation, bugfix, analysis"
|
||||
)
|
||||
status: str = Field(
|
||||
...,
|
||||
description="Status: pending, in_progress, blocked, completed, cancelled"
|
||||
)
|
||||
blocking_reason: Optional[str] = Field(None, description="Reason why task is blocked (if status='blocked')")
|
||||
session_id: Optional[str] = Field(None, description="Foreign key to sessions table (UUID)")
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
project_id: Optional[str] = Field(None, description="Foreign key to projects table (UUID)")
|
||||
assigned_agent: Optional[str] = Field(None, description="Which agent is handling this task", max_length=100)
|
||||
estimated_complexity: Optional[str] = Field(
|
||||
None,
|
||||
description="Complexity: trivial, simple, moderate, complex, very_complex"
|
||||
)
|
||||
started_at: Optional[datetime] = Field(None, description="When the task was started")
|
||||
completed_at: Optional[datetime] = Field(None, description="When the task was completed")
|
||||
task_context: Optional[str] = Field(None, description="Detailed context for this task (JSON)")
|
||||
dependencies: Optional[str] = Field(None, description="JSON array of dependency task IDs")
|
||||
|
||||
|
||||
class TaskCreate(TaskBase):
|
||||
"""Schema for creating a new Task."""
|
||||
pass
|
||||
|
||||
|
||||
class TaskUpdate(BaseModel):
|
||||
"""Schema for updating an existing Task. All fields are optional."""
|
||||
|
||||
parent_task_id: Optional[str] = Field(None, description="Reference to parent task for hierarchical structure (UUID)")
|
||||
task_order: Optional[int] = Field(None, description="Order of this task relative to siblings")
|
||||
title: Optional[str] = Field(None, description="Task title", max_length=500)
|
||||
description: Optional[str] = Field(None, description="Detailed task description")
|
||||
task_type: Optional[str] = Field(
|
||||
None,
|
||||
description="Type: implementation, research, review, deployment, testing, documentation, bugfix, analysis"
|
||||
)
|
||||
status: Optional[str] = Field(
|
||||
None,
|
||||
description="Status: pending, in_progress, blocked, completed, cancelled"
|
||||
)
|
||||
blocking_reason: Optional[str] = Field(None, description="Reason why task is blocked (if status='blocked')")
|
||||
session_id: Optional[str] = Field(None, description="Foreign key to sessions table (UUID)")
|
||||
client_id: Optional[str] = Field(None, description="Foreign key to clients table (UUID)")
|
||||
project_id: Optional[str] = Field(None, description="Foreign key to projects table (UUID)")
|
||||
assigned_agent: Optional[str] = Field(None, description="Which agent is handling this task", max_length=100)
|
||||
estimated_complexity: Optional[str] = Field(
|
||||
None,
|
||||
description="Complexity: trivial, simple, moderate, complex, very_complex"
|
||||
)
|
||||
started_at: Optional[datetime] = Field(None, description="When the task was started")
|
||||
completed_at: Optional[datetime] = Field(None, description="When the task was completed")
|
||||
task_context: Optional[str] = Field(None, description="Detailed context for this task (JSON)")
|
||||
dependencies: Optional[str] = Field(None, description="JSON array of dependency task IDs")
|
||||
|
||||
|
||||
class TaskResponse(TaskBase):
|
||||
"""Schema for Task responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the task")
|
||||
created_at: datetime = Field(..., description="Timestamp when the task was created")
|
||||
updated_at: datetime = Field(..., description="Timestamp when the task was last updated")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
91
api/schemas/work_item.py
Normal file
91
api/schemas/work_item.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
Pydantic schemas for WorkItem model.
|
||||
|
||||
Request and response schemas for work items tracking session activities.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class WorkItemBase(BaseModel):
|
||||
"""Base schema with shared WorkItem fields."""
|
||||
|
||||
session_id: str = Field(..., description="Foreign key to sessions table (UUID)")
|
||||
category: str = Field(
|
||||
...,
|
||||
description="Work category: infrastructure, troubleshooting, configuration, development, maintenance, security, documentation"
|
||||
)
|
||||
title: str = Field(..., description="Brief title of the work item")
|
||||
description: str = Field(..., description="Detailed description of the work performed")
|
||||
status: str = Field(
|
||||
"completed",
|
||||
description="Status: completed, in_progress, blocked, pending, deferred"
|
||||
)
|
||||
priority: Optional[str] = Field(
|
||||
None,
|
||||
description="Priority level: critical, high, medium, low"
|
||||
)
|
||||
is_billable: bool = Field(False, description="Whether this work item is billable")
|
||||
estimated_minutes: Optional[int] = Field(None, description="Estimated time to complete in minutes")
|
||||
actual_minutes: Optional[int] = Field(None, description="Actual time spent in minutes")
|
||||
affected_systems: Optional[str] = Field(
|
||||
None,
|
||||
description='JSON array of affected systems (e.g., ["jupiter", "172.16.3.20"])'
|
||||
)
|
||||
technologies_used: Optional[str] = Field(
|
||||
None,
|
||||
description='JSON array of technologies used (e.g., ["docker", "mariadb"])'
|
||||
)
|
||||
item_order: Optional[int] = Field(None, description="Sequence order within the session")
|
||||
completed_at: Optional[datetime] = Field(None, description="When the work item was completed")
|
||||
|
||||
|
||||
class WorkItemCreate(WorkItemBase):
|
||||
"""Schema for creating a new WorkItem."""
|
||||
pass
|
||||
|
||||
|
||||
class WorkItemUpdate(BaseModel):
|
||||
"""Schema for updating an existing WorkItem. All fields are optional."""
|
||||
|
||||
session_id: Optional[str] = Field(None, description="Foreign key to sessions table (UUID)")
|
||||
category: Optional[str] = Field(
|
||||
None,
|
||||
description="Work category: infrastructure, troubleshooting, configuration, development, maintenance, security, documentation"
|
||||
)
|
||||
title: Optional[str] = Field(None, description="Brief title of the work item")
|
||||
description: Optional[str] = Field(None, description="Detailed description of the work performed")
|
||||
status: Optional[str] = Field(
|
||||
None,
|
||||
description="Status: completed, in_progress, blocked, pending, deferred"
|
||||
)
|
||||
priority: Optional[str] = Field(
|
||||
None,
|
||||
description="Priority level: critical, high, medium, low"
|
||||
)
|
||||
is_billable: Optional[bool] = Field(None, description="Whether this work item is billable")
|
||||
estimated_minutes: Optional[int] = Field(None, description="Estimated time to complete in minutes")
|
||||
actual_minutes: Optional[int] = Field(None, description="Actual time spent in minutes")
|
||||
affected_systems: Optional[str] = Field(
|
||||
None,
|
||||
description='JSON array of affected systems (e.g., ["jupiter", "172.16.3.20"])'
|
||||
)
|
||||
technologies_used: Optional[str] = Field(
|
||||
None,
|
||||
description='JSON array of technologies used (e.g., ["docker", "mariadb"])'
|
||||
)
|
||||
item_order: Optional[int] = Field(None, description="Sequence order within the session")
|
||||
completed_at: Optional[datetime] = Field(None, description="When the work item was completed")
|
||||
|
||||
|
||||
class WorkItemResponse(WorkItemBase):
|
||||
"""Schema for WorkItem responses with ID and timestamps."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier for the work item")
|
||||
created_at: datetime = Field(..., description="Timestamp when the work item was created")
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
35
api/services/__init__.py
Normal file
35
api/services/__init__.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Business logic services for ClaudeTools API"""
|
||||
|
||||
from . import (
|
||||
machine_service,
|
||||
client_service,
|
||||
site_service,
|
||||
network_service,
|
||||
tag_service,
|
||||
service_service,
|
||||
infrastructure_service,
|
||||
credential_service,
|
||||
credential_audit_log_service,
|
||||
security_incident_service,
|
||||
conversation_context_service,
|
||||
context_snippet_service,
|
||||
project_state_service,
|
||||
decision_log_service,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"machine_service",
|
||||
"client_service",
|
||||
"site_service",
|
||||
"network_service",
|
||||
"tag_service",
|
||||
"service_service",
|
||||
"infrastructure_service",
|
||||
"credential_service",
|
||||
"credential_audit_log_service",
|
||||
"security_incident_service",
|
||||
"conversation_context_service",
|
||||
"context_snippet_service",
|
||||
"project_state_service",
|
||||
"decision_log_service",
|
||||
]
|
||||
407
api/services/billable_time_service.py
Normal file
407
api/services/billable_time_service.py
Normal file
@@ -0,0 +1,407 @@
|
||||
"""
|
||||
BillableTime service layer for business logic and database operations.
|
||||
|
||||
This module handles all database operations for billable time entries, providing a clean
|
||||
separation between the API routes and data access layer.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.models.billable_time import BillableTime as BillableTimeModel
|
||||
from api.models.client import Client
|
||||
from api.models.session import Session as SessionModel
|
||||
from api.models.work_item import WorkItem
|
||||
from api.schemas.billable_time import BillableTimeCreate, BillableTimeUpdate
|
||||
|
||||
|
||||
def get_billable_time_entries(db: Session, skip: int = 0, limit: int = 100) -> tuple[list[BillableTimeModel], int]:
|
||||
"""
|
||||
Retrieve a paginated list of billable time entries.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
skip: Number of records to skip (for pagination)
|
||||
limit: Maximum number of records to return
|
||||
|
||||
Returns:
|
||||
tuple: (list of billable time entries, total count)
|
||||
|
||||
Example:
|
||||
```python
|
||||
entries, total = get_billable_time_entries(db, skip=0, limit=50)
|
||||
print(f"Retrieved {len(entries)} of {total} billable time entries")
|
||||
```
|
||||
"""
|
||||
# Get total count
|
||||
total = db.query(BillableTimeModel).count()
|
||||
|
||||
# Get paginated results, ordered by start_time descending (newest first)
|
||||
entries = (
|
||||
db.query(BillableTimeModel)
|
||||
.order_by(BillableTimeModel.start_time.desc())
|
||||
.offset(skip)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
return entries, total
|
||||
|
||||
|
||||
def get_billable_time_by_id(db: Session, billable_time_id: UUID) -> BillableTimeModel:
|
||||
"""
|
||||
Retrieve a single billable time entry by its ID.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
billable_time_id: UUID of the billable time entry to retrieve
|
||||
|
||||
Returns:
|
||||
BillableTimeModel: The billable time entry object
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if billable time entry not found
|
||||
|
||||
Example:
|
||||
```python
|
||||
entry = get_billable_time_by_id(db, billable_time_id)
|
||||
print(f"Found entry: {entry.description}")
|
||||
```
|
||||
"""
|
||||
entry = db.query(BillableTimeModel).filter(BillableTimeModel.id == str(billable_time_id)).first()
|
||||
|
||||
if not entry:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Billable time entry with ID {billable_time_id} not found"
|
||||
)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
def get_billable_time_by_session(db: Session, session_id: UUID, skip: int = 0, limit: int = 100) -> tuple[list[BillableTimeModel], int]:
|
||||
"""
|
||||
Retrieve billable time entries for a specific session.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
session_id: UUID of the session
|
||||
skip: Number of records to skip (for pagination)
|
||||
limit: Maximum number of records to return
|
||||
|
||||
Returns:
|
||||
tuple: (list of billable time entries, total count)
|
||||
|
||||
Example:
|
||||
```python
|
||||
entries, total = get_billable_time_by_session(db, session_id)
|
||||
print(f"Found {total} billable time entries for session")
|
||||
```
|
||||
"""
|
||||
# Get total count
|
||||
total = db.query(BillableTimeModel).filter(BillableTimeModel.session_id == str(session_id)).count()
|
||||
|
||||
# Get paginated results
|
||||
entries = (
|
||||
db.query(BillableTimeModel)
|
||||
.filter(BillableTimeModel.session_id == str(session_id))
|
||||
.order_by(BillableTimeModel.start_time.desc())
|
||||
.offset(skip)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
return entries, total
|
||||
|
||||
|
||||
def get_billable_time_by_work_item(db: Session, work_item_id: UUID, skip: int = 0, limit: int = 100) -> tuple[list[BillableTimeModel], int]:
|
||||
"""
|
||||
Retrieve billable time entries for a specific work item.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
work_item_id: UUID of the work item
|
||||
skip: Number of records to skip (for pagination)
|
||||
limit: Maximum number of records to return
|
||||
|
||||
Returns:
|
||||
tuple: (list of billable time entries, total count)
|
||||
|
||||
Example:
|
||||
```python
|
||||
entries, total = get_billable_time_by_work_item(db, work_item_id)
|
||||
print(f"Found {total} billable time entries for work item")
|
||||
```
|
||||
"""
|
||||
# Get total count
|
||||
total = db.query(BillableTimeModel).filter(BillableTimeModel.work_item_id == str(work_item_id)).count()
|
||||
|
||||
# Get paginated results
|
||||
entries = (
|
||||
db.query(BillableTimeModel)
|
||||
.filter(BillableTimeModel.work_item_id == str(work_item_id))
|
||||
.order_by(BillableTimeModel.start_time.desc())
|
||||
.offset(skip)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
return entries, total
|
||||
|
||||
|
||||
def create_billable_time(db: Session, billable_time_data: BillableTimeCreate) -> BillableTimeModel:
|
||||
"""
|
||||
Create a new billable time entry.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
billable_time_data: Billable time creation data
|
||||
|
||||
Returns:
|
||||
BillableTimeModel: The created billable time entry object
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if referenced client, session, or work item not found
|
||||
HTTPException: 422 if validation fails
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
entry_data = BillableTimeCreate(
|
||||
client_id="123e4567-e89b-12d3-a456-426614174000",
|
||||
start_time=datetime.now(),
|
||||
duration_minutes=60,
|
||||
hourly_rate=150.00,
|
||||
total_amount=150.00,
|
||||
description="Database optimization",
|
||||
category="development"
|
||||
)
|
||||
entry = create_billable_time(db, entry_data)
|
||||
print(f"Created billable time entry: {entry.id}")
|
||||
```
|
||||
"""
|
||||
try:
|
||||
# Validate foreign keys
|
||||
# Client is required
|
||||
client = db.query(Client).filter(Client.id == str(billable_time_data.client_id)).first()
|
||||
if not client:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Client with ID {billable_time_data.client_id} not found"
|
||||
)
|
||||
|
||||
# Session is optional
|
||||
if billable_time_data.session_id:
|
||||
session = db.query(SessionModel).filter(SessionModel.id == str(billable_time_data.session_id)).first()
|
||||
if not session:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Session with ID {billable_time_data.session_id} not found"
|
||||
)
|
||||
|
||||
# Work item is optional
|
||||
if billable_time_data.work_item_id:
|
||||
work_item = db.query(WorkItem).filter(WorkItem.id == str(billable_time_data.work_item_id)).first()
|
||||
if not work_item:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Work item with ID {billable_time_data.work_item_id} not found"
|
||||
)
|
||||
|
||||
# Create new billable time entry instance
|
||||
db_billable_time = BillableTimeModel(**billable_time_data.model_dump())
|
||||
|
||||
# Add to database
|
||||
db.add(db_billable_time)
|
||||
db.commit()
|
||||
db.refresh(db_billable_time)
|
||||
|
||||
return db_billable_time
|
||||
|
||||
except HTTPException:
|
||||
db.rollback()
|
||||
raise
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
# Handle foreign key constraint violations
|
||||
if "client_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail=f"Invalid client_id: {billable_time_data.client_id}"
|
||||
)
|
||||
elif "session_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail=f"Invalid session_id: {billable_time_data.session_id}"
|
||||
)
|
||||
elif "work_item_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail=f"Invalid work_item_id: {billable_time_data.work_item_id}"
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Database error: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to create billable time entry: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
def update_billable_time(db: Session, billable_time_id: UUID, billable_time_data: BillableTimeUpdate) -> BillableTimeModel:
|
||||
"""
|
||||
Update an existing billable time entry.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
billable_time_id: UUID of the billable time entry to update
|
||||
billable_time_data: Billable time update data (only provided fields will be updated)
|
||||
|
||||
Returns:
|
||||
BillableTimeModel: The updated billable time entry object
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if billable time entry, client, session, or work item not found
|
||||
HTTPException: 422 if validation fails
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
update_data = BillableTimeUpdate(
|
||||
duration_minutes=90,
|
||||
total_amount=225.00
|
||||
)
|
||||
entry = update_billable_time(db, billable_time_id, update_data)
|
||||
print(f"Updated billable time entry: {entry.description}")
|
||||
```
|
||||
"""
|
||||
# Get existing billable time entry
|
||||
entry = get_billable_time_by_id(db, billable_time_id)
|
||||
|
||||
try:
|
||||
# Update only provided fields
|
||||
update_data = billable_time_data.model_dump(exclude_unset=True)
|
||||
|
||||
# Validate foreign keys if being updated
|
||||
if "client_id" in update_data and update_data["client_id"]:
|
||||
client = db.query(Client).filter(Client.id == str(update_data["client_id"])).first()
|
||||
if not client:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Client with ID {update_data['client_id']} not found"
|
||||
)
|
||||
|
||||
if "session_id" in update_data and update_data["session_id"]:
|
||||
session = db.query(SessionModel).filter(SessionModel.id == str(update_data["session_id"])).first()
|
||||
if not session:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Session with ID {update_data['session_id']} not found"
|
||||
)
|
||||
|
||||
if "work_item_id" in update_data and update_data["work_item_id"]:
|
||||
work_item = db.query(WorkItem).filter(WorkItem.id == str(update_data["work_item_id"])).first()
|
||||
if not work_item:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Work item with ID {update_data['work_item_id']} not found"
|
||||
)
|
||||
|
||||
# Validate end_time if being updated along with start_time
|
||||
if "end_time" in update_data and update_data["end_time"]:
|
||||
start_time = update_data.get("start_time", entry.start_time)
|
||||
if update_data["end_time"] < start_time:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="end_time must be after start_time"
|
||||
)
|
||||
|
||||
# Apply updates
|
||||
for field, value in update_data.items():
|
||||
setattr(entry, field, value)
|
||||
|
||||
db.commit()
|
||||
db.refresh(entry)
|
||||
|
||||
return entry
|
||||
|
||||
except HTTPException:
|
||||
db.rollback()
|
||||
raise
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
if "client_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="Invalid client_id"
|
||||
)
|
||||
elif "session_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="Invalid session_id"
|
||||
)
|
||||
elif "work_item_id" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="Invalid work_item_id"
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Database error: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to update billable time entry: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
def delete_billable_time(db: Session, billable_time_id: UUID) -> dict:
|
||||
"""
|
||||
Delete a billable time entry by its ID.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
billable_time_id: UUID of the billable time entry to delete
|
||||
|
||||
Returns:
|
||||
dict: Success message
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if billable time entry not found
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
result = delete_billable_time(db, billable_time_id)
|
||||
print(result["message"]) # "Billable time entry deleted successfully"
|
||||
```
|
||||
"""
|
||||
# Get existing billable time entry (raises 404 if not found)
|
||||
entry = get_billable_time_by_id(db, billable_time_id)
|
||||
|
||||
try:
|
||||
db.delete(entry)
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"message": "Billable time entry deleted successfully",
|
||||
"billable_time_id": str(billable_time_id)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to delete billable time entry: {str(e)}"
|
||||
)
|
||||
283
api/services/client_service.py
Normal file
283
api/services/client_service.py
Normal file
@@ -0,0 +1,283 @@
|
||||
"""
|
||||
Client service layer for business logic and database operations.
|
||||
|
||||
This module handles all database operations for clients, providing a clean
|
||||
separation between the API routes and data access layer.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from api.models.client import Client
|
||||
from api.schemas.client import ClientCreate, ClientUpdate
|
||||
|
||||
|
||||
def get_clients(db: Session, skip: int = 0, limit: int = 100) -> tuple[list[Client], int]:
|
||||
"""
|
||||
Retrieve a paginated list of clients.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
skip: Number of records to skip (for pagination)
|
||||
limit: Maximum number of records to return
|
||||
|
||||
Returns:
|
||||
tuple: (list of clients, total count)
|
||||
|
||||
Example:
|
||||
```python
|
||||
clients, total = get_clients(db, skip=0, limit=50)
|
||||
print(f"Retrieved {len(clients)} of {total} clients")
|
||||
```
|
||||
"""
|
||||
# Get total count
|
||||
total = db.query(Client).count()
|
||||
|
||||
# Get paginated results, ordered by created_at descending (newest first)
|
||||
clients = (
|
||||
db.query(Client)
|
||||
.order_by(Client.created_at.desc())
|
||||
.offset(skip)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
return clients, total
|
||||
|
||||
|
||||
def get_client_by_id(db: Session, client_id: UUID) -> Client:
|
||||
"""
|
||||
Retrieve a single client by its ID.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
client_id: UUID of the client to retrieve
|
||||
|
||||
Returns:
|
||||
Client: The client object
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if client not found
|
||||
|
||||
Example:
|
||||
```python
|
||||
client = get_client_by_id(db, client_id)
|
||||
print(f"Found client: {client.name}")
|
||||
```
|
||||
"""
|
||||
client = db.query(Client).filter(Client.id == str(client_id)).first()
|
||||
|
||||
if not client:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Client with ID {client_id} not found"
|
||||
)
|
||||
|
||||
return client
|
||||
|
||||
|
||||
def get_client_by_name(db: Session, name: str) -> Optional[Client]:
|
||||
"""
|
||||
Retrieve a client by its name.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
name: Client name to search for
|
||||
|
||||
Returns:
|
||||
Optional[Client]: The client if found, None otherwise
|
||||
|
||||
Example:
|
||||
```python
|
||||
client = get_client_by_name(db, "Acme Corporation")
|
||||
if client:
|
||||
print(f"Found client: {client.type}")
|
||||
```
|
||||
"""
|
||||
return db.query(Client).filter(Client.name == name).first()
|
||||
|
||||
|
||||
def create_client(db: Session, client_data: ClientCreate) -> Client:
|
||||
"""
|
||||
Create a new client.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
client_data: Client creation data
|
||||
|
||||
Returns:
|
||||
Client: The created client object
|
||||
|
||||
Raises:
|
||||
HTTPException: 409 if client with name already exists
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
client_data = ClientCreate(
|
||||
name="Acme Corporation",
|
||||
type="msp_client",
|
||||
primary_contact="John Doe"
|
||||
)
|
||||
client = create_client(db, client_data)
|
||||
print(f"Created client: {client.id}")
|
||||
```
|
||||
"""
|
||||
# Check if client with name already exists
|
||||
existing_client = get_client_by_name(db, client_data.name)
|
||||
if existing_client:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail=f"Client with name '{client_data.name}' already exists"
|
||||
)
|
||||
|
||||
try:
|
||||
# Create new client instance
|
||||
db_client = Client(**client_data.model_dump())
|
||||
|
||||
# Add to database
|
||||
db.add(db_client)
|
||||
db.commit()
|
||||
db.refresh(db_client)
|
||||
|
||||
return db_client
|
||||
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
# Handle unique constraint violations
|
||||
if "name" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail=f"Client with name '{client_data.name}' already exists"
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Database error: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to create client: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
def update_client(db: Session, client_id: UUID, client_data: ClientUpdate) -> Client:
|
||||
"""
|
||||
Update an existing client.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
client_id: UUID of the client to update
|
||||
client_data: Client update data (only provided fields will be updated)
|
||||
|
||||
Returns:
|
||||
Client: The updated client object
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if client not found
|
||||
HTTPException: 409 if update would violate unique constraints
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
update_data = ClientUpdate(
|
||||
primary_contact="Jane Smith",
|
||||
is_active=False
|
||||
)
|
||||
client = update_client(db, client_id, update_data)
|
||||
print(f"Updated client: {client.name}")
|
||||
```
|
||||
"""
|
||||
# Get existing client
|
||||
client = get_client_by_id(db, client_id)
|
||||
|
||||
try:
|
||||
# Update only provided fields
|
||||
update_data = client_data.model_dump(exclude_unset=True)
|
||||
|
||||
# If updating name, check if new name is already taken
|
||||
if "name" in update_data and update_data["name"] != client.name:
|
||||
existing = get_client_by_name(db, update_data["name"])
|
||||
if existing:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail=f"Client with name '{update_data['name']}' already exists"
|
||||
)
|
||||
|
||||
# Apply updates
|
||||
for field, value in update_data.items():
|
||||
setattr(client, field, value)
|
||||
|
||||
db.commit()
|
||||
db.refresh(client)
|
||||
|
||||
return client
|
||||
|
||||
except HTTPException:
|
||||
db.rollback()
|
||||
raise
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
if "name" in str(e.orig):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail="Client with this name already exists"
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Database error: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to update client: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
def delete_client(db: Session, client_id: UUID) -> dict:
|
||||
"""
|
||||
Delete a client by its ID.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
client_id: UUID of the client to delete
|
||||
|
||||
Returns:
|
||||
dict: Success message
|
||||
|
||||
Raises:
|
||||
HTTPException: 404 if client not found
|
||||
HTTPException: 500 if database error occurs
|
||||
|
||||
Example:
|
||||
```python
|
||||
result = delete_client(db, client_id)
|
||||
print(result["message"]) # "Client deleted successfully"
|
||||
```
|
||||
"""
|
||||
# Get existing client (raises 404 if not found)
|
||||
client = get_client_by_id(db, client_id)
|
||||
|
||||
try:
|
||||
db.delete(client)
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"message": "Client deleted successfully",
|
||||
"client_id": str(client_id)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to delete client: {str(e)}"
|
||||
)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user