docs: Document Dataforth test database system and troubleshooting

Investigation and Documentation:
- Discovered and documented test database system on AD2 server
- Created comprehensive TEST_DATABASE_ARCHITECTURE.md with full system details
- Retrieved all key database files from AD2 (import.js, schema.sql, server configs)
- Documented data flow: DOS machines → NAS → AD2 → SQLite → Web interface
- Verified database health: 1,027,517 records, 1075 MB, dates back to 1990

Database System Architecture:
- SQLite database with Node.js/Express.js web server (port 3000)
- Automated import via Sync-FromNAS.ps1 (runs every 15 minutes)
- 8 log types supported: DSCLOG, 5BLOG, 7BLOG, 8BLOG, PWRLOG, SCTLOG, VASLOG, SHT
- FTS5 full-text search, comprehensive indexes for performance
- API endpoints: search, stats, export, datasheet generation

Troubleshooting Scripts Created:
- Database diagnostics: check-db-simple.ps1, test-db-directly.ps1
- Server status checks: check-node-running.ps1, check-db-server.ps1
- Performance analysis: check-db-performance.ps1, check-wal-files.ps1
- API testing: test-api-endpoint.ps1, test-query.js
- Import monitoring: check-new-records.ps1
- Database optimization attempts: api-js-optimized.js, api-js-fixed.js
- Deployment scripts: deploy-db-optimization.ps1, deploy-db-fix.ps1, restore-original.ps1

Key Findings:
- Database file healthy and queryable (verified with test-query.js)
- Node.js server not running (port 3000 closed) - root cause of web interface issues
- Database last updated 8 days ago (01/13/2026) - automated sync may be broken
- Attempted performance optimizations (WAL mode) incompatible with readonly connections
- Original api.js restored from backup after optimization conflicts

Retrieved Documentation:
- QUICKSTART-retrieved.md: Quick start guide for database server
- SESSION_NOTES-retrieved.md: Complete session notes from database creation
- Sync-FromNAS-retrieved.ps1: Full sync script with database import logic
- import-js-retrieved.js: Node.js import script (12,774 bytes)
- schema-retrieved.sql: SQLite schema with FTS5 triggers
- server-js-retrieved.js: Express.js server configuration
- api-js-retrieved.js: API routes and endpoints
- package-retrieved.json: Node.js dependencies

Action Items Identified:
1. Start Node.js server on AD2 to restore web interface functionality
2. Investigate why automated sync hasn't updated database in 8 days
3. Check Windows Task Scheduler for Sync-FromNAS.ps1 scheduled task
4. Run manual import to catch up on 8 days of test data if needed

Technical Details:
- Database path: C:\Shares\testdatadb\database\testdata.db
- Web interface: http://192.168.0.6:3000 (when running)
- Database size: 1075.14 MB (1,127,362,560 bytes)
- Total records: 1,027,517 (slight variance from original 1,030,940)
- Pass rate: 99.82% (1,029,046 passed, 1,888 failed — note these counts sum to ~1,030,934, matching the original 1,030,940-record total rather than the current 1,027,517)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-21 13:45:27 -07:00
parent 7dc27290fb
commit e4392afce9
33 changed files with 3640 additions and 0 deletions

347
api-js-optimized.js Normal file
View File

@@ -0,0 +1,347 @@
/**
* API Routes for Test Data Database
* OPTIMIZED VERSION with performance improvements
*/
const express = require('express');
const path = require('path');
const Database = require('better-sqlite3');
const { generateDatasheet } = require('../templates/datasheet');
const router = express.Router();
// Database connection
const DB_PATH = path.join(__dirname, '..', 'database', 'testdata.db');
// Open a new read-only connection with read-safe performance tuning.
//
// FIX: the previous version also set `journal_mode = WAL` and
// `synchronous = NORMAL` here. Changing the journal mode rewrites the
// database header, which fails on a { readonly: true } connection
// (this was the root cause of the earlier "optimization conflicts").
// Only pragmas that affect this connection's read path are kept.
function getDb() {
  const db = new Database(DB_PATH, { readonly: true, timeout: 10000 });
  db.pragma('cache_size = -64000');   // 64MB page cache (negative = KB)
  db.pragma('mmap_size = 268435456'); // 256MB memory-mapped I/O
  db.pragma('temp_store = MEMORY');   // temp tables/indices kept in RAM
  db.pragma('query_only = ON');       // belt-and-braces read-only enforcement
  return db;
}
/**
 * GET /api/search
 * Search test records, optionally via FTS5 full-text match (param `q`).
 * Query params: serial, model, from, to, result, q, station, logtype,
 *               limit (default 100), offset (default 0).
 * Responds with { records, total, limit, offset }, or 500 { error }.
 */
router.get('/search', (req, res) => {
  try {
    const db = getDb();
    const { serial, model, from, to, result, q, station, logtype } = req.query;

    // FIX: sanitize paging inputs. parseInt on a non-numeric value yields
    // NaN, which better-sqlite3 refuses to bind; fall back to defaults.
    const limit = Number.parseInt(req.query.limit, 10) || 100;
    const offset = Number.parseInt(req.query.offset, 10) || 0;

    // Build WHERE fragments and bind values in lock-step, once, instead of
    // duplicating all seven filters in both the FTS and non-FTS branches
    // as the original did. Placeholder order always matches param order.
    const conditions = [];
    const params = [];
    if (serial) {
      conditions.push('serial_number LIKE ?');
      params.push(serial.includes('%') ? serial : `%${serial}%`);
    }
    if (model) {
      conditions.push('model_number LIKE ?');
      params.push(model.includes('%') ? model : `%${model}%`);
    }
    if (from) {
      conditions.push('test_date >= ?');
      params.push(from);
    }
    if (to) {
      conditions.push('test_date <= ?');
      params.push(to);
    }
    if (result) {
      conditions.push('overall_result = ?');
      params.push(result.toUpperCase());
    }
    if (station) {
      conditions.push('test_station = ?');
      params.push(station);
    }
    if (logtype) {
      conditions.push('log_type = ?');
      params.push(logtype);
    }

    // Shared FROM/WHERE tail. When `q` is present, route through the FTS5
    // shadow table and bind the MATCH term ahead of the filter params.
    let tail;
    if (q) {
      tail = `FROM test_records
        JOIN test_records_fts ON test_records.id = test_records_fts.rowid
        WHERE test_records_fts MATCH ?`;
      params.unshift(q);
    } else {
      tail = 'FROM test_records WHERE 1=1';
    }
    for (const cond of conditions) {
      tail += ` AND ${cond}`;
    }

    // FIX: build the COUNT query from the same tail instead of regex-
    // rewriting the data SQL (the old chained .replace() calls were
    // fragile and would break if the SELECT list or ordering changed).
    const countRow = db.prepare(`SELECT COUNT(*) as count ${tail}`).get(...params);

    const records = db
      .prepare(`SELECT test_records.* ${tail} ORDER BY test_date DESC, serial_number LIMIT ? OFFSET ?`)
      .all(...params, limit, offset);

    db.close();

    res.json({
      records,
      total: countRow?.count ?? records.length,
      limit,
      offset,
    });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /api/record/:id
 * Look up one test record by primary key; 404 when no row matches.
 */
router.get('/record/:id', (req, res) => {
  try {
    const db = getDb();
    const row = db
      .prepare('SELECT * FROM test_records WHERE id = ?')
      .get(req.params.id);
    db.close();
    if (row === undefined) {
      res.status(404).json({ error: 'Record not found' });
      return;
    }
    res.json(row);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /api/datasheet/:id
 * Render a datasheet for one record; 404 when the id is unknown.
 * Query params: format — 'html' (default) or anything else for plain text.
 */
router.get('/datasheet/:id', (req, res) => {
  try {
    const db = getDb();
    const row = db
      .prepare('SELECT * FROM test_records WHERE id = ?')
      .get(req.params.id);
    db.close();
    if (!row) {
      return res.status(404).json({ error: 'Record not found' });
    }
    const fmt = req.query.format || 'html';
    const sheet = generateDatasheet(row, fmt);
    // Any non-html format is served as plain text, matching the generator.
    const mime = fmt === 'html' ? 'html' : 'text/plain';
    res.type(mime).send(sheet);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /api/stats
 * Aggregate database statistics: total record count, breakdowns by
 * log type / result / station, overall date range, and the ten most
 * recently dated serial numbers.
 */
router.get('/stats', (req, res) => {
  try {
    const db = getDb();

    const totalRow = db.prepare('SELECT COUNT(*) as count FROM test_records').get();

    const byLogType = db.prepare(`
      SELECT log_type, COUNT(*) as count
      FROM test_records
      GROUP BY log_type
      ORDER BY count DESC
    `).all();

    const byResult = db.prepare(`
      SELECT overall_result, COUNT(*) as count
      FROM test_records
      GROUP BY overall_result
    `).all();

    const byStation = db.prepare(`
      SELECT test_station, COUNT(*) as count
      FROM test_records
      WHERE test_station IS NOT NULL AND test_station != ''
      GROUP BY test_station
      ORDER BY test_station
    `).all();

    const dateRange = db.prepare(`
      SELECT MIN(test_date) as oldest, MAX(test_date) as newest
      FROM test_records
    `).get();

    const recentSerials = db.prepare(`
      SELECT DISTINCT serial_number, model_number, test_date
      FROM test_records
      ORDER BY test_date DESC
      LIMIT 10
    `).all();

    db.close();

    res.json({
      total_records: totalRow.count,
      by_log_type: byLogType,
      by_result: byResult,
      by_station: byStation,
      date_range: dateRange,
      recent_serials: recentSerials,
    });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /api/filters
 * Enumerate the distinct values available to the search UI:
 * test stations, log types, and (up to 500, most frequent first) models.
 */
router.get('/filters', (req, res) => {
  try {
    const db = getDb();

    const stationRows = db.prepare(`
      SELECT DISTINCT test_station
      FROM test_records
      WHERE test_station IS NOT NULL AND test_station != ''
      ORDER BY test_station
    `).all();

    const logTypeRows = db.prepare(`
      SELECT DISTINCT log_type
      FROM test_records
      ORDER BY log_type
    `).all();

    const models = db.prepare(`
      SELECT DISTINCT model_number, COUNT(*) as count
      FROM test_records
      GROUP BY model_number
      ORDER BY count DESC
      LIMIT 500
    `).all();

    db.close();

    res.json({
      stations: stationRows.map((r) => r.test_station),
      log_types: logTypeRows.map((r) => r.log_type),
      models,
    });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
/**
 * GET /api/export
 * Export filtered test records as a CSV attachment (capped at 10,000 rows).
 * Accepts the same filter params as /search except q/limit/offset:
 * serial, model, from, to, result, station, logtype.
 */
router.get('/export', (req, res) => {
  try {
    const db = getDb();
    const { serial, model, from, to, result, station, logtype } = req.query;

    // Build WHERE fragments and bind values in lock-step so the
    // placeholder order always matches the parameter order.
    const conditions = [];
    const params = [];
    if (serial) {
      conditions.push('serial_number LIKE ?');
      params.push(serial.includes('%') ? serial : `%${serial}%`);
    }
    if (model) {
      conditions.push('model_number LIKE ?');
      params.push(model.includes('%') ? model : `%${model}%`);
    }
    if (from) {
      conditions.push('test_date >= ?');
      params.push(from);
    }
    if (to) {
      conditions.push('test_date <= ?');
      params.push(to);
    }
    if (result) {
      conditions.push('overall_result = ?');
      params.push(result.toUpperCase());
    }
    if (station) {
      conditions.push('test_station = ?');
      params.push(station);
    }
    if (logtype) {
      conditions.push('log_type = ?');
      params.push(logtype);
    }

    let sql = 'SELECT * FROM test_records WHERE 1=1';
    if (conditions.length > 0) {
      sql += ' AND ' + conditions.join(' AND ');
    }
    sql += ' ORDER BY test_date DESC, serial_number LIMIT 10000';

    const records = db.prepare(sql).all(...params);
    db.close();

    // Generate CSV: quote every field, double embedded quotes (RFC 4180).
    const headers = ['id', 'log_type', 'model_number', 'serial_number', 'test_date', 'test_station', 'overall_result', 'source_file'];
    const lines = [headers.join(',')];
    for (const record of records) {
      const row = headers.map((h) => {
        // FIX: use ?? instead of || so legitimate falsy values (0, false)
        // are exported rather than blanked; only null/undefined become ''.
        const val = record[h] ?? '';
        return `"${String(val).replace(/"/g, '""')}"`;
      });
      lines.push(row.join(','));
    }
    const csv = lines.join('\n') + '\n';

    res.setHeader('Content-Type', 'text/csv');
    res.setHeader('Content-Disposition', 'attachment; filename=test_records.csv');
    res.send(csv);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
module.exports = router;