claudetools/import-js-retrieved.js
Mike Swanson e4392afce9 docs: Document Dataforth test database system and troubleshooting
Investigation and Documentation:
- Discovered and documented test database system on AD2 server
- Created comprehensive TEST_DATABASE_ARCHITECTURE.md with full system details
- Retrieved all key database files from AD2 (import.js, schema.sql, server configs)
- Documented data flow: DOS machines → NAS → AD2 → SQLite → Web interface
- Verified database health: 1,027,517 records, 1075 MB, data dating back to 1990

Database System Architecture:
- SQLite database with Node.js/Express.js web server (port 3000)
- Automated import via Sync-FromNAS.ps1 (runs every 15 minutes)
- 8 log types supported: DSCLOG, 5BLOG, 7BLOG, 8BLOG, PWRLOG, SCTLOG, VASLOG, SHT
- FTS5 full-text search plus comprehensive indexes for performance (see the query sketch after this list)
- API endpoints: search, stats, export, datasheet generation
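
For reference, a search against the FTS5 index boils down to a MATCH query joined back to the base table. This is a minimal sketch only: the FTS table name (test_records_fts) and the rowid linkage are assumptions, not taken from the retrieved schema.

    const Database = require('better-sqlite3');

    // Readonly handle; path per the Technical Details section below.
    const db = new Database('C:/Shares/testdatadb/database/testdata.db', { readonly: true });

    // Hypothetical FTS5 table name; the real name lives in schema-retrieved.sql.
    const rows = db.prepare(`
      SELECT t.model_number, t.serial_number, t.test_date, t.overall_result
      FROM test_records_fts
      JOIN test_records t ON t.rowid = test_records_fts.rowid
      WHERE test_records_fts MATCH ?
      ORDER BY t.test_date DESC
      LIMIT 20
    `).all('DSC1234');

    console.log(rows);
    db.close();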

Troubleshooting Scripts Created:
- Database diagnostics: check-db-simple.ps1, test-db-directly.ps1 (the core check is sketched after this list)
- Server status checks: check-node-running.ps1, check-db-server.ps1
- Performance analysis: check-db-performance.ps1, check-wal-files.ps1
- API testing: test-api-endpoint.ps1, test-query.js
- Import monitoring: check-new-records.ps1
- Database optimization attempts: api-js-optimized.js, api-js-fixed.js
- Deployment scripts: deploy-db-optimization.ps1, deploy-db-fix.ps1, restore-original.ps1
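
The core of the database diagnostics above (in the spirit of check-db-simple.ps1 and test-query.js, whose exact contents are not reproduced here) is just a readonly open plus an aggregate query or two:

    const Database = require('better-sqlite3');

    const db = new Database('C:/Shares/testdatadb/database/testdata.db', { readonly: true });

    // Row count and freshness in one pass; a stale MAX(test_date) points at the sync job.
    const { count, newest } = db
      .prepare('SELECT COUNT(*) AS count, MAX(test_date) AS newest FROM test_records')
      .get();

    console.log(`records: ${count}, newest test_date: ${newest}`);
    db.close();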

Key Findings:
- Database file healthy and queryable (verified with test-query.js)
- Node.js server not running (port 3000 closed) - root cause of web interface issues
- Database last updated 8 days ago (01/13/2026) - automated sync may be broken
- Attempted performance optimization (enabling WAL journal mode) proved incompatible with the server's readonly database connections (illustrated after this list)
- Original api.js restored from backup after optimization conflicts
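
A minimal sketch of the WAL finding, assuming better-sqlite3 semantics: switching journal modes writes to the database file, so it fails over the readonly handles the API server opens.

    const Database = require('better-sqlite3');

    const db = new Database('C:/Shares/testdatadb/database/testdata.db', { readonly: true });

    try {
      // Changing the journal mode is a write to the database header,
      // which a readonly connection is not allowed to perform.
      db.pragma('journal_mode = WAL');
    } catch (err) {
      console.error(`WAL switch rejected: ${err.message}`);
    } finally {
      db.close();
    }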

Retrieved Documentation:
- QUICKSTART-retrieved.md: Quick start guide for database server
- SESSION_NOTES-retrieved.md: Complete session notes from database creation
- Sync-FromNAS-retrieved.ps1: Full sync script with database import logic
- import-js-retrieved.js: Node.js import script (12,774 bytes)
- schema-retrieved.sql: SQLite schema with FTS5 triggers
- server-js-retrieved.js: Express.js server configuration
- api-js-retrieved.js: API routes and endpoints
- package-retrieved.json: Node.js dependencies

Action Items Identified:
1. Start Node.js server on AD2 to restore web interface functionality
2. Investigate why automated sync hasn't updated database in 8 days
3. Check Windows Task Scheduler for Sync-FromNAS.ps1 scheduled task
4. Run a manual import to catch up on the 8 days of missing test data if needed (see the sketch after this list)
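
Item 4 can lean on the importer's own exports (import.js already ships importFiles for batch incremental imports, and INSERT OR IGNORE makes re-importing previously seen records safe). A sketch, with the directory walk and the 8-day window as illustrative choices:

    const fs = require('fs');
    const path = require('path');
    const { importFiles } = require('./import');

    // Gather DAT/SHT files modified within the stale window and re-import them.
    const cutoff = Date.now() - 8 * 24 * 60 * 60 * 1000;
    const recent = [];
    (function walk(dir) {
      for (const item of fs.readdirSync(dir, { withFileTypes: true })) {
        const full = path.join(dir, item.name);
        if (item.isDirectory()) walk(full);
        else if (/\.(DAT|SHT)$/i.test(item.name) && fs.statSync(full).mtimeMs >= cutoff) {
          recent.push(full);
        }
      }
    })('C:/Shares/test');

    importFiles(recent);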

Technical Details:
- Database path: C:\Shares\testdatadb\database\testdata.db
- Web interface: http://192.168.0.6:3000 (when running; see the probe sketch after this list)
- Database size: 1075.14 MB (1,127,362,560 bytes)
- Total records: 1,027,517 (slight variance from original 1,030,940)
- Pass rate: 99.82% (1,029,046 passed, 1,888 failed; these pass/fail counts track the original total above rather than the current record count)
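
A quick reachability probe for the web interface (Node 18+ global fetch; the /api/stats route is an assumption, since the commit names a stats endpoint but not its path):

    const BASE = 'http://192.168.0.6:3000';

    // Hypothetical route; confirm against api-js-retrieved.js.
    fetch(`${BASE}/api/stats`)
      .then(res => console.log(`server up, HTTP ${res.status}`))
      .catch(err => console.error(`server unreachable: ${err.message}`));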

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-21 16:38:54 -07:00


/**
 * Data Import Script
 * Imports test data from DAT and SHT files into SQLite database
 */
const fs = require('fs');
const path = require('path');
const Database = require('better-sqlite3');

const { parseMultilineFile, extractTestStation } = require('../parsers/multiline');
const { parseCsvFile } = require('../parsers/csvline');
const { parseShtFile } = require('../parsers/shtfile');

// Configuration
const DB_PATH = path.join(__dirname, 'testdata.db');
const SCHEMA_PATH = path.join(__dirname, 'schema.sql');

// Data source paths
const TEST_PATH = 'C:/Shares/test';
const RECOVERY_PATH = 'C:/Shares/Recovery-TEST';
const HISTLOGS_PATH = path.join(TEST_PATH, 'Ate/HISTLOGS');

// Log types and their parsers
const LOG_TYPES = {
  'DSCLOG': { parser: 'multiline', ext: '.DAT' },
  '5BLOG': { parser: 'multiline', ext: '.DAT' },
  '8BLOG': { parser: 'multiline', ext: '.DAT' },
  'PWRLOG': { parser: 'multiline', ext: '.DAT' },
  'SCTLOG': { parser: 'multiline', ext: '.DAT' },
  'VASLOG': { parser: 'multiline', ext: '.DAT' },
  '7BLOG': { parser: 'csvline', ext: '.DAT' }
};

// Initialize database
function initDatabase() {
  console.log('Initializing database...');
  const db = new Database(DB_PATH);

  // Read and execute schema
  const schema = fs.readFileSync(SCHEMA_PATH, 'utf8');
  db.exec(schema);

  console.log('Database initialized.');
  return db;
}

// Prepare insert statement
function prepareInsert(db) {
  return db.prepare(`
    INSERT OR IGNORE INTO test_records
      (log_type, model_number, serial_number, test_date, test_station, overall_result, raw_data, source_file)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `);
}

// Find all files of a specific type in a directory
function findFiles(dir, pattern, recursive = true) {
  const results = [];
  try {
    if (!fs.existsSync(dir)) return results;
    const items = fs.readdirSync(dir, { withFileTypes: true });
    for (const item of items) {
      const fullPath = path.join(dir, item.name);
      if (item.isDirectory() && recursive) {
        results.push(...findFiles(fullPath, pattern, recursive));
      } else if (item.isFile() && pattern.test(item.name)) {
        results.push(fullPath);
      }
    }
  } catch (err) {
    // Ignore permission errors
  }
  return results;
}

// Import records from a file
function importFile(db, insertStmt, filePath, logType, parser) {
  let records = [];
  const testStation = extractTestStation(filePath);
  try {
    switch (parser) {
      case 'multiline':
        records = parseMultilineFile(filePath, logType, testStation);
        break;
      case 'csvline':
        records = parseCsvFile(filePath, testStation);
        break;
      case 'shtfile':
        records = parseShtFile(filePath, testStation);
        break;
    }

    let imported = 0;
    for (const record of records) {
      try {
        const result = insertStmt.run(
          record.log_type,
          record.model_number,
          record.serial_number,
          record.test_date,
          record.test_station,
          record.overall_result,
          record.raw_data,
          record.source_file
        );
        if (result.changes > 0) imported++;
      } catch (err) {
        // Duplicate or constraint error - skip
      }
    }
    return { total: records.length, imported };
  } catch (err) {
    console.error(`Error importing ${filePath}: ${err.message}`);
    return { total: 0, imported: 0 };
  }
}

// Import from HISTLOGS (master consolidated logs)
function importHistlogs(db, insertStmt) {
  console.log('\n=== Importing from HISTLOGS ===');
  let totalImported = 0;
  let totalRecords = 0;

  for (const [logType, config] of Object.entries(LOG_TYPES)) {
    const logDir = path.join(HISTLOGS_PATH, logType);
    if (!fs.existsSync(logDir)) {
      console.log(` ${logType}: directory not found`);
      continue;
    }
    const files = findFiles(logDir, new RegExp(`\\${config.ext}$`, 'i'), false);
    console.log(` ${logType}: found ${files.length} files`);
    for (const file of files) {
      const { total, imported } = importFile(db, insertStmt, file, logType, config.parser);
      totalRecords += total;
      totalImported += imported;
    }
  }

  console.log(` HISTLOGS total: ${totalImported} records imported (${totalRecords} parsed)`);
  return totalImported;
}

// Import from test station logs
function importStationLogs(db, insertStmt, basePath, label) {
  console.log(`\n=== Importing from ${label} ===`);
  let totalImported = 0;
  let totalRecords = 0;

  // Find all test station directories (TS-1, TS-27, TS-8L, TS-10R, etc.)
  const stationPattern = /^TS-\d+[LR]?$/i;
  let stations = [];
  try {
    const items = fs.readdirSync(basePath, { withFileTypes: true });
    stations = items
      .filter(i => i.isDirectory() && stationPattern.test(i.name))
      .map(i => i.name);
  } catch (err) {
    console.log(` Error reading ${basePath}: ${err.message}`);
    return 0;
  }
  console.log(` Found stations: ${stations.join(', ')}`);

  for (const station of stations) {
    const logsDir = path.join(basePath, station, 'LOGS');
    if (!fs.existsSync(logsDir)) continue;
    for (const [logType, config] of Object.entries(LOG_TYPES)) {
      const logDir = path.join(logsDir, logType);
      if (!fs.existsSync(logDir)) continue;
      const files = findFiles(logDir, new RegExp(`\\${config.ext}$`, 'i'), false);
      for (const file of files) {
        const { total, imported } = importFile(db, insertStmt, file, logType, config.parser);
        totalRecords += total;
        totalImported += imported;
      }
    }
  }

  // Also import SHT files
  const shtFiles = findFiles(basePath, /\.SHT$/i, true);
  console.log(` Found ${shtFiles.length} SHT files`);
  for (const file of shtFiles) {
    const { total, imported } = importFile(db, insertStmt, file, 'SHT', 'shtfile');
    totalRecords += total;
    totalImported += imported;
  }

  console.log(` ${label} total: ${totalImported} records imported (${totalRecords} parsed)`);
  return totalImported;
}

// Import from Recovery-TEST backups (newest first)
function importRecoveryBackups(db, insertStmt) {
  console.log('\n=== Importing from Recovery-TEST backups ===');
  if (!fs.existsSync(RECOVERY_PATH)) {
    console.log(' Recovery-TEST directory not found');
    return 0;
  }

  // Get backup dates, sort newest first
  const backups = fs.readdirSync(RECOVERY_PATH, { withFileTypes: true })
    .filter(i => i.isDirectory() && /^\d{2}-\d{2}-\d{2}$/.test(i.name))
    .map(i => i.name)
    .sort()
    .reverse();
  console.log(` Found backup dates: ${backups.join(', ')}`);

  let totalImported = 0;
  for (const backup of backups) {
    const backupPath = path.join(RECOVERY_PATH, backup);
    totalImported += importStationLogs(db, insertStmt, backupPath, `Recovery-TEST/${backup}`);
  }
  return totalImported;
}

// Main import function
async function runImport() {
  console.log('========================================');
  console.log('Test Data Import');
  console.log('========================================');
  console.log(`Database: ${DB_PATH}`);
  console.log(`Start time: ${new Date().toISOString()}`);

  const db = initDatabase();
  const insertStmt = prepareInsert(db);
  let grandTotal = 0;

  // Use a transaction for performance
  const importAll = db.transaction(() => {
    // 1. Import HISTLOGS first (authoritative)
    grandTotal += importHistlogs(db, insertStmt);
    // 2. Import Recovery backups (newest first)
    grandTotal += importRecoveryBackups(db, insertStmt);
    // 3. Import current test folder
    grandTotal += importStationLogs(db, insertStmt, TEST_PATH, 'test');
  });
  importAll();

  // Get final stats
  const stats = db.prepare('SELECT COUNT(*) as count FROM test_records').get();
  console.log('\n========================================');
  console.log('Import Complete');
  console.log('========================================');
  console.log(`Total records in database: ${stats.count}`);
  console.log(`End time: ${new Date().toISOString()}`);
  db.close();
}

// Import a single file (for incremental imports from sync)
function importSingleFile(filePath) {
  console.log(`Importing: ${filePath}`);
  const db = new Database(DB_PATH);
  const insertStmt = prepareInsert(db);

  // Determine log type from path
  let logType = null;
  let parser = null;
  for (const [type, config] of Object.entries(LOG_TYPES)) {
    if (filePath.includes(type)) {
      logType = type;
      parser = config.parser;
      break;
    }
  }
  if (!logType) {
    // Check for SHT files
    if (/\.SHT$/i.test(filePath)) {
      logType = 'SHT';
      parser = 'shtfile';
    } else {
      console.log(` Unknown log type for: ${filePath}`);
      db.close();
      return { total: 0, imported: 0 };
    }
  }

  const result = importFile(db, insertStmt, filePath, logType, parser);
  console.log(` Imported ${result.imported} of ${result.total} records`);
  db.close();
  return result;
}

// Import multiple files (for batch incremental imports)
function importFiles(filePaths) {
  console.log(`\n========================================`);
  console.log(`Incremental Import: ${filePaths.length} files`);
  console.log(`========================================`);
  const db = new Database(DB_PATH);
  const insertStmt = prepareInsert(db);
  let totalImported = 0;
  let totalRecords = 0;

  const importBatch = db.transaction(() => {
    for (const filePath of filePaths) {
      // Determine log type from path
      let logType = null;
      let parser = null;
      for (const [type, config] of Object.entries(LOG_TYPES)) {
        if (filePath.includes(type)) {
          logType = type;
          parser = config.parser;
          break;
        }
      }
      if (!logType) {
        if (/\.SHT$/i.test(filePath)) {
          logType = 'SHT';
          parser = 'shtfile';
        } else {
          console.log(` Skipping unknown type: ${filePath}`);
          continue;
        }
      }
      const { total, imported } = importFile(db, insertStmt, filePath, logType, parser);
      totalRecords += total;
      totalImported += imported;
      console.log(` ${path.basename(filePath)}: ${imported}/${total} records`);
    }
  });
  importBatch();

  console.log(`\nTotal: ${totalImported} records imported (${totalRecords} parsed)`);
  db.close();
  return { total: totalRecords, imported: totalImported };
}

// Run if called directly
if (require.main === module) {
  // Check for command line arguments
  const args = process.argv.slice(2);
  if (args.length > 0 && args[0] === '--file') {
    // Import specific file(s)
    const files = args.slice(1);
    if (files.length === 0) {
      console.log('Usage: node import.js --file <file1> [file2] ...');
      process.exit(1);
    }
    importFiles(files);
  } else if (args.length > 0 && args[0] === '--help') {
    console.log('Usage:');
    console.log('  node import.js              Full import from all sources');
    console.log('  node import.js --file <f>   Import specific file(s)');
    process.exit(0);
  } else {
    // Full import
    runImport().catch(console.error);
  }
}

module.exports = { runImport, importSingleFile, importFiles };