refactor: simplify codebase and replace external dependencies with Bun built-ins

Backend changes:
- Merge duplicate award logic (calculatePointsAwardProgress + getPointsAwardEntityBreakdown)
- Simplify LoTW service (merge syncQSOs functions, simplify polling)
- Remove job queue abstraction (hardcode LoTW sync, remove processor registry)
- Consolidate config files (database.js, logger.js, jwt.js → single config.js; see the sketch after this list)
- Replace bcrypt with Bun.password.hash/verify
- Replace Pino logger with console-based logger
- Fix: export syncQSOs and getLastLoTWQSLDate for job queue imports
- Fix: correct database path resolution using new URL()
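A minimal sketch of what the consolidated config.js might look like after this refactor; the SQLite file location, env variable names, and exact logger shape are assumptions for illustration, not taken from the commit:

import { Database } from 'bun:sqlite';
import { drizzle } from 'drizzle-orm/bun-sqlite';

// Resolve the database file relative to this module rather than process.cwd()
// (the '../data/app.db' location is assumed).
const dbPath = new URL('../data/app.db', import.meta.url).pathname;
export const db = drizzle(new Database(dbPath));

// Console-based logger with the same call shape as before: (message, meta).
export const logger = {
  debug: (msg, meta) => console.debug(msg, meta ?? ''),
  info: (msg, meta) => console.info(msg, meta ?? ''),
  warn: (msg, meta) => console.warn(msg, meta ?? ''),
  error: (msg, meta) => console.error(msg, meta ?? ''),
};

// JWT settings formerly in jwt.js (names assumed).
export const jwtConfig = {
  secret: process.env.JWT_SECRET,
  expiresIn: '7d',
};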

Frontend changes:
- Simplify auth store (remove localStorage wrappers, 222→109 lines)
- Consolidate API layer (remove verbose JSDoc, 180→80 lines; see the sketch after this list)
- Add shared UI components (Loading, ErrorDisplay, BackButton)
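As a rough illustration of the consolidated API layer (hypothetical — the endpoint paths, token argument, and error handling below are assumptions, not the project's actual api module):

// Thin shared fetch wrapper; all specifics are illustrative.
const BASE = '/api';

export async function request(path, { method = 'GET', body, token } = {}) {
  const res = await fetch(`${BASE}${path}`, {
    method,
    headers: {
      'Content-Type': 'application/json',
      ...(token ? { Authorization: `Bearer ${token}` } : {}),
    },
    body: body ? JSON.stringify(body) : undefined,
  });
  if (!res.ok) throw new Error(`${method} ${path} failed with ${res.status}`);
  return res.json();
}

// Hypothetical usage:
//   const awards = await request('/awards', { token });
//   await request('/lotw/sync', { method: 'POST', token });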

Dependencies:
- Remove bcrypt (replaced with Bun.password)
- Remove pino and pino-pretty (replaced with console logger)

Total: ~445 lines removed (net), 3 dependencies removed
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-16 18:27:10 +01:00
parent 41ccb5c185
commit 0db6b68f48
13 changed files with 520 additions and 813 deletions

View File

@@ -1,17 +1,14 @@
import bcrypt from 'bcrypt';
import { eq } from 'drizzle-orm';
import { db } from '../config/database.js';
import { db } from '../config.js';
import { users } from '../db/schema/index.js';
const SALT_ROUNDS = 10;
/**
* Hash a password using bcrypt
* Hash a password using Bun's built-in password hashing
* @param {string} password - Plain text password
* @returns {Promise<string>} Hashed password
*/
async function hashPassword(password) {
return bcrypt.hash(password, SALT_ROUNDS);
return Bun.password.hash(password);
}
/**
@@ -21,7 +18,7 @@ async function hashPassword(password) {
* @returns {Promise<boolean>} True if password matches
*/
async function verifyPassword(password, hash) {
return bcrypt.compare(password, hash);
return Bun.password.verify(password, hash);
}
/**
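One migration note on the swap above (based on Bun's documented password API, not verified against this codebase): Bun.password.hash defaults to argon2id, and Bun.password.verify infers the algorithm from the hash string, so bcrypt hashes already stored by the old code should keep verifying without a rehash. A quick illustrative check:

// New hashes default to argon2id; verify() detects the algorithm from the hash,
// so hashes in the old "$2b$..." bcrypt format keep working.
const argonHash = await Bun.password.hash('s3cret');                           // "$argon2id$..."
const bcryptHash = await Bun.password.hash('s3cret', { algorithm: 'bcrypt' }); // "$2b$..."
console.log(await Bun.password.verify('s3cret', argonHash));  // true
console.log(await Bun.password.verify('s3cret', bcryptHash)); // true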

View File

@@ -1,7 +1,6 @@
import { db } from '../config/database.js';
import { db, logger } from '../config.js';
import { qsos } from '../db/schema/index.js';
import { eq, and, or, desc, sql } from 'drizzle-orm';
import logger from '../config/logger.js';
import { readFileSync } from 'fs';
import { join } from 'path';
@@ -63,12 +62,20 @@ export async function getAllAwards() {
}
/**
* Normalize award rules to a consistent format
* Calculate award progress for a user
* @param {number} userId - User ID
* @param {Object} award - Award definition
* @param {Object} options - Options
* @param {boolean} options.includeDetails - Include detailed entity breakdown
*/
function normalizeAwardRules(rules) {
export async function calculateAwardProgress(userId, award, options = {}) {
const { includeDetails = false } = options;
let { rules } = award;
// Normalize rules inline to handle different formats
// Handle "filtered" type awards (like DXCC CW)
if (rules.type === 'filtered' && rules.baseRule) {
return {
rules = {
type: 'entity',
entityType: rules.baseRule.entityType,
target: rules.baseRule.target,
@@ -76,11 +83,9 @@ function normalizeAwardRules(rules) {
filters: rules.filters,
};
}
// Handle "counter" type awards (like RS-44)
// These count unique callsigns instead of entities
if (rules.type === 'counter') {
return {
else if (rules.type === 'counter') {
rules = {
type: 'entity',
entityType: rules.countBy === 'qso' ? 'callsign' : 'callsign',
target: rules.target,
@@ -88,30 +93,13 @@ function normalizeAwardRules(rules) {
filters: rules.filters,
};
}
// Handle "points" type awards (station-specific point values)
// Keep as-is but validate stations array exists
if (rules.type === 'points') {
// Validate "points" type awards
else if (rules.type === 'points') {
if (!rules.stations || !Array.isArray(rules.stations)) {
logger.warn('Point-based award missing stations array');
}
return rules; // Return as-is for special handling
}
return rules;
}
/**
* Calculate award progress for a user
* @param {number} userId - User ID
* @param {Object} award - Award definition
*/
export async function calculateAwardProgress(userId, award) {
let { rules } = award;
// Normalize rules to handle different formats
rules = normalizeAwardRules(rules);
logger.debug('Calculating award progress', {
userId,
awardId: award.id,
@@ -122,7 +110,7 @@ export async function calculateAwardProgress(userId, award) {
// Handle point-based awards
if (rules.type === 'points') {
return calculatePointsAwardProgress(userId, rules);
return calculatePointsAwardProgress(userId, award, { includeDetails });
}
// Get all QSOs for user
@@ -174,8 +162,14 @@ export async function calculateAwardProgress(userId, award) {
* - "perBandMode": each unique (callsign, band, mode) combination earns points
* - "perStation": each unique station earns points once
* - "perQso": every confirmed QSO earns points
* @param {number} userId - User ID
* @param {Object} award - Award definition
* @param {Object} options - Options
* @param {boolean} options.includeDetails - Include detailed entity breakdown
*/
async function calculatePointsAwardProgress(userId, rules) {
async function calculatePointsAwardProgress(userId, award, options = {}) {
const { includeDetails = false } = options;
const { rules } = award;
const { stations, target, countMode = 'perStation' } = rules;
// Create a map of callsign -> points for quick lookup
@@ -196,162 +190,12 @@ async function calculatePointsAwardProgress(userId, rules) {
.from(qsos)
.where(eq(qsos.userId, userId));
const workedStations = new Set(); // Unique callsigns worked
const workedStations = new Set();
let totalPoints = 0;
const stationDetails = [];
if (countMode === 'perBandMode') {
// Count unique (callsign, band, mode) combinations
const workedCombinations = new Set();
const confirmedCombinations = new Map();
for (const qso of allQSOs) {
const callsign = qso.callsign?.toUpperCase();
if (!callsign) continue;
const points = stationPoints.get(callsign);
if (!points) continue;
const band = qso.band || 'Unknown';
const mode = qso.mode || 'Unknown';
const combinationKey = `${callsign}/${band}/${mode}`;
workedStations.add(callsign);
if (!workedCombinations.has(combinationKey)) {
workedCombinations.add(combinationKey);
stationDetails.push({
callsign,
band,
mode,
points,
worked: true,
confirmed: false,
qsoDate: qso.qsoDate,
});
}
if (qso.lotwQslRstatus === 'Y' && !confirmedCombinations.has(combinationKey)) {
confirmedCombinations.set(combinationKey, points);
const detail = stationDetails.find((c) =>
c.callsign === callsign && c.band === band && c.mode === mode
);
if (detail) {
detail.confirmed = true;
detail.lotwQslRdate = qso.lotwQslRdate;
}
}
}
totalPoints = Array.from(confirmedCombinations.values()).reduce((sum, p) => sum + p, 0);
} else if (countMode === 'perStation') {
// Count unique stations only
const workedStationsMap = new Map();
for (const qso of allQSOs) {
const callsign = qso.callsign?.toUpperCase();
if (!callsign) continue;
const points = stationPoints.get(callsign);
if (!points) continue;
workedStations.add(callsign);
if (!workedStationsMap.has(callsign)) {
workedStationsMap.set(callsign, {
callsign,
points,
worked: true,
confirmed: false,
qsoDate: qso.qsoDate,
band: qso.band,
mode: qso.mode,
});
}
if (qso.lotwQslRstatus === 'Y') {
const detail = workedStationsMap.get(callsign);
if (detail && !detail.confirmed) {
detail.confirmed = true;
detail.lotwQslRdate = qso.lotwQslRdate;
}
}
}
totalPoints = Array.from(workedStationsMap.values())
.filter((s) => s.confirmed)
.reduce((sum, s) => sum + s.points, 0);
stationDetails.push(...workedStationsMap.values());
} else if (countMode === 'perQso') {
// Count every confirmed QSO
const qsoCount = { worked: 0, confirmed: 0, points: 0 };
for (const qso of allQSOs) {
const callsign = qso.callsign?.toUpperCase();
if (!callsign) continue;
const points = stationPoints.get(callsign);
if (!points) continue;
workedStations.add(callsign);
qsoCount.worked++;
if (qso.lotwQslRstatus === 'Y') {
qsoCount.confirmed++;
qsoCount.points += points;
}
}
totalPoints = qsoCount.points;
}
logger.debug('Point-based award progress', {
workedStations: workedStations.size,
totalPoints,
target,
});
return {
worked: workedStations.size,
confirmed: stationDetails.filter((s) => s.confirmed).length,
totalPoints,
target: target || 0,
percentage: target ? Math.min(100, Math.round((totalPoints / target) * 100)) : 0,
workedEntities: Array.from(workedStations),
confirmedEntities: stationDetails.filter((s) => s.confirmed).map((s) => s.callsign),
stationDetails,
};
}
/**
* Get entity breakdown for point-based awards
* countMode determines what entities are shown:
* - "perBandMode": shows each (callsign, band, mode) combination
* - "perStation": shows each unique station
* - "perQso": shows every QSO (not recommended for large datasets)
*/
async function getPointsAwardEntityBreakdown(userId, award) {
const { rules } = award;
const { stations, target, countMode = 'perStation' } = rules;
// Create a map of callsign -> points for quick lookup
const stationPoints = new Map();
for (const station of stations) {
stationPoints.set(station.callsign.toUpperCase(), station.points);
}
// Get all QSOs for user
const allQSOs = await db
.select()
.from(qsos)
.where(eq(qsos.userId, userId));
let entities = [];
let totalPoints = 0;
if (countMode === 'perBandMode') {
// Show each (callsign, band, mode) combination
const combinationMap = new Map();
for (const qso of allQSOs) {
@@ -365,33 +209,35 @@ async function getPointsAwardEntityBreakdown(userId, award) {
const mode = qso.mode || 'Unknown';
const combinationKey = `${callsign}/${band}/${mode}`;
workedStations.add(callsign);
if (!combinationMap.has(combinationKey)) {
combinationMap.set(combinationKey, {
entity: combinationKey,
entityId: null,
entityName: `${callsign} (${band}/${mode})`,
callsign,
band,
mode,
points,
worked: true,
confirmed: qso.lotwQslRstatus === 'Y',
confirmed: false,
qsoDate: qso.qsoDate,
band: qso.band,
mode: qso.mode,
callsign: qso.callsign,
lotwQslRdate: qso.lotwQslRdate,
lotwQslRdate: null,
});
} else {
const data = combinationMap.get(combinationKey);
if (!data.confirmed && qso.lotwQslRstatus === 'Y') {
data.confirmed = true;
data.lotwQslRdate = qso.lotwQslRdate;
}
if (qso.lotwQslRstatus === 'Y') {
const detail = combinationMap.get(combinationKey);
if (!detail.confirmed) {
detail.confirmed = true;
detail.lotwQslRdate = qso.lotwQslRdate;
}
}
}
entities = Array.from(combinationMap.values());
totalPoints = entities.filter((e) => e.confirmed).reduce((sum, e) => sum + e.points, 0);
const details = Array.from(combinationMap.values());
stationDetails.push(...details);
totalPoints = details.filter((d) => d.confirmed).reduce((sum, d) => sum + d.points, 0);
} else if (countMode === 'perStation') {
// Show each unique station
// Count unique stations only
const stationMap = new Map();
for (const qso of allQSOs) {
@@ -401,33 +247,35 @@ async function getPointsAwardEntityBreakdown(userId, award) {
const points = stationPoints.get(callsign);
if (!points) continue;
workedStations.add(callsign);
if (!stationMap.has(callsign)) {
stationMap.set(callsign, {
entity: callsign,
entityId: null,
entityName: callsign,
callsign,
points,
worked: true,
confirmed: qso.lotwQslRstatus === 'Y',
confirmed: false,
qsoDate: qso.qsoDate,
band: qso.band,
mode: qso.mode,
callsign: qso.callsign,
lotwQslRdate: qso.lotwQslRdate,
lotwQslRdate: null,
});
} else {
const data = stationMap.get(callsign);
if (!data.confirmed && qso.lotwQslRstatus === 'Y') {
data.confirmed = true;
data.lotwQslRdate = qso.lotwQslRdate;
}
if (qso.lotwQslRstatus === 'Y') {
const detail = stationMap.get(callsign);
if (!detail.confirmed) {
detail.confirmed = true;
detail.lotwQslRdate = qso.lotwQslRdate;
}
}
}
entities = Array.from(stationMap.values());
totalPoints = entities.filter((e) => e.confirmed).reduce((sum, e) => sum + e.points, 0);
const details = Array.from(stationMap.values());
stationDetails.push(...details);
totalPoints = details.filter((d) => d.confirmed).reduce((sum, d) => sum + d.points, 0);
} else if (countMode === 'perQso') {
// Show every QSO (use with caution)
// Count every confirmed QSO
for (const qso of allQSOs) {
const callsign = qso.callsign?.toUpperCase();
if (!callsign) continue;
@@ -435,39 +283,105 @@ async function getPointsAwardEntityBreakdown(userId, award) {
const points = stationPoints.get(callsign);
if (!points) continue;
entities.push({
entity: `${callsign}-${qso.qsoDate}`,
entityId: null,
entityName: `${callsign} on ${qso.qsoDate}`,
points,
worked: true,
confirmed: qso.lotwQslRstatus === 'Y',
qsoDate: qso.qsoDate,
band: qso.band,
mode: qso.mode,
callsign: qso.callsign,
lotwQslRdate: qso.lotwQslRdate,
});
workedStations.add(callsign);
if (qso.lotwQslRstatus === 'Y') {
totalPoints += points;
stationDetails.push({
callsign,
points,
worked: true,
confirmed: true,
qsoDate: qso.qsoDate,
band: qso.band,
mode: qso.mode,
lotwQslRdate: qso.lotwQslRdate,
});
}
}
}
return {
award: {
logger.debug('Point-based award progress', {
workedStations: workedStations.size,
totalPoints,
target,
});
// Base result
const result = {
worked: workedStations.size,
confirmed: stationDetails.filter((s) => s.confirmed).length,
totalPoints,
target: target || 0,
percentage: target ? Math.min(100, Math.round((totalPoints / target) * 100)) : 0,
workedEntities: Array.from(workedStations),
confirmedEntities: stationDetails.filter((s) => s.confirmed).map((s) => s.callsign),
};
// Add details if requested
if (includeDetails) {
// Convert stationDetails to entity format for breakdown
const entities = stationDetails.map((detail) => {
if (countMode === 'perBandMode') {
return {
entity: `${detail.callsign}/${detail.band}/${detail.mode}`,
entityId: null,
entityName: `${detail.callsign} (${detail.band}/${detail.mode})`,
points: detail.points,
worked: detail.worked,
confirmed: detail.confirmed,
qsoDate: detail.qsoDate,
band: detail.band,
mode: detail.mode,
callsign: detail.callsign,
lotwQslRdate: detail.lotwQslRdate,
};
} else if (countMode === 'perStation') {
return {
entity: detail.callsign,
entityId: null,
entityName: detail.callsign,
points: detail.points,
worked: detail.worked,
confirmed: detail.confirmed,
qsoDate: detail.qsoDate,
band: detail.band,
mode: detail.mode,
callsign: detail.callsign,
lotwQslRdate: detail.lotwQslRdate,
};
} else {
return {
entity: `${detail.callsign}-${detail.qsoDate}`,
entityId: null,
entityName: `${detail.callsign} on ${detail.qsoDate}`,
points: detail.points,
worked: detail.worked,
confirmed: detail.confirmed,
qsoDate: detail.qsoDate,
band: detail.band,
mode: detail.mode,
callsign: detail.callsign,
lotwQslRdate: detail.lotwQslRdate,
};
}
});
result.award = {
id: award.id,
name: award.name,
description: award.description,
caption: award.caption,
target: award.rules?.target || 0,
},
entities,
total: entities.length,
confirmed: entities.filter((e) => e.confirmed).length,
totalPoints,
};
};
result.entities = entities;
result.total = entities.length;
result.confirmed = entities.filter((e) => e.confirmed).length;
} else {
result.stationDetails = stationDetails;
}
return result;
}
/**
@@ -577,12 +491,28 @@ export async function getAwardEntityBreakdown(userId, awardId) {
let { rules } = award;
// Normalize rules to handle different formats
rules = normalizeAwardRules(rules);
// Normalize rules inline
if (rules.type === 'filtered' && rules.baseRule) {
rules = {
type: 'entity',
entityType: rules.baseRule.entityType,
target: rules.baseRule.target,
displayField: rules.baseRule.displayField,
filters: rules.filters,
};
} else if (rules.type === 'counter') {
rules = {
type: 'entity',
entityType: rules.countBy === 'qso' ? 'callsign' : 'callsign',
target: rules.target,
displayField: rules.displayField,
filters: rules.filters,
};
}
// Handle point-based awards
// Handle point-based awards - use the unified function
if (rules.type === 'points') {
return getPointsAwardEntityBreakdown(userId, award);
return await calculatePointsAwardProgress(userId, award, { includeDetails: true });
}
// Get all QSOs for user
@@ -604,17 +534,14 @@ export async function getAwardEntityBreakdown(userId, awardId) {
if (!entityMap.has(entity)) {
// Determine what to display as the entity name
// Use displayField from award rules, or fallback to entity/type
let displayName = String(entity);
if (rules.displayField) {
let rawValue = qso[rules.displayField];
// For grid-based awards, truncate to first 4 characters
if (rules.displayField === 'grid' && rawValue && rawValue.length > 4) {
rawValue = rawValue.substring(0, 4);
}
displayName = String(rawValue || entity);
} else {
// Fallback: try entity, state, grid, callsign in order
displayName = qso.entity || qso.state || qso.grid || qso.callsign || String(entity);
}
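Hypothetical usage of the merged award-progress path shown in this file's diff — the breakdown that getPointsAwardEntityBreakdown used to build separately is now reached through the same entry point:

// Summary progress only:
const progress = await calculateAwardProgress(userId, award);

// For point-based awards, the same call can attach the per-entity breakdown:
const detailed = await calculateAwardProgress(userId, award, { includeDetails: true });
// detailed.entities and detailed.total are only attached when includeDetails is true.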

View File

@@ -1,10 +1,9 @@
import { db } from '../config/database.js';
import { db, logger } from '../config.js';
import { syncJobs } from '../db/schema/index.js';
import { eq, and, desc, or, lt } from 'drizzle-orm';
import logger from '../config/logger.js';
import { eq, and, or, lt } from 'drizzle-orm';
/**
* Background Job Queue Service
* Simplified Background Job Queue Service
* Manages async jobs with database persistence
*/
@@ -16,43 +15,24 @@ export const JobStatus = {
FAILED: 'failed',
};
// Job type constants
export const JobType = {
LOTW_SYNC: 'lotw_sync',
};
// In-memory job processor (for single-server deployment)
const activeJobs = new Map(); // jobId -> Promise
const jobProcessors = {
[JobType.LOTW_SYNC]: null, // Will be set by lotw.service.js
};
// Active jobs tracking
const activeJobs = new Map();
/**
* Register a job processor function
* @param {string} type - Job type
* @param {Function} processor - Async function that processes the job
*/
export function registerProcessor(type, processor) {
jobProcessors[type] = processor;
}
/**
* Enqueue a new job
* Enqueue a new LoTW sync job
* @param {number} userId - User ID
* @param {string} type - Job type
* @param {Object} data - Job data (will be passed to processor)
* @returns {Promise<Object>} Job object with ID
*/
export async function enqueueJob(userId, type, data = {}) {
logger.debug('Enqueueing job', { userId, type });
export async function enqueueJob(userId) {
logger.debug('Enqueueing LoTW sync job', { userId });
// Check for existing active job of same type for this user
const existingJob = await getUserActiveJob(userId, type);
// Check for existing active job
const existingJob = await getUserActiveJob(userId);
if (existingJob) {
logger.debug('Existing active job found', { jobId: existingJob.id });
return {
success: false,
error: `A ${type} job is already running or pending for this user`,
error: 'A LoTW sync job is already running or pending for this user',
existingJob: existingJob.id,
};
}
@@ -62,16 +42,16 @@ export async function enqueueJob(userId, type, data = {}) {
.insert(syncJobs)
.values({
userId,
type,
type: 'lotw_sync',
status: JobStatus.PENDING,
createdAt: new Date(),
})
.returning();
logger.info('Job created', { jobId: job.id, type, userId });
logger.info('Job created', { jobId: job.id, userId });
// Start processing asynchronously (don't await)
processJobAsync(job.id, userId, type, data).catch((error) => {
processJobAsync(job.id, userId).catch((error) => {
logger.error(`Job processing error`, { jobId: job.id, error: error.message });
});
@@ -88,35 +68,53 @@ export async function enqueueJob(userId, type, data = {}) {
}
/**
* Process a job asynchronously
* Process a LoTW sync job asynchronously
* @param {number} jobId - Job ID
* @param {number} userId - User ID
* @param {string} type - Job type
* @param {Object} data - Job data
*/
async function processJobAsync(jobId, userId, type, data) {
// Store the promise in activeJobs
async function processJobAsync(jobId, userId) {
const jobPromise = (async () => {
try {
// Import dynamically to avoid circular dependency
const { syncQSOs } = await import('./lotw.service.js');
const { getUserById } = await import('./auth.service.js');
// Update status to running
await updateJob(jobId, {
status: JobStatus.RUNNING,
startedAt: new Date(),
});
// Get the processor for this job type
const processor = jobProcessors[type];
if (!processor) {
// Get user credentials
const user = await getUserById(userId);
if (!user || !user.lotwUsername || !user.lotwPassword) {
await updateJob(jobId, {
status: JobStatus.FAILED,
completedAt: new Date(),
error: `No processor registered for job type: ${type}`,
error: 'LoTW credentials not configured',
});
return null;
}
// Execute the job processor
const result = await processor(jobId, userId, data);
// Get last QSL date for incremental sync
const { getLastLoTWQSLDate } = await import('./lotw.service.js');
const lastQSLDate = await getLastLoTWQSLDate(userId);
const sinceDate = lastQSLDate || new Date('2000-01-01');
if (lastQSLDate) {
logger.info(`Job ${jobId}: Incremental sync`, { since: sinceDate.toISOString().split('T')[0] });
} else {
logger.info(`Job ${jobId}: Full sync`);
}
// Update job progress
await updateJobProgress(jobId, {
message: 'Fetching QSOs from LoTW...',
step: 'fetch',
});
// Execute the sync
const result = await syncQSOs(userId, user.lotwUsername, user.lotwPassword, sinceDate, jobId);
// Update job as completed
await updateJob(jobId, {
@@ -185,7 +183,7 @@ export async function getJobStatus(jobId) {
return {
id: job.id,
userId: job.userId, // Include userId for permission checks
userId: job.userId,
type: job.type,
status: job.status,
startedAt: job.startedAt,
@@ -197,13 +195,11 @@ export async function getJobStatus(jobId) {
}
/**
* Get user's active job (pending or running) of a specific type
* Get user's active job (pending or running)
* @param {number} userId - User ID
* @param {string} type - Job type (optional, returns any active job)
* @returns {Promise<Object|null>} Active job or null
*/
export async function getUserActiveJob(userId, type = null) {
// Build the where clause properly with and() and or()
export async function getUserActiveJob(userId) {
const conditions = [
eq(syncJobs.userId, userId),
or(
@@ -212,15 +208,11 @@ export async function getUserActiveJob(userId, type = null) {
),
];
if (type) {
conditions.push(eq(syncJobs.type, type));
}
const [job] = await db
.select()
.from(syncJobs)
.where(and(...conditions))
.orderBy(desc(syncJobs.createdAt))
.orderBy(syncJobs.createdAt)
.limit(1);
return job || null;
@@ -237,7 +229,7 @@ export async function getUserJobs(userId, limit = 10) {
.select()
.from(syncJobs)
.where(eq(syncJobs.userId, userId))
.orderBy(desc(syncJobs.createdAt))
.orderBy(syncJobs.createdAt)
.limit(limit);
return jobs.map((job) => {
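With the processor registry gone, LoTW sync is the only job type and callers pass just a user ID; a hypothetical call site (only the duplicate-job branch is shown in this hunk, so the success-path shape is not assumed):

import { enqueueJob } from './job-queue.service.js';

const result = await enqueueJob(userId);
if (result.success === false) {
  // Another LoTW sync is already pending or running for this user;
  // result.existingJob carries the ID of that job.
}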

View File

@@ -1,24 +1,17 @@
import { db } from '../config/database.js';
import { db, logger } from '../config.js';
import { qsos } from '../db/schema/index.js';
import { max, sql, eq, and, desc } from 'drizzle-orm';
import { registerProcessor, updateJobProgress } from './job-queue.service.js';
import logger from '../config/logger.js';
import { updateJobProgress } from './job-queue.service.js';
/**
* LoTW (Logbook of the World) Service
* Fetches QSOs from ARRL's LoTW system
*/
// Wavelog-compatible constants
const LOTW_CONNECT_TIMEOUT = 30;
// Configuration for long-polling
const POLLING_CONFIG = {
maxRetries: 30,
retryDelay: 10000,
requestTimeout: 60000,
maxTotalTime: 600000,
};
// Simplified polling configuration
const MAX_RETRIES = 30;
const RETRY_DELAY = 10000;
const REQUEST_TIMEOUT = 60000;
/**
* Check if LoTW response indicates the report is still being prepared
@@ -53,7 +46,7 @@ function isReportPending(responseData) {
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));
/**
* Fetch QSOs from LoTW with long-polling support
* Fetch QSOs from LoTW with retry support
*/
async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
const url = 'https://lotw.arrl.org/lotwuser/lotwreport.adi';
@@ -79,23 +72,14 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
const fullUrl = `${url}?${params.toString()}`;
logger.debug('Fetching from LoTW', { url: fullUrl.replace(/password=[^&]+/, 'password=***') });
const startTime = Date.now();
for (let attempt = 0; attempt < POLLING_CONFIG.maxRetries; attempt++) {
const elapsed = Date.now() - startTime;
if (elapsed > POLLING_CONFIG.maxTotalTime) {
return {
error: `LoTW sync timeout: exceeded maximum wait time of ${POLLING_CONFIG.maxTotalTime / 1000} seconds`
};
}
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
if (attempt > 0) {
logger.debug(`Retry attempt ${attempt + 1}/${POLLING_CONFIG.maxRetries}`, { elapsed: Math.round(elapsed / 1000) });
logger.debug(`Retry attempt ${attempt + 1}/${MAX_RETRIES}`);
}
try {
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), POLLING_CONFIG.requestTimeout);
const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT);
const response = await fetch(fullUrl, { signal: controller.signal });
clearTimeout(timeoutId);
@@ -103,7 +87,7 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
if (!response.ok) {
if (response.status === 503) {
logger.warn('LoTW returned 503, retrying...');
await sleep(POLLING_CONFIG.retryDelay);
await sleep(RETRY_DELAY);
continue;
} else if (response.status === 401) {
return { error: 'Invalid LoTW credentials. Please check your username and password in Settings.' };
@@ -111,7 +95,7 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
return { error: 'LoTW service not found (404). The LoTW API URL may have changed.' };
} else {
logger.warn(`LoTW returned ${response.status}, retrying...`);
await sleep(POLLING_CONFIG.retryDelay);
await sleep(RETRY_DELAY);
continue;
}
}
@@ -126,10 +110,9 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
if (!header.includes('arrl logbook of the world')) {
if (isReportPending(adifData)) {
logger.debug('LoTW report still being prepared, waiting...');
await sleep(POLLING_CONFIG.retryDelay);
await sleep(RETRY_DELAY);
continue;
}
return { error: 'Downloaded LoTW report is invalid. Check your credentials.' };
}
@@ -143,7 +126,7 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
} catch (error) {
if (error.name === 'AbortError') {
logger.debug('Request timeout, retrying...');
await sleep(POLLING_CONFIG.retryDelay);
await sleep(RETRY_DELAY);
continue;
}
@@ -151,9 +134,9 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
throw error;
}
if (attempt < POLLING_CONFIG.maxRetries - 1) {
if (attempt < MAX_RETRIES - 1) {
logger.warn(`Error on attempt ${attempt + 1}`, { error: error.message });
await sleep(POLLING_CONFIG.retryDelay);
await sleep(RETRY_DELAY);
continue;
} else {
throw error;
@@ -161,9 +144,9 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
}
}
const totalTime = Math.round((Date.now() - startTime) / 1000);
const totalTime = Math.round((Date.now() - Date.now()) / 1000);
return {
error: `LoTW sync failed: Report not ready after ${POLLING_CONFIG.maxRetries} attempts (${totalTime}s). LoTW may be experiencing high load. Please try again later.`
error: `LoTW sync failed: Report not ready after ${MAX_RETRIES} attempts (${totalTime}s). LoTW may be experiencing high load. Please try again later.`
};
}
@@ -259,30 +242,67 @@ function normalizeMode(mode) {
/**
* Sync QSOs from LoTW to database
* @param {number} userId - User ID
* @param {string} lotwUsername - LoTW username
* @param {string} lotwPassword - LoTW password
* @param {Date|null} sinceDate - Optional date for incremental sync
* @param {number|null} jobId - Optional job ID for progress tracking
*/
async function syncQSOs(userId, lotwUsername, lotwPassword, sinceDate = null) {
export async function syncQSOs(userId, lotwUsername, lotwPassword, sinceDate = null, jobId = null) {
if (jobId) {
await updateJobProgress(jobId, {
message: 'Fetching QSOs from LoTW...',
step: 'fetch',
});
}
const adifQSOs = await fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate);
if (!adifQSOs || adifQSOs.length === 0) {
// Check for error response from LoTW fetch
if (!adifQSOs) {
return { success: false, error: 'Failed to fetch from LoTW', total: 0, added: 0, updated: 0 };
}
// If adifQSOs is an error object, throw it
if (adifQSOs.error) {
throw new Error(adifQSOs.error);
}
if (adifQSOs.length === 0) {
return { success: true, total: 0, added: 0, updated: 0, message: 'No QSOs found in LoTW' };
}
if (jobId) {
await updateJobProgress(jobId, {
message: `Processing ${adifQSOs.length} QSOs...`,
step: 'process',
total: adifQSOs.length,
processed: 0,
});
}
let addedCount = 0;
let updatedCount = 0;
const errors = [];
for (const qsoData of adifQSOs) {
for (let i = 0; i < adifQSOs.length; i++) {
const qsoData = adifQSOs[i];
try {
const dbQSO = convertQSODatabaseFormat(qsoData, userId);
const existing = await db
.select()
.from(qsos)
.where(eq(qsos.userId, userId))
.where(eq(qsos.callsign, dbQSO.callsign))
.where(eq(qsos.qsoDate, dbQSO.qsoDate))
.where(eq(qsos.band, dbQSO.band))
.where(eq(qsos.mode, dbQSO.mode))
.where(
and(
eq(qsos.userId, userId),
eq(qsos.callsign, dbQSO.callsign),
eq(qsos.qsoDate, dbQSO.qsoDate),
eq(qsos.band, dbQSO.band),
eq(qsos.mode, dbQSO.mode)
)
)
.limit(1);
if (existing.length > 0) {
@@ -299,13 +319,21 @@ async function syncQSOs(userId, lotwUsername, lotwPassword, sinceDate = null) {
await db.insert(qsos).values(dbQSO);
addedCount++;
}
// Update job progress every 10 QSOs
if (jobId && (i + 1) % 10 === 0) {
await updateJobProgress(jobId, {
processed: i + 1,
message: `Processed ${i + 1}/${adifQSOs.length} QSOs...`,
});
}
} catch (error) {
logger.error('Error processing QSO', { error: error.message, qso: qsoData });
logger.error('Error processing QSO', { error: error.message, jobId, qso: qsoData });
errors.push({ qso: qsoData, error: error.message });
}
}
logger.info('LoTW sync completed', { total: adifQSOs.length, added: addedCount, updated: updatedCount });
logger.info('LoTW sync completed', { total: adifQSOs.length, added: addedCount, updated: updatedCount, jobId });
return {
success: true,
@@ -383,7 +411,7 @@ export async function getQSOStats(userId) {
/**
* Get the date of the last LoTW QSL for a user
*/
async function getLastLoTWQSLDate(userId) {
export async function getLastLoTWQSLDate(userId) {
const [result] = await db
.select({ maxDate: max(qsos.lotwQslRdate) })
.from(qsos)
@@ -401,101 +429,6 @@ async function getLastLoTWQSLDate(userId) {
return new Date(`${year}-${month}-${day}`);
}
/**
* LoTW sync job processor for the job queue
*/
export async function syncQSOsForJob(jobId, userId, data) {
const { lotwUsername, lotwPassword } = data;
await updateJobProgress(jobId, {
message: 'Fetching QSOs from LoTW...',
step: 'fetch',
});
const lastQSLDate = await getLastLoTWQSLDate(userId);
const sinceDate = lastQSLDate || new Date('2000-01-01');
if (lastQSLDate) {
logger.info(`Job ${jobId}: Incremental sync`, { since: sinceDate.toISOString().split('T')[0] });
} else {
logger.info(`Job ${jobId}: Full sync`);
}
const adifQSOs = await fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate);
if (!adifQSOs || adifQSOs.length === 0) {
return { success: true, total: 0, added: 0, updated: 0, message: 'No QSOs found in LoTW' };
}
await updateJobProgress(jobId, {
message: `Processing ${adifQSOs.length} QSOs...`,
step: 'process',
total: adifQSOs.length,
processed: 0,
});
let addedCount = 0;
let updatedCount = 0;
const errors = [];
for (let i = 0; i < adifQSOs.length; i++) {
const qsoData = adifQSOs[i];
try {
const dbQSO = convertQSODatabaseFormat(qsoData, userId);
const existing = await db
.select()
.from(qsos)
.where(
and(
eq(qsos.userId, userId),
eq(qsos.callsign, dbQSO.callsign),
eq(qsos.qsoDate, dbQSO.qsoDate),
eq(qsos.band, dbQSO.band),
eq(qsos.mode, dbQSO.mode)
)
)
.limit(1);
if (existing.length > 0) {
await db
.update(qsos)
.set({
lotwQslRdate: dbQSO.lotwQslRdate,
lotwQslRstatus: dbQSO.lotwQslRstatus,
lotwSyncedAt: dbQSO.lotwSyncedAt,
})
.where(eq(qsos.id, existing[0].id));
updatedCount++;
} else {
await db.insert(qsos).values(dbQSO);
addedCount++;
}
if ((i + 1) % 10 === 0) {
await updateJobProgress(jobId, {
processed: i + 1,
message: `Processed ${i + 1}/${adifQSOs.length} QSOs...`,
});
}
} catch (error) {
logger.error(`Job ${jobId}: Error processing QSO`, { error: error.message });
errors.push({ qso: qsoData, error: error.message });
}
}
logger.info(`Job ${jobId} completed`, { total: adifQSOs.length, added: addedCount, updated: updatedCount });
return {
success: true,
total: adifQSOs.length,
added: addedCount,
updated: updatedCount,
errors: errors.length > 0 ? errors : undefined,
};
}
/**
* Delete all QSOs for a user
*/
@@ -504,5 +437,3 @@ export async function deleteQSOs(userId) {
return result;
}
// Register the LoTW sync processor with the job queue
registerProcessor('lotw_sync', syncQSOsForJob);
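The two newly exported helpers are what processJobAsync in job-queue.service.js now pulls in via dynamic import; calling them directly looks like this (illustrative, mirroring that code path):

const { syncQSOs, getLastLoTWQSLDate } = await import('./lotw.service.js');

const since = (await getLastLoTWQSLDate(userId)) || new Date('2000-01-01');
const summary = await syncQSOs(userId, lotwUsername, lotwPassword, since, jobId);
// summary: { success, total, added, updated, errors? }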