Refactor LoTW sync with background job queue and Wavelog compatibility

Backend changes:
- Add sync_jobs table for background job tracking with Drizzle schema
- Create job queue service (job-queue.service.js) for async job processing
- Enforce a single active sync job per user at the queue level (usage sketched after this list)
- Refactor LoTW service with Wavelog download logic:
  - Reject responses containing "Username/password incorrect"
  - Check that the file starts with "ARRL Logbook of the World Status Report"
  - Use last LoTW QSL date for incremental sync (qso_qslsince)
  - Wavelog-compatible timeouts and error handling
- Add deleteQSOs function to clear all user QSOs
- Fix database path to use absolute path for consistency
- Register job processor for lotw_sync job type
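
A rough usage sketch of the queue API added in this commit (function names match job-queue.service.js in this diff; the surrounding variables such as userId and the credential fields are illustrative):

  import { registerProcessor, enqueueJob } from './services/job-queue.service.js';

  // A processor receives (jobId, userId, data) and returns a result object
  registerProcessor('lotw_sync', async (jobId, userId, data) => {
    // ...fetch QSOs from LoTW and store them...
    return { total: 0, added: 0, updated: 0 };
  });

  // enqueueJob persists a sync_jobs row and starts processing without awaiting it;
  // it returns { success: true, jobId } or, if a job is already active for this user,
  // { success: false, existingJob }
  const queued = await enqueueJob(userId, 'lotw_sync', { lotwUsername, lotwPassword });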

API endpoints (example client calls below):
- POST /api/lotw/sync - Queue background sync job, returns jobId immediately
- GET /api/jobs/:jobId - Get job status with progress tracking
- GET /api/jobs/active - Get user's active job
- GET /api/jobs - Get user's recent jobs
- DELETE /api/qsos/all - Delete all QSOs for authenticated user
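
Example client calls, using the qsosAPI/jobsAPI helpers added in $lib/api.js; response shapes are those returned by the endpoints above:

  const { jobId } = await qsosAPI.syncFromLoTW();   // POST /api/lotw/sync -> { success, jobId, ... }
  const { job } = await jobsAPI.getStatus(jobId);   // job: { id, userId, type, status, startedAt, completedAt, result, error, createdAt }
  const active = await jobsAPI.getActive();         // active.job is null when nothing is pending or running
  const { jobs } = await jobsAPI.getRecent(10);     // most recent jobs, newest first
  const { deleted } = await qsosAPI.deleteAll();    // DELETE /api/qsos/all -> { success, deleted, message }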

Frontend changes:
- Poll job status every 2 seconds during sync (sketched after this list)
- Show real-time progress indicator during sync
- Add "Clear All QSOs" button with type-to-confirm ("DELETE")
- Check for active job on mount to resume polling after refresh
- Clean up polling interval on component unmount
- Update API client with jobsAPI methods (getStatus, getActive, getRecent)
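
Minimal polling sketch (the Svelte page in this diff does the same with component state; jobId and pollingInterval are assumed to be in scope):

  pollingInterval = setInterval(async () => {
    const { job } = await jobsAPI.getStatus(jobId);
    if (job.status === 'completed' || job.status === 'failed') {
      clearInterval(pollingInterval);
      pollingInterval = null;
      // reload QSOs/stats and surface job.result or job.error here
    }
  }, 2000);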

Database:
- Add sync_jobs table: id, userId, status, type, startedAt, completedAt,
  result, error, createdAt
- Foreign key to users table
- Path fix: now uses src/backend/award.db consistently

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-15 21:47:50 +01:00
parent 44c13e1bdc
commit f82fc876ce
7 changed files with 1016 additions and 52 deletions

View File

@@ -1,9 +1,18 @@
import Database from 'bun:sqlite';
import { drizzle } from 'drizzle-orm/bun-sqlite';
import * as schema from '../db/schema/index.js';
import { join } from 'path';
// Get the directory of this file (src/backend/config/)
const configDir = import.meta.dir || new URL('.', import.meta.url).pathname;
// Go up one level to get src/backend/, then to award.db
const dbPath = join(configDir, '..', 'award.db');
console.error('[Database] Using database at:', dbPath);
// Create SQLite database connection
-const sqlite = new Database('./award.db');
const sqlite = new Database(dbPath);
// Enable foreign keys
sqlite.exec('PRAGMA foreign_keys = ON');

View File

@@ -142,5 +142,30 @@ export const awardProgress = sqliteTable('award_progress', {
updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull().$defaultFn(() => new Date()),
});
/**
* @typedef {Object} SyncJob
* @property {number} id
* @property {number} userId
* @property {string} status
* @property {string} type
* @property {Date|null} startedAt
* @property {Date|null} completedAt
* @property {string|null} result
* @property {string|null} error
* @property {Date} createdAt
*/
export const syncJobs = sqliteTable('sync_jobs', {
id: integer('id').primaryKey({ autoIncrement: true }),
userId: integer('user_id').notNull().references(() => users.id),
status: text('status').notNull(), // pending, running, completed, failed
type: text('type').notNull(), // lotw_sync, etc.
startedAt: integer('started_at', { mode: 'timestamp' }),
completedAt: integer('completed_at', { mode: 'timestamp' }),
result: text('result'), // JSON string
error: text('error'),
createdAt: integer('created_at', { mode: 'timestamp' }).notNull().$defaultFn(() => new Date()),
});
// Export all schemas
-export const schema = { users, qsos, awards, awardProgress };
export const schema = { users, qsos, awards, awardProgress, syncJobs };

View File

@@ -9,10 +9,16 @@ import {
updateLoTWCredentials,
} from './services/auth.service.js';
import {
-syncQSOs,
getUserQSOs,
getQSOStats,
deleteQSOs,
} from './services/lotw.service.js';
import {
enqueueJob,
getJobStatus,
getUserActiveJob,
getUserJobs,
} from './services/job-queue.service.js';
/**
 * Main backend application
@@ -218,18 +224,29 @@ const app = new Elysia()
/**
 * POST /api/lotw/sync
- * Sync QSOs from LoTW (requires authentication)
 * Queue a LoTW sync job (requires authentication)
 * Returns immediately with job ID
 */
.post('/api/lotw/sync', async ({ user, set }) => {
if (!user) {
console.error('[/api/lotw/sync] No user found in request');
set.status = 401;
return { success: false, error: 'Unauthorized' };
}
console.error('[/api/lotw/sync] User authenticated:', user.id);
try {
// Get user's LoTW credentials from database
const userData = await getUserById(user.id);
console.error('[/api/lotw/sync] User data from DB:', {
id: userData?.id,
lotwUsername: userData?.lotwUsername ? '***' : null,
hasPassword: !!userData?.lotwPassword
});
if (!userData || !userData.lotwUsername || !userData.lotwPassword) {
console.error('[/api/lotw/sync] Missing LoTW credentials');
set.status = 400;
return {
success: false,
@@ -237,18 +254,136 @@ const app = new Elysia()
};
}
-// Decrypt password (for now, assuming it's stored as-is. TODO: implement encryption)
-const lotwPassword = userData.lotwPassword;
// Enqueue the sync job (enqueueJob will check for existing active jobs)
const result = await enqueueJob(user.id, 'lotw_sync', {
lotwUsername: userData.lotwUsername,
lotwPassword: userData.lotwPassword,
});
-// Sync QSOs from LoTW
-const result = await syncQSOs(user.id, userData.lotwUsername, lotwPassword);
// If enqueueJob returned existingJob, format the response
if (!result.success && result.existingJob) {
return {
success: true,
jobId: result.existingJob,
message: 'A sync job is already running',
};
}
return result;
} catch (error) {
console.error('Error in /api/lotw/sync:', error);
set.status = 500;
return {
success: false,
error: `Failed to queue sync job: ${error.message}`,
};
}
})
/**
* GET /api/jobs/:jobId
* Get job status (requires authentication)
*/
.get('/api/jobs/:jobId', async ({ user, params, set }) => {
if (!user) {
set.status = 401;
return { success: false, error: 'Unauthorized' };
}
try {
const jobId = parseInt(params.jobId);
if (isNaN(jobId)) {
set.status = 400;
return { success: false, error: 'Invalid job ID' };
}
const job = await getJobStatus(jobId);
if (!job) {
set.status = 404;
return { success: false, error: 'Job not found' };
}
// Verify user owns this job
if (job.userId !== user.id) {
set.status = 403;
return { success: false, error: 'Forbidden' };
}
return {
success: true,
job,
};
} catch (error) {
set.status = 500;
return {
success: false,
-error: `LoTW sync failed: ${error.message}`,
error: 'Failed to fetch job status',
};
}
})
/**
* GET /api/jobs/active
* Get user's active job (requires authentication)
*/
.get('/api/jobs/active', async ({ user, set }) => {
if (!user) {
set.status = 401;
return { success: false, error: 'Unauthorized' };
}
try {
const job = await getUserActiveJob(user.id);
if (!job) {
return {
success: true,
job: null,
};
}
return {
success: true,
job: {
id: job.id,
type: job.type,
status: job.status,
createdAt: job.createdAt,
startedAt: job.startedAt,
},
};
} catch (error) {
set.status = 500;
return {
success: false,
error: 'Failed to fetch active job',
};
}
})
/**
* GET /api/jobs
* Get user's recent jobs (requires authentication)
*/
.get('/api/jobs', async ({ user, query, set }) => {
if (!user) {
set.status = 401;
return { success: false, error: 'Unauthorized' };
}
try {
const limit = query.limit ? parseInt(query.limit) : 10;
const jobs = await getUserJobs(user.id, limit);
return {
success: true,
jobs,
};
} catch (error) {
set.status = 500;
return {
success: false,
error: 'Failed to fetch jobs',
};
}
})
@@ -311,6 +446,33 @@ const app = new Elysia()
}
})
/**
* DELETE /api/qsos/all
* Delete all QSOs for authenticated user
*/
.delete('/api/qsos/all', async ({ user, set }) => {
if (!user) {
set.status = 401;
return { success: false, error: 'Unauthorized' };
}
try {
const deleted = await deleteQSOs(user.id);
return {
success: true,
deleted,
message: `Deleted ${deleted} QSO(s)`,
};
} catch (error) {
set.status = 500;
return {
success: false,
error: 'Failed to delete QSOs',
};
}
})
// Health check endpoint
.get('/api/health', () => ({
status: 'ok',

View File

@@ -0,0 +1,314 @@
import { db } from '../config/database.js';
import { syncJobs } from '../db/schema/index.js';
import { eq, and, desc, or, lt } from 'drizzle-orm';
/**
* Background Job Queue Service
* Manages async jobs with database persistence
*/
// Job status constants
export const JobStatus = {
PENDING: 'pending',
RUNNING: 'running',
COMPLETED: 'completed',
FAILED: 'failed',
};
// Job type constants
export const JobType = {
LOTW_SYNC: 'lotw_sync',
};
// In-memory job processor (for single-server deployment)
const activeJobs = new Map(); // jobId -> Promise
const jobProcessors = {
[JobType.LOTW_SYNC]: null, // Will be set by lotw.service.js
};
/**
* Register a job processor function
* @param {string} type - Job type
* @param {Function} processor - Async function that processes the job
*/
export function registerProcessor(type, processor) {
jobProcessors[type] = processor;
}
/**
* Enqueue a new job
* @param {number} userId - User ID
* @param {string} type - Job type
* @param {Object} data - Job data (will be passed to processor)
* @returns {Promise<Object>} Job object with ID
*/
export async function enqueueJob(userId, type, data = {}) {
console.error('[enqueueJob] Starting job enqueue:', { userId, type, hasData: !!data });
// Check for existing active job of same type for this user
const existingJob = await getUserActiveJob(userId, type);
if (existingJob) {
console.error('[enqueueJob] Found existing active job:', existingJob.id);
return {
success: false,
error: `A ${type} job is already running or pending for this user`,
existingJob: existingJob.id,
};
}
// Create job record
console.error('[enqueueJob] Creating job record in database...');
const [job] = await db
.insert(syncJobs)
.values({
userId,
type,
status: JobStatus.PENDING,
createdAt: new Date(),
})
.returning();
console.error('[enqueueJob] Job created:', job.id);
// Start processing asynchronously (don't await)
processJobAsync(job.id, userId, type, data).catch((error) => {
console.error(`[enqueueJob] Error processing job ${job.id}:`, error);
});
return {
success: true,
jobId: job.id,
job: {
id: job.id,
type: job.type,
status: job.status,
createdAt: job.createdAt,
},
};
}
/**
* Process a job asynchronously
* @param {number} jobId - Job ID
* @param {number} userId - User ID
* @param {string} type - Job type
* @param {Object} data - Job data
*/
async function processJobAsync(jobId, userId, type, data) {
// Store the promise in activeJobs
const jobPromise = (async () => {
try {
// Update status to running
await updateJob(jobId, {
status: JobStatus.RUNNING,
startedAt: new Date(),
});
// Get the processor for this job type
const processor = jobProcessors[type];
if (!processor) {
throw new Error(`No processor registered for job type: ${type}`);
}
// Execute the job processor
const result = await processor(jobId, userId, data);
// Update job as completed
await updateJob(jobId, {
status: JobStatus.COMPLETED,
completedAt: new Date(),
result: JSON.stringify(result),
});
return result;
} catch (error) {
// Update job as failed
await updateJob(jobId, {
status: JobStatus.FAILED,
completedAt: new Date(),
error: error.message,
});
throw error;
} finally {
// Remove from active jobs
activeJobs.delete(jobId);
}
})();
activeJobs.set(jobId, jobPromise);
return jobPromise;
}
/**
* Update job record
* @param {number} jobId - Job ID
* @param {Object} updates - Fields to update
*/
export async function updateJob(jobId, updates) {
await db.update(syncJobs).set(updates).where(eq(syncJobs.id, jobId));
}
/**
* Get job by ID
* @param {number} jobId - Job ID
* @returns {Promise<Object|null>} Job object or null
*/
export async function getJob(jobId) {
const [job] = await db.select().from(syncJobs).where(eq(syncJobs.id, jobId)).limit(1);
return job || null;
}
/**
* Get job status (with parsed result if completed)
* @param {number} jobId - Job ID
* @returns {Promise<Object|null>} Job object with parsed result
*/
export async function getJobStatus(jobId) {
const job = await getJob(jobId);
if (!job) return null;
// Parse result JSON if completed
let parsedResult = null;
if (job.status === JobStatus.COMPLETED && job.result) {
try {
parsedResult = JSON.parse(job.result);
} catch (e) {
console.error('Failed to parse job result:', e);
}
}
return {
id: job.id,
userId: job.userId, // Include userId for permission checks
type: job.type,
status: job.status,
startedAt: job.startedAt,
completedAt: job.completedAt,
result: parsedResult,
error: job.error,
createdAt: job.createdAt,
};
}
/**
* Get user's active job (pending or running) of a specific type
* @param {number} userId - User ID
* @param {string} type - Job type (optional, returns any active job)
* @returns {Promise<Object|null>} Active job or null
*/
export async function getUserActiveJob(userId, type = null) {
console.error('[getUserActiveJob] Querying for active job:', { userId, type });
// Build the where clause properly with and() and or()
const conditions = [
eq(syncJobs.userId, userId),
or(
eq(syncJobs.status, JobStatus.PENDING),
eq(syncJobs.status, JobStatus.RUNNING)
),
];
if (type) {
conditions.push(eq(syncJobs.type, type));
}
try {
const [job] = await db
.select()
.from(syncJobs)
.where(and(...conditions))
.orderBy(desc(syncJobs.createdAt))
.limit(1);
console.error('[getUserActiveJob] Result:', job ? `Found job ${job.id}` : 'No active job');
return job || null;
} catch (error) {
console.error('[getUserActiveJob] Database error:', error);
throw error;
}
}
/**
* Get recent jobs for a user
* @param {number} userId - User ID
* @param {number} limit - Maximum number of jobs to return
* @returns {Promise<Array>} Array of jobs
*/
export async function getUserJobs(userId, limit = 10) {
const jobs = await db
.select()
.from(syncJobs)
.where(eq(syncJobs.userId, userId))
.orderBy(desc(syncJobs.createdAt))
.limit(limit);
return jobs.map((job) => {
let parsedResult = null;
if (job.status === JobStatus.COMPLETED && job.result) {
try {
parsedResult = JSON.parse(job.result);
} catch (e) {
// Ignore parse errors
}
}
return {
id: job.id,
type: job.type,
status: job.status,
startedAt: job.startedAt,
completedAt: job.completedAt,
result: parsedResult,
error: job.error,
createdAt: job.createdAt,
};
});
}
/**
* Delete old completed jobs (cleanup)
* @param {number} daysOld - Delete jobs older than this many days
* @returns {Promise<number>} Number of jobs deleted
*/
export async function cleanupOldJobs(daysOld = 7) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - daysOld);
const result = await db
.delete(syncJobs)
.where(
and(
eq(syncJobs.status, JobStatus.COMPLETED),
lt(syncJobs.completedAt, cutoffDate)
)
);
return result;
}
/**
* Update job progress (for long-running jobs)
* @param {number} jobId - Job ID
* @param {Object} progressData - Progress data to store in result field
*/
export async function updateJobProgress(jobId, progressData) {
const job = await getJob(jobId);
if (!job) return;
let currentData = {};
if (job.result) {
try {
currentData = JSON.parse(job.result);
} catch (e) {
// Start fresh if invalid JSON
}
}
// Merge progress data
const updatedData = { ...currentData, ...progressData, progress: true };
await updateJob(jobId, {
result: JSON.stringify(updatedData),
});
}

View File

@@ -1,11 +1,16 @@
import { db } from '../config/database.js';
import { qsos } from '../db/schema/index.js';
import { max, sql } from 'drizzle-orm';
import { registerProcessor, updateJobProgress } from './job-queue.service.js';
/**
 * LoTW (Logbook of the World) Service
 * Fetches QSOs from ARRL's LoTW system
 */
// Wavelog-compatible constants
const LOTW_CONNECT_TIMEOUT = 30; // CURLOPT_CONNECTTIMEOUT from Wavelog
// Configuration for long-polling
const POLLING_CONFIG = {
maxRetries: 30, // Maximum number of retry attempts
@@ -154,13 +159,21 @@ export async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate =
const adifData = await response.text();
console.error(`Response length: ${adifData.length} bytes`);
-// Check if report is still pending
-if (isReportPending(adifData)) {
-console.error('LoTW report is still being prepared, waiting...', adifData.substring(0, 100));
-// Wait before retrying
-await sleep(POLLING_CONFIG.retryDelay);
-continue;
// Wavelog: Validate response for credential errors
if (adifData.toLowerCase().includes('username/password incorrect')) {
throw new Error('Username/password incorrect');
}
// Wavelog: Check if file starts with expected header
const header = adifData.trim().substring(0, 39).toLowerCase();
if (!header.includes('arrl logbook of the world')) {
// This might be because the report is still pending
if (isReportPending(adifData)) {
console.error('LoTW report is still being prepared, waiting...', adifData.substring(0, 100));
await sleep(POLLING_CONFIG.retryDelay);
continue;
}
throw new Error('Downloaded LoTW report is invalid. Check your credentials.');
}
// We have valid data!
@@ -529,3 +542,200 @@ export async function getQSOStats(userId) {
return stats;
}
/**
* Get the date of the last LoTW QSL for a user
* Used for qso_qslsince parameter to minimize downloads
* @param {number} userId - User ID
* @returns {Promise<Date|null>} Last QSL date or null
*/
export async function getLastLoTWQSLDate(userId) {
const { eq } = await import('drizzle-orm');
// Get the most recent lotwQslRdate for this user
const [result] = await db
.select({ maxDate: max(qsos.lotwQslRdate) })
.from(qsos)
.where(eq(qsos.userId, userId));
if (!result || !result.maxDate) {
return null;
}
// Parse ADIF date format (YYYYMMDD) to Date
const dateStr = result.maxDate;
if (!dateStr || dateStr === '') {
return null;
}
const year = dateStr.substring(0, 4);
const month = dateStr.substring(4, 6);
const day = dateStr.substring(6, 8);
return new Date(`${year}-${month}-${day}`);
}
/**
* Validate LoTW response following Wavelog logic
* @param {string} responseData - Response from LoTW
* @returns {Object} { valid: boolean, error?: string }
*/
function validateLoTWResponse(responseData) {
const trimmed = responseData.trim();
// Wavelog: Check for username/password incorrect
if (trimmed.toLowerCase().includes('username/password incorrect')) {
return {
valid: false,
error: 'Username/password incorrect',
shouldClearCredentials: true,
};
}
// Wavelog: Check if file starts with "ARRL Logbook of the World Status Report"
const header = trimmed.substring(0, 39).toLowerCase();
if (!header.includes('arrl logbook of the world')) {
return {
valid: false,
error: 'Downloaded LoTW report is invalid. File does not start with expected header.',
};
}
return { valid: true };
}
/**
* LoTW sync job processor for the job queue
* @param {number} jobId - Job ID
* @param {number} userId - User ID
* @param {Object} data - Job data { lotwUsername, lotwPassword }
* @returns {Promise<Object>} Sync result
*/
export async function syncQSOsForJob(jobId, userId, data) {
const { lotwUsername, lotwPassword } = data;
try {
// Update job progress: starting
await updateJobProgress(jobId, {
message: 'Fetching QSOs from LoTW...',
step: 'fetch',
});
// Get last LoTW QSL date for incremental sync
const lastQSLDate = await getLastLoTWQSLDate(userId);
const sinceDate = lastQSLDate || new Date('2026-01-01'); // Default as per Wavelog
console.error(`[Job ${jobId}] Syncing LoTW QSOs since ${sinceDate.toISOString().split('T')[0]}`);
// Fetch from LoTW
const adifQSOs = await fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate);
if (!adifQSOs || adifQSOs.length === 0) {
return {
success: true,
total: 0,
added: 0,
updated: 0,
message: 'No QSOs found in LoTW',
};
}
// Update job progress: processing
await updateJobProgress(jobId, {
message: `Processing ${adifQSOs.length} QSOs...`,
step: 'process',
total: adifQSOs.length,
processed: 0,
});
let addedCount = 0;
let updatedCount = 0;
const errors = [];
// Process each QSO
for (let i = 0; i < adifQSOs.length; i++) {
const qsoData = adifQSOs[i];
try {
const dbQSO = convertQSODatabaseFormat(qsoData, userId);
// Check if QSO already exists
const { eq, and } = await import('drizzle-orm');
const existing = await db
.select()
.from(qsos)
.where(
and(
eq(qsos.userId, userId),
eq(qsos.callsign, dbQSO.callsign),
eq(qsos.qsoDate, dbQSO.qsoDate),
eq(qsos.band, dbQSO.band),
eq(qsos.mode, dbQSO.mode)
)
)
.limit(1);
if (existing.length > 0) {
// Update existing QSO
await db
.update(qsos)
.set({
lotwQslRdate: dbQSO.lotwQslRdate,
lotwQslRstatus: dbQSO.lotwQslRstatus,
lotwSyncedAt: dbQSO.lotwSyncedAt,
})
.where(eq(qsos.id, existing[0].id));
updatedCount++;
} else {
// Insert new QSO
await db.insert(qsos).values(dbQSO);
addedCount++;
}
// Update progress every 10 QSOs
if ((i + 1) % 10 === 0) {
await updateJobProgress(jobId, {
processed: i + 1,
message: `Processed ${i + 1}/${adifQSOs.length} QSOs...`,
});
}
} catch (error) {
console.error(`[Job ${jobId}] ERROR processing QSO:`, error);
errors.push({
qso: qsoData,
error: error.message,
});
}
}
return {
success: true,
total: adifQSOs.length,
added: addedCount,
updated: updatedCount,
errors: errors.length > 0 ? errors : undefined,
};
} catch (error) {
// Check if it's a credential error
if (error.message.includes('Username/password incorrect')) {
throw new Error('Invalid LoTW credentials. Please check your username and password.');
}
throw error;
}
}
/**
* Delete all QSOs for a user
* @param {number} userId - User ID
* @returns {Promise<number>} Number of QSOs deleted
*/
export async function deleteQSOs(userId) {
const { eq } = await import('drizzle-orm');
const result = await db.delete(qsos).where(eq(qsos.userId, userId));
return result;
}
// Register the LoTW sync processor with the job queue
registerProcessor('lotw_sync', syncQSOsForJob);

View File

@@ -134,11 +134,45 @@ export const qsosAPI = {
getStats: () => apiRequest('/qsos/stats'),
/**
-* Sync QSOs from LoTW
-* @returns {Promise<Object>} Sync result
* Sync QSOs from LoTW (queues a job)
* @returns {Promise<Object>} Job information
*/
syncFromLoTW: () =>
apiRequest('/lotw/sync', {
method: 'POST',
}),
/**
* Delete all QSOs for authenticated user
* @returns {Promise<Object>} Delete result
*/
deleteAll: () =>
apiRequest('/qsos/all', {
method: 'DELETE',
}),
};
/**
* Jobs API
*/
export const jobsAPI = {
/**
* Get job status
* @param {number} jobId - Job ID
* @returns {Promise<Object>} Job status
*/
getStatus: (jobId) => apiRequest(`/jobs/${jobId}`),
/**
* Get user's active job
* @returns {Promise<Object>} Active job or null
*/
getActive: () => apiRequest('/jobs/active'),
/**
* Get user's recent jobs
* @param {number} limit - Maximum number of jobs to return
* @returns {Promise<Object>} List of jobs
*/
getRecent: (limit = 10) => apiRequest(`/jobs?limit=${limit}`),
};

View File

@@ -1,14 +1,23 @@
<script>
-import { onMount } from 'svelte';
import { onMount, onDestroy } from 'svelte';
-import { qsosAPI } from '$lib/api.js';
import { qsosAPI, jobsAPI } from '$lib/api.js';
import { auth } from '$lib/stores.js';
let qsos = [];
let stats = null;
let loading = true;
let error = null;
-let syncing = false;
-let syncResult = null;
// Job polling state
let syncJobId = null;
let syncStatus = null;
let syncProgress = null;
let pollingInterval = null;
// Delete confirmation state
let showDeleteConfirm = false;
let deleteConfirmText = '';
let deleting = false;
let filters = {
band: '',
@@ -21,6 +30,15 @@
if (!$auth.user) return;
await loadQSOs();
await loadStats();
// Check for active job on mount
await checkActiveJob();
});
// Clean up polling interval on unmount
onDestroy(() => {
if (pollingInterval) {
clearInterval(pollingInterval);
}
});
async function loadQSOs() {
@@ -51,32 +69,99 @@
}
}
-async function handleSync() {
-console.log('handleSync called!');
-try {
-syncing = true;
-syncResult = null;
-console.log('Calling qsosAPI.syncFromLoTW...');
-const result = await qsosAPI.syncFromLoTW();
-console.log('Sync result:', result);
-syncResult = result;
-// Reload QSOs and stats after sync
-await loadQSOs();
-await loadStats();
-} catch (err) {
-console.error('Sync error:', err);
-syncResult = {
-success: false,
-error: err.message
-};
-} finally {
-syncing = false;
-console.log('Sync complete, syncing = false');
-}
-}
async function checkActiveJob() {
try {
const response = await jobsAPI.getActive();
if (response.job) {
syncJobId = response.job.id;
syncStatus = response.job.status;
// Start polling if job is running
if (syncStatus === 'running' || syncStatus === 'pending') {
startPolling(response.job.id);
}
}
} catch (err) {
console.error('Failed to check active job:', err);
}
}
async function startPolling(jobId) {
syncJobId = jobId;
syncStatus = 'running';
// Clear any existing interval
if (pollingInterval) {
clearInterval(pollingInterval);
}
// Poll every 2 seconds
pollingInterval = setInterval(async () => {
try {
const response = await jobsAPI.getStatus(jobId);
const job = response.job;
syncStatus = job.status;
syncProgress = job.result?.progress ? job.result : null;
if (job.status === 'completed') {
clearInterval(pollingInterval);
pollingInterval = null;
syncJobId = null;
syncProgress = null;
syncStatus = null;
// Reload QSOs and stats
await loadQSOs();
await loadStats();
// Show success message
syncResult = {
success: true,
...job.result,
};
} else if (job.status === 'failed') {
clearInterval(pollingInterval);
pollingInterval = null;
syncJobId = null;
syncProgress = null;
syncStatus = null;
// Show error message
syncResult = {
success: false,
error: job.error || 'Sync failed',
};
}
} catch (err) {
console.error('Failed to poll job status:', err);
// Don't stop polling on error, might be temporary
}
}, 2000);
}
async function handleSync() {
try {
const response = await qsosAPI.syncFromLoTW();
if (response.jobId) {
// Job was queued successfully
startPolling(response.jobId);
} else if (response.existingJob) {
// There's already an active job
startPolling(response.existingJob);
} else {
throw new Error(response.error || 'Failed to queue sync job');
}
} catch (err) {
syncResult = {
success: false,
error: err.message,
};
}
}
let syncResult = null;
async function applyFilters() {
await loadQSOs();
}
@@ -105,6 +190,33 @@
return timeStr.substring(0, 2) + ':' + timeStr.substring(2, 4);
}
async function handleDeleteAll() {
if (deleteConfirmText !== 'DELETE') {
alert('Please type DELETE to confirm');
return;
}
try {
deleting = true;
const response = await qsosAPI.deleteAll();
// Reload QSOs and stats
await loadQSOs();
await loadStats();
// Reset delete confirmation
showDeleteConfirm = false;
deleteConfirmText = '';
// Show success message
alert(`Deleted ${response.deleted} QSO(s)`);
} catch (err) {
alert(`Failed to delete QSOs: ${err.message}`);
} finally {
deleting = false;
}
}
const bands = ['160m', '80m', '60m', '40m', '30m', '20m', '17m', '15m', '12m', '10m', '6m', '2m', '70cm'];
const modes = ['CW', 'SSB', 'AM', 'FM', 'RTTY', 'PSK31', 'FT8', 'FT4', 'JT65', 'JT9'];
</script>
@@ -116,15 +228,40 @@
<div class="container">
<div class="header">
<h1>QSO Log</h1>
-<button
-class="btn btn-primary"
-on:click={handleSync}
-disabled={syncing}
->
-{syncing ? 'Syncing from LoTW...' : 'Sync from LoTW'}
-</button>
<div class="header-buttons">
{#if qsos.length > 0}
<button
class="btn btn-danger"
on:click={() => showDeleteConfirm = true}
disabled={syncStatus === 'running' || syncStatus === 'pending' || deleting}
>
Clear All QSOs
</button>
{/if}
<button
class="btn btn-primary"
on:click={handleSync}
disabled={syncStatus === 'running' || syncStatus === 'pending' || deleting}
>
{#if syncStatus === 'running' || syncStatus === 'pending'}
Syncing...
{:else}
Sync from LoTW
{/if}
</button>
</div>
</div>
{#if syncProgress}
<div class="alert alert-info">
<h3>Syncing from LoTW...</h3>
<p>{syncProgress.message || 'Processing...'}</p>
{#if syncProgress.total}
<p>Progress: {syncProgress.processed || 0} / {syncProgress.total}</p>
{/if}
</div>
{/if}
{#if syncResult}
<div class="alert {syncResult.success ? 'alert-success' : 'alert-error'}">
{#if syncResult.success}
@@ -141,6 +278,37 @@
</div>
{/if}
{#if showDeleteConfirm}
<div class="alert alert-error">
<h3>⚠️ Delete All QSOs?</h3>
<p>This will permanently delete all {qsos.length} QSOs. This action cannot be undone!</p>
<p>Type <strong>DELETE</strong> to confirm:</p>
<input
type="text"
bind:value={deleteConfirmText}
placeholder="Type DELETE"
class="delete-input"
disabled={deleting}
/>
<div class="delete-buttons">
<button
on:click={handleDeleteAll}
disabled={deleteConfirmText !== 'DELETE' || deleting}
class="btn btn-danger"
>
{deleting ? 'Deleting...' : 'Delete All QSOs'}
</button>
<button
on:click={() => { showDeleteConfirm = false; deleteConfirmText = ''; }}
disabled={deleting}
class="btn btn-secondary"
>
Cancel
</button>
</div>
</div>
{/if}
{#if stats}
<div class="stats-grid">
<div class="stat-card">
@@ -249,6 +417,8 @@
justify-content: space-between;
align-items: center;
margin-bottom: 2rem;
flex-wrap: wrap;
gap: 1rem;
}
.header h1 {
@@ -256,6 +426,11 @@
color: #333;
}
.header-buttons {
display: flex;
gap: 1rem;
}
.stats-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
@@ -349,6 +524,20 @@
background-color: #5a6268;
}
.btn-danger {
background-color: #dc3545;
color: white;
}
.btn-danger:hover:not(:disabled) {
background-color: #c82333;
}
.btn-danger:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.btn-small {
padding: 0.25rem 0.75rem;
font-size: 0.875rem;
@@ -364,8 +553,8 @@
border-radius: 8px;
margin-bottom: 2rem;
display: flex;
-justify-content: space-between;
-align-items: flex-start;
flex-direction: column;
gap: 0.5rem;
}
.alert-success {
@@ -380,8 +569,14 @@
color: #721c24;
}
.alert-info {
background-color: #d1ecf1;
border: 1px solid #bee5eb;
color: #0c5460;
}
.alert h3 {
-margin: 0 0 0.5rem 0;
margin: 0;
}
.alert p {
@@ -393,6 +588,21 @@
margin-top: 0.5rem;
}
.delete-input {
padding: 0.5rem;
border: 1px solid #ddd;
border-radius: 4px;
font-size: 1rem;
margin: 0.5rem 0;
width: 200px;
}
.delete-buttons {
display: flex;
gap: 1rem;
margin-top: 0.5rem;
}
.qso-table-container {
overflow-x: auto;
border: 1px solid #e0e0e0;