diff --git a/CLAUDE.md b/CLAUDE.md
index 764c1dd..dc05337 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -104,3 +104,126 @@ bun --hot ./index.ts
 ```
 
 For more information, read the Bun API docs in `node_modules/bun-types/docs/**.mdx`.
+
+## Project: Quickawards by DJ7NT
+
+Quickawards is an amateur radio award tracking application that calculates progress toward various awards based on QSO (contact) data.
+
+### Award System Architecture
+
+The award system is JSON-driven and lives in the `award-definitions/` directory. Each award has:
+- `id`: Unique identifier (e.g., "dld", "dxcc")
+- `name`: Display name
+- `description`: Short description
+- `caption`: Detailed explanation
+- `category`: Award category ("dxcc", "darc", etc.)
+- `rules`: Award calculation logic
+
+### Award Rule Types
+
+1. **`entity`**: Count unique entities (DXCC countries, states, grid squares)
+   - `entityType`: What to count ("dxcc", "state", "grid", "callsign")
+   - `target`: Number required for the award
+   - `filters`: Optional filters (band, mode, etc.)
+   - `displayField`: Optional field to display
+
+2. **`dok`**: Count unique DOK (DARC Ortsverband Kennung) combinations
+   - `target`: Number required
+   - `confirmationType`: "dcl" (DARC Community Logbook)
+   - Counts unique (DOK, band, mode) combinations
+   - Only DCL-confirmed QSOs count
+
+3. **`points`**: Point-based awards
+   - `stations`: Array of {callsign, points}
+   - `target`: Points required
+   - `countMode`: "perStation", "perBandMode", or "perQso"
+
+4. **`filtered`**: Filtered version of another award
+   - `baseRule`: The base entity rule
+   - `filters`: Additional filters to apply
+
+5. **`counter`**: Count QSOs or callsigns
+
+### Key Files
+
+**Backend Award Service**: `src/backend/services/awards.service.js`
+- `getAllAwards()`: Returns all available award definitions
+- `calculateAwardProgress(userId, award, options)`: Main calculation function
+- `calculateDOKAwardProgress(userId, award, options)`: DOK-specific calculation
+- `calculatePointsAwardProgress(userId, award, options)`: Point-based calculation
+- `getAwardEntityBreakdown(userId, awardId)`: Detailed entity breakdown
+- `getAwardProgressDetails(userId, awardId)`: Progress with details
+
+**Database Schema**: `src/backend/db/schema/index.js`
+- QSO fields include: `darcDok`, `dclQslRstatus`, `dclQslRdate`
+- DOK fields support DLD award tracking
+- DCL confirmation fields are separate from the LoTW ones
+
+**Award Definitions**: `award-definitions/*.json`
+- Add new awards by creating JSON definition files
+- Add the filename to the `loadAwardDefinitions()` file list in awards.service.js
+
+### DLD Award Implementation (COMPLETED)
+
+The DLD (Deutschland Diplom) award was recently implemented:
+
+**Definition**: `award-definitions/dld.json`
+```json
+{
+  "id": "dld",
+  "name": "DLD",
+  "description": "Deutschland Diplom - Confirm 100 unique DOKs on different bands/modes",
+  "caption": "Contact and confirm stations with 100 unique DOKs (DARC Ortsverband Kennung) on different band/mode combinations.",
+  "category": "darc",
+  "rules": {
+    "type": "dok",
+    "target": 100,
+    "confirmationType": "dcl",
+    "displayField": "darcDok"
+  }
+}
+```
+
+**Implementation Details**:
+- Function: `calculateDOKAwardProgress()` in `src/backend/services/awards.service.js` (lines 173-268)
+- Counts unique (DOK, band, mode) combinations
+- Only DCL-confirmed QSOs count (`dclQslRstatus === 'Y'`)
+- Each unique DOK on each unique band/mode counts separately
+- Returns worked and confirmed counts plus entity breakdowns
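+
+A minimal sketch of that counting rule (illustrative only — the real `calculateDOKAwardProgress()` queries the database via Drizzle rather than an in-memory array):
+
+```javascript
+// Hypothetical helper showing how the DLD rule counts progress.
+function countDokBandModeCombos(qsoRows) {
+  const combos = new Set();
+  for (const qso of qsoRows) {
+    if (qso.dclQslRstatus !== 'Y') continue; // only DCL-confirmed QSOs count
+    if (!qso.darcDok) continue;              // partner station must have a DOK
+    combos.add(`${qso.darcDok}|${qso.band}|${qso.mode}`);
+  }
+  return combos.size; // compared against rules.target (100 for DLD)
+}
+```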
+**Database Fields Used**:
+- `darcDok`: DOK identifier (e.g., "F03", "P30", "G20")
+- `band`: Band (e.g., "80m", "40m", "20m")
+- `mode`: Mode (e.g., "CW", "SSB", "FT8")
+- `dclQslRstatus`: DCL confirmation status ('Y' = confirmed)
+- `dclQslRdate`: DCL confirmation date
+
+**Documentation**: See `docs/DOCUMENTATION.md` for the complete documentation, including the DLD award example.
+
+### Adding New Awards
+
+To add a new award:
+
+1. Create a JSON definition in `award-definitions/`
+2. Add the filename to `loadAwardDefinitions()` in `src/backend/services/awards.service.js`
+3. If a new rule type is needed, add a calculation function
+4. Add type handling to the `calculateAwardProgress()` switch statement
+5. Add type handling to `getAwardEntityBreakdown()` if needed
+6. Update the documentation in `docs/DOCUMENTATION.md`
+7. Test with sample QSO data
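+
+As an example, a hypothetical `entity`-rule definition might look like this (the id, values, and filter shape are illustrative, not an existing award):
+
+```json
+{
+  "id": "example-20m-dxcc",
+  "name": "Example 20m DXCC",
+  "description": "Work 50 DXCC entities on 20m",
+  "caption": "Contact 50 unique DXCC entities on the 20m band.",
+  "category": "dxcc",
+  "rules": {
+    "type": "entity",
+    "entityType": "dxcc",
+    "target": 50,
+    "filters": { "band": "20m" }
+  }
+}
+```
+
+The new filename then still has to be registered in `loadAwardDefinitions()` (step 2 above).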
+### Confirmation Systems
+
+- **LoTW (Logbook of The World)**: ARRL's confirmation system
+  - Fields: `lotwQslRstatus`, `lotwQslRdate`
+  - Used for DXCC, WAS, VUCC, and most other awards
+
+- **DCL (DARC Community Logbook)**: DARC's confirmation system
+  - Fields: `dclQslRstatus`, `dclQslRdate`
+  - Required for the DLD award
+  - Specific to German amateur radio
+
+### Recent Commits
+
+- `c982dcd`: feat: implement DLD (Deutschland Diplom) award
+- `322ccaf`: docs: add DLD (Deutschland Diplom) award documentation
diff --git a/src/backend/services/dcl.service.js b/src/backend/services/dcl.service.js
index cd4ef3c..b99391b 100644
--- a/src/backend/services/dcl.service.js
+++ b/src/backend/services/dcl.service.js
@@ -2,184 +2,289 @@ import { db, logger } from '../config.js';
 import { qsos } from '../db/schema/index.js';
 import { max, sql, eq, and, desc } from 'drizzle-orm';
 import { updateJobProgress } from './job-queue.service.js';
+import { parseDCLResponse, normalizeBand, normalizeMode } from '../utils/adif-parser.js';
 
 /**
  * DCL (DARC Community Logbook) Service
  *
- * NOTE: DCL does not currently have a public API for downloading QSOs.
- * This service is prepared as a stub for when DCL adds API support.
- *
- * When DCL provides an API, implement:
- * - fetchQSOsFromDCL() - Download QSOs from DCL
- * - syncQSOs() - Sync QSOs to database
- * - getLastDCLQSLDate() - Get last QSL date for incremental sync
- *
  * DCL Information:
  * - Website: https://dcl.darc.de/
+ * - API: Coming soon (currently in development)
  * - ADIF Export: https://dcl.darc.de/dml/export_adif_form.php (manual only)
  * - DOK fields: MY_DARC_DOK (user's DOK), DARC_DOK (partner's DOK)
+ *
+ * Expected API Response Format:
+ * {
+ *   "adif": "<ADIF_VER:5>3.1.3\\n<CREATED_TIMESTAMP:15>20260117 095453\\n<EOH>\\n..."
+ * }
  */
 
+const REQUEST_TIMEOUT = 60000;
+
 /**
- * Fetch QSOs from DCL
+ * Fetch QSOs from DCL API
  *
- * TODO: Implement when DCL provides a download API
- * Expected implementation:
- * - Use DCL API key for authentication
- * - Fetch ADIF data with confirmations
- * - Parse and return QSO records
+ * When DCL provides their API, update the URL and parameters.
+ * Expected response format: { "adif": "<ADIF data string>" }
  *
  * @param {string} dclApiKey - DCL API key
  * @param {Date|null} sinceDate - Last sync date for incremental sync
  * @returns {Promise<Array>} Array of parsed QSO records
  */
 export async function fetchQSOsFromDCL(dclApiKey, sinceDate = null) {
-  logger.info('DCL sync not yet implemented - API endpoint not available', {
+  logger.info('Fetching QSOs from DCL', {
+    hasApiKey: !!dclApiKey,
     sinceDate: sinceDate?.toISOString(),
   });
 
-  throw new Error('DCL download API is not yet available. DCL does not currently provide a public API for downloading QSOs. Use the manual ADIF export at https://dcl.darc.de/dml/export_adif_form.php');
+  // TODO: Update URL when DCL publishes their API endpoint
+  const url = 'https://dcl.darc.de/api/export'; // Placeholder URL
 
-  /*
-   * FUTURE IMPLEMENTATION (when DCL provides API):
-   *
-   * const url = 'https://dcl.darc.de/api/...'; // TBA
-   *
-   * const params = new URLSearchParams({
-   *   api_key: dclApiKey,
-   *   format: 'adif',
-   *   qsl: 'yes',
-   * });
-   *
-   * if (sinceDate) {
-   *   const dateStr = sinceDate.toISOString().split('T')[0].replace(/-/g, '');
-   *   params.append('qso_qslsince', dateStr);
-   * }
-   *
-   * const response = await fetch(`${url}?${params}`, {
-   *   headers: {
-   *     'Accept': 'text/plain',
-   *   },
-   *   timeout: REQUEST_TIMEOUT,
-   * });
-   *
-   * if (!response.ok) {
-   *   throw new Error(`DCL API error: ${response.status}`);
-   * }
-   *
-   * const adifData = await response.text();
-   * return parseADIF(adifData);
-   */
+  const params = new URLSearchParams({
+    api_key: dclApiKey,
+    format: 'json',
+    qsl: 'yes',
+  });
+
+  // Add date filter for incremental sync if provided
+  if (sinceDate) {
+    const dateStr = sinceDate.toISOString().split('T')[0].replace(/-/g, '');
+    params.append('qsl_since', dateStr);
+  }
+
+  try {
+    const controller = new AbortController();
+    const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT);
+
+    const response = await fetch(`${url}?${params}`, {
+      signal: controller.signal,
+      headers: {
+        'Accept': 'application/json',
+      },
+    });
+
+    clearTimeout(timeoutId);
+
+    if (!response.ok) {
+      if (response.status === 401) {
+        throw new Error('Invalid DCL API key. Please check your DCL credentials in Settings.');
+      } else if (response.status === 404) {
+        throw new Error('DCL API endpoint not found. The DCL API may not be available yet.');
+      } else {
+        throw new Error(`DCL API error: ${response.status} ${response.statusText}`);
+      }
+    }
+
+    const data = await response.json();
+
+    // Parse the DCL response format
+    const qsos = parseDCLResponse(data);
+
+    logger.info('Successfully fetched QSOs from DCL', {
+      total: qsos.length,
+      hasConfirmations: qsos.filter(q => q.dcl_qsl_rcvd === 'Y').length,
+    });
+
+    return qsos;
+
+  } catch (error) {
+    if (error.name === 'AbortError') {
+      throw new Error('DCL API request timed out. Please try again.');
+    }
+
+    logger.error('Failed to fetch from DCL', {
+      error: error.message,
+      url: url.replace(/api_key=[^&]+/, 'api_key=***'),
+    });
+
+    throw error;
+  }
 }
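+
+// Illustrative usage (assumes the placeholder endpoint above; the record shown is made up):
+//   const records = await fetchQSOsFromDCL(userApiKey, new Date('2025-01-01'));
+//   // -> [{ call: 'DK0MU', band: '80m', mode: 'CW', darc_dok: 'F03', dcl_qsl_rcvd: 'Y', ... }]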
 
 /**
- * Parse ADIF data from DCL
+ * Parse DCL API response from JSON
+ * This function exists for testing with example payloads before the DCL API is available
  *
- * TODO: Implement ADIF parser for DCL format
- * Should handle DCL-specific fields:
- * - MY_DARC_DOK
- * - DARC_DOK
- *
- * @param {string} adifData - Raw ADIF data
+ * @param {Object} jsonResponse - JSON response in DCL format
  * @returns {Array} Array of parsed QSO records
  */
-function parseADIF(adifData) {
-  // TODO: Implement ADIF parser
-  // Should parse standard ADIF fields plus DCL-specific fields:
-  // - MY_DARC_DOK (user's own DOK)
-  // - DARC_DOK (QSO partner's DOK)
-  // - QSL_DATE (confirmation date from DCL)
+export function parseDCLJSONResponse(jsonResponse) {
+  return parseDCLResponse(jsonResponse);
+}
 
-  return [];
+/**
+ * Convert DCL ADIF QSO to database format
+ * @param {Object} adifQSO - Parsed ADIF QSO record
+ * @param {number} userId - User ID
+ * @returns {Object} Database-ready QSO object
+ */
+function convertQSODatabaseFormat(adifQSO, userId) {
+  return {
+    userId,
+    callsign: adifQSO.call || '',
+    qsoDate: adifQSO.qso_date || '',
+    timeOn: adifQSO.time_on || adifQSO.time_off || '000000',
+    band: normalizeBand(adifQSO.band),
+    mode: normalizeMode(adifQSO.mode),
+    freq: adifQSO.freq ? parseInt(adifQSO.freq) : null,
+    freqRx: adifQSO.freq_rx ? parseInt(adifQSO.freq_rx) : null,
+    entity: adifQSO.country || adifQSO.dxcc_country || '',
+    entityId: adifQSO.dxcc ? parseInt(adifQSO.dxcc) : null,
+    grid: adifQSO.gridsquare || '',
+    gridSource: adifQSO.gridsquare ? 'DCL' : null,
+    continent: adifQSO.continent || '',
+    cqZone: adifQSO.cq_zone ? parseInt(adifQSO.cq_zone) : null,
+    ituZone: adifQSO.itu_zone ? parseInt(adifQSO.itu_zone) : null,
+    state: adifQSO.state || adifQSO.us_state || '',
+    county: adifQSO.county || '',
+    satName: adifQSO.sat_name || '',
+    satMode: adifQSO.sat_mode || '',
+    myDarcDok: adifQSO.my_darc_dok || '',
+    darcDok: adifQSO.darc_dok || '',
+    // DCL confirmation fields
+    dclQslRdate: adifQSO.dcl_qslrdate || '',
+    dclQslRstatus: adifQSO.dcl_qsl_rcvd || 'N',
+  };
 }
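+
+// Illustrative mapping (made-up values): a parsed ADIF record such as
+//   { call: 'DK0MU', band: '80M', mode: 'cw', darc_dok: 'F03', dcl_qsl_rcvd: 'Y' }
+// would become { callsign: 'DK0MU', band: '80m', mode: 'CW', darcDok: 'F03', dclQslRstatus: 'Y', ... }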
 
 /**
  * Sync QSOs from DCL to database
- *
- * TODO: Implement when DCL provides API
+ * Updates existing QSOs with DCL confirmation data
  *
  * @param {number} userId - User ID
  * @param {string} dclApiKey - DCL API key
- * @param {Date|null} sinceDate - Last sync date
+ * @param {Date|null} sinceDate - Last sync date for incremental sync
  * @param {number|null} jobId - Job ID for progress tracking
  * @returns {Promise<Object>} Sync results
  */
 export async function syncQSOs(userId, dclApiKey, sinceDate = null, jobId = null) {
-  logger.info('DCL sync not yet implemented', { userId, sinceDate, jobId });
+  logger.info('Starting DCL sync', { userId, sinceDate, jobId });
 
-  throw new Error('DCL download API is not yet available');
+  if (jobId) {
+    await updateJobProgress(jobId, {
+      message: 'Fetching QSOs from DCL...',
+      step: 'fetch',
+    });
+  }
 
-  /*
-   * FUTURE IMPLEMENTATION:
-   *
-   * try {
-   *   const adifQSOs = await fetchQSOsFromDCL(dclApiKey, sinceDate);
-   *
-   *   let addedCount = 0;
-   *   let updatedCount = 0;
-   *   let errors = [];
-   *
-   *   for (const adifQSO of adifQSOs) {
-   *     try {
-   *       // Map ADIF fields to database schema
-   *       const qsoData = mapADIFToDB(adifQSO);
-   *
-   *       // Check if QSO already exists
-   *       const existing = await db.select()
-   *         .from(qsos)
-   *         .where(
-   *           and(
-   *             eq(qsos.userId, userId),
-   *             eq(qsos.callsign, adifQSO.call),
-   *             eq(qsos.qsoDate, adifQSO.qso_date),
-   *             eq(qsos.timeOn, adifQSO.time_on)
-   *           )
-   *         )
-   *         .limit(1);
-   *
-   *       if (existing.length > 0) {
-   *         // Update existing QSO with DCL confirmation
-   *         await db.update(qsos)
-   *           .set({
-   *             dclQslRdate: adifQSO.qslrdate || null,
-   *             dclQslRstatus: adifQSO.qslrdate ? 'Y' : 'N',
-   *             darcDok: adifQSO.darc_dok || null,
-   *             myDarcDok: adifQSO.my_darc_dok || null,
-   *           })
-   *           .where(eq(qsos.id, existing[0].id));
-   *         updatedCount++;
-   *       } else {
-   *         // Insert new QSO
-   *         await db.insert(qsos).values({
-   *           userId,
-   *           ...qsoData,
-   *           dclQslRdate: adifQSO.qslrdate || null,
-   *           dclQslRstatus: adifQSO.qslrdate ? 'Y' : 'N',
-   *         });
-   *         addedCount++;
-   *       }
-   *     } catch (err) {
-   *       logger.error('Failed to process QSO', { error: err.message, qso: adifQSO });
-   *       errors.push(err.message);
-   *     }
-   *   }
-   *
-   *   const result = {
-   *     success: true,
-   *     total: adifQSOs.length,
-   *     added: addedCount,
-   *     updated: updatedCount,
-   *     errors,
-   *   };
-   *
-   *   logger.info('DCL sync completed', { ...result, jobId });
-   *   return result;
-   *
-   * } catch (error) {
-   *   logger.error('DCL sync failed', { error: error.message, userId, jobId });
-   *   return { success: false, error: error.message, total: 0, added: 0, updated: 0 };
-   * }
-   */
+  try {
+    const adifQSOs = await fetchQSOsFromDCL(dclApiKey, sinceDate);
+
+    if (!Array.isArray(adifQSOs) || adifQSOs.length === 0) {
+      logger.info('No QSOs found in DCL response', { userId });
+      return {
+        success: true,
+        total: 0,
+        added: 0,
+        updated: 0,
+        message: 'No QSOs found in DCL',
+      };
+    }
+
+    if (jobId) {
+      await updateJobProgress(jobId, {
+        message: `Processing ${adifQSOs.length} QSOs from DCL...`,
+        step: 'process',
+        total: adifQSOs.length,
+        processed: 0,
+      });
+    }
+
+    let addedCount = 0;
+    let updatedCount = 0;
+    const errors = [];
+
+    for (let i = 0; i < adifQSOs.length; i++) {
+      const adifQSO = adifQSOs[i];
+
+      try {
+        const dbQSO = convertQSODatabaseFormat(adifQSO, userId);
+
+        // Check if QSO already exists (match by callsign, date, time, band, mode)
+        const existing = await db
+          .select()
+          .from(qsos)
+          .where(
+            and(
+              eq(qsos.userId, userId),
+              eq(qsos.callsign, dbQSO.callsign),
+              eq(qsos.qsoDate, dbQSO.qsoDate),
+              eq(qsos.timeOn, dbQSO.timeOn),
+              eq(qsos.band, dbQSO.band),
+              eq(qsos.mode, dbQSO.mode)
+            )
+          )
+          .limit(1);
+
+        if (existing.length > 0) {
+          // Update existing QSO with DCL confirmation and DOK data
+          await db
+            .update(qsos)
+            .set({
+              dclQslRdate: dbQSO.dclQslRdate,
+              dclQslRstatus: dbQSO.dclQslRstatus,
+              darcDok: dbQSO.darcDok || existing[0].darcDok,
+              myDarcDok: dbQSO.myDarcDok || existing[0].myDarcDok,
+              grid: dbQSO.grid || existing[0].grid,
+              gridSource: dbQSO.gridSource || existing[0].gridSource,
+            })
+            .where(eq(qsos.id, existing[0].id));
+          updatedCount++;
+        } else {
+          // Insert new QSO
+          await db.insert(qsos).values(dbQSO);
+          addedCount++;
+        }
+
+        // Update job progress every 10 QSOs
+        if (jobId && (i + 1) % 10 === 0) {
+          await updateJobProgress(jobId, {
+            processed: i + 1,
+            message: `Processed ${i + 1}/${adifQSOs.length} QSOs from DCL...`,
+          });
+        }
+      } catch (error) {
+        logger.error('Failed to process DCL QSO', {
+          error: error.message,
+          qso: adifQSO,
+          userId,
+        });
+        errors.push({ qso: adifQSO, error: error.message });
+      }
+    }
+
+    const result = {
+      success: true,
+      total: adifQSOs.length,
+      added: addedCount,
+      updated: updatedCount,
+      confirmed: adifQSOs.filter(q => q.dcl_qsl_rcvd === 'Y').length,
+      errors: errors.length > 0 ? errors : undefined,
+    };
+
+    logger.info('DCL sync completed', {
+      ...result,
+      userId,
+      jobId,
+    });
+
+    return result;
+
+  } catch (error) {
+    logger.error('DCL sync failed', {
+      error: error.message,
+      userId,
+      jobId,
+    });
+
+    return {
+      success: false,
+      error: error.message,
+      total: 0,
+      added: 0,
+      updated: 0,
+    };
+  }
 }
 
 /**
diff --git a/src/backend/services/lotw.service.js b/src/backend/services/lotw.service.js
index 3401ee8..483eaf5 100644
--- a/src/backend/services/lotw.service.js
+++ b/src/backend/services/lotw.service.js
@@ -2,6 +2,7 @@ import { db, logger } from '../config.js';
 import { qsos } from '../db/schema/index.js';
 import { max, sql, eq, and, desc } from 'drizzle-orm';
 import { updateJobProgress } from './job-queue.service.js';
+import { parseADIF, normalizeBand, normalizeMode } from '../utils/adif-parser.js';
 
 /**
  * LoTW (Logbook of the World) Service
@@ -150,39 +151,6 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
   };
 }
 
-/**
- * Parse ADIF (Amateur Data Interchange Format) data
- */
-function parseADIF(adifData) {
-  const qsos = [];
-  const records = adifData.split('<eor>');
-
-  for (const record of records) {
-    if (!record.trim()) continue;
-    if (record.trim().startsWith('<') && !record.includes('<call')) continue;
-
-    const qso = {};
-    const regex = /<(\w+):(\d+)(?::\w+)?>([\s\S])/gi;
-    let match;
-
-    while ((match = regex.exec(record)) !== null) {
-      const [fullMatch, fieldName, lengthStr, firstChar] = match;
-      const length = parseInt(lengthStr, 10);
-      const valueStart = match.index + fullMatch.length - 1;
-      const value = record.substring(valueStart, valueStart + length);
-
-      qso[fieldName.toLowerCase()] = value.trim();
-      regex.lastIndex = valueStart + length;
-    }
-
-    if (Object.keys(qso).length > 0 && (qso.call || qso.call)) {
-      qsos.push(qso);
-    }
-  }
-
-  return qsos;
-}
-
 /**
  * Convert ADIF QSO to database format
  */
@@ -211,35 +179,6 @@ function convertQSODatabaseFormat(adifQSO, userId) {
   };
 }
 
-function normalizeBand(band) {
-  if (!band) return null;
-
-  const bandMap = {
-    '160m': '160m', '80m': '80m', '60m': '60m', '40m': '40m',
-    '30m': '30m', '20m': '20m', '17m': '17m', '15m': '15m',
-    '12m': '12m', '10m': '10m', '6m': '6m', '4m': '4m',
-    '2m': '2m', '1.25m': '1.25m', '70cm': '70cm', '33cm': '33cm',
-    '23cm': '23cm', '13cm': '13cm', '9cm': '9cm', '6cm': '6cm',
-    '3cm': '3cm', '1.2cm': '1.2cm', 'mm': 'mm',
-  };
-
-  return bandMap[band.toLowerCase()] || band;
-}
-
-function normalizeMode(mode) {
-  if (!mode) return '';
-
-  const modeMap = {
-    'cw': 'CW', 'ssb': 'SSB', 'am': 'AM', 'fm': 'FM',
-    'rtty': 'RTTY', 'psk31': 'PSK31', 'psk63': 'PSK63',
-    'ft8': 'FT8', 'ft4': 'FT4', 'jt65': 'JT65', 'jt9': 'JT9',
-    'js8': 'JS8', 'mfsk': 'MFSK', 'olivia': 'OLIVIA',
-  };
-
-  const normalized = modeMap[mode.toLowerCase()];
-  return normalized || mode.toUpperCase();
-}
-
 /**
  * Sync QSOs from LoTW to database
  * @param {number} userId - User ID
diff --git a/src/backend/utils/adif-parser.js b/src/backend/utils/adif-parser.js
new file mode 100644
index 0000000..b0cc798
--- /dev/null
+++ b/src/backend/utils/adif-parser.js
@@ -0,0 +1,145 @@
+/**
+ * ADIF (Amateur Data Interchange Format) Parser
+ * Handles standard ADIF format from LoTW, DCL, and other sources
+ *
+ * ADIF format: <FIELDNAME:LENGTH>value
+ * Example: <CALL:5>DK0MU<BAND:3>80m<QSO_DATE:8>20250621
+ */
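+
+// Illustrative example of the parser's input/output (values are made up):
+//   parseADIF('<CALL:5>DK0MU<BAND:3>80m<MODE:2>CW<QSO_DATE:8>20250621<eor>')
+//   // -> [{ call: 'DK0MU', band: '80m', mode: 'CW', qso_date: '20250621' }]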
+
+/**
+ * Parse ADIF data into array of QSO records
+ * @param {string} adifData - Raw ADIF data string
+ * @returns {Array} Array of parsed QSO records
+ */
+export function parseADIF(adifData) {
+  const qsos = [];
+  // Split by <eor> (end of record); tag names are matched case-insensitively per the ADIF spec
+  const records = adifData.split(/<eor>/i);
+
+  for (const record of records) {
+    if (!record.trim()) continue;
+
+    // Skip header records (the ADIF header carries no CALL field)
+    const trimmed = record.trim();
+    if (trimmed.startsWith('<') && !trimmed.toLowerCase().includes('<call:')) continue;
+
+    const qso = {};
+
+    // Match ADIF field tags: <FIELDNAME:LENGTH> or <FIELDNAME:LENGTH:TYPE>
+    const regex = /<([A-Za-z_0-9]+):(\d+)(?::[^>]*)?>/gi;
+    let match;
+
+    while ((match = regex.exec(record)) !== null) {
+      const [fullMatch, fieldName, lengthStr] = match;
+      const length = parseInt(lengthStr, 10);
+      const valueStart = match.index + fullMatch.length;
+
+      // Extract exactly 'length' characters from the string
+      const value = record.substring(valueStart, valueStart + length);
+
+      qso[fieldName.toLowerCase()] = value.trim();
+
+      // Update regex position to continue after the value
+      regex.lastIndex = valueStart + length;
+    }
+
+    // Only add if we have at least a callsign
+    if (Object.keys(qso).length > 0 && (qso.call || qso.callsign)) {
+      qsos.push(qso);
+    }
+  }
+
+  return qsos;
+}
+
+/**
+ * Parse DCL API response
+ * DCL returns JSON with an "adif" field containing ADIF data
+ * @param {Object} response - DCL API response
+ * @returns {Array} Array of parsed QSO records
+ */
+export function parseDCLResponse(response) {
+  if (!response || !response.adif) {
+    return [];
+  }
+
+  const adifData = response.adif;
+  const qsos = parseADIF(adifData);
+
+  // Pass DCL-specific fields through so downstream code can rely on them being present
+  return qsos.map(qso => ({
+    ...qso,
+    dcl_qsl_rcvd: qso.dcl_qsl_rcvd,
+    dcl_qslrdate: qso.dcl_qslrdate,
+    darc_dok: qso.darc_dok,
+    my_darc_dok: qso.my_darc_dok,
+  }));
+}
+
+/**
+ * Normalize band name to standard format
+ * @param {string} band - Band name
+ * @returns {string|null} Normalized band name
+ */
+export function normalizeBand(band) {
+  if (!band) return null;
+
+  const bandMap = {
+    '160m': '160m', '1800': '160m',
+    '80m': '80m', '3500': '80m', '3.5mhz': '80m',
+    '60m': '60m', '5mhz': '60m',
+    '40m': '40m', '7000': '40m', '7mhz': '40m',
+    '30m': '30m', '10100': '30m', '10mhz': '30m',
+    '20m': '20m', '14000': '20m', '14mhz': '20m',
+    '17m': '17m', '18100': '17m', '18mhz': '17m',
+    '15m': '15m', '21000': '15m', '21mhz': '15m',
+    '12m': '12m', '24890': '12m', '24mhz': '12m',
+    '10m': '10m', '28000': '10m', '28mhz': '10m',
+    '6m': '6m', '50000': '6m', '50mhz': '6m',
+    '4m': '4m', '70000': '4m', '70mhz': '4m',
+    '2m': '2m', '144000': '2m', '144mhz': '2m',
+    '1.25m': '1.25m', '222000': '1.25m', '222mhz': '1.25m',
+    '70cm': '70cm', '432000': '70cm', '432mhz': '70cm',
+    '33cm': '33cm', '902000': '33cm', '902mhz': '33cm',
+    '23cm': '23cm', '1296000': '23cm', '1296mhz': '23cm',
+  };
+
+  const normalized = bandMap[band.toLowerCase()];
+  return normalized || band;
+}
+
+/**
+ * Normalize mode name to standard format
+ * @param {string} mode - Mode name
+ * @returns {string} Normalized mode name
+ */
+export function normalizeMode(mode) {
+  if (!mode) return '';
+
+  const modeMap = {
+    'cw': 'CW',
+    'ssb': 'SSB', 'lsb': 'SSB', 'usb': 'SSB',
+    'am': 'AM',
+    'fm': 'FM',
+    'rtty': 'RTTY',
+    'psk31': 'PSK31',
+    'psk63': 'PSK63',
+    'ft8': 'FT8',
+    'ft4': 'FT4',
+    'jt65': 'JT65',
+    'jt9': 'JT9',
+    'js8': 'JS8',
+    'mfsk': 'MFSK',
+    'olivia': 'OLIVIA',
+    'sstv': 'SSTV',
+    'packet': 'PACKET',
+    'pactor': 'PACTOR',
+    'winlink': 'WINLINK',
+    'fax': 'FAX',
+    'hell': 'HELL',
+    'tor': 'TOR',
+  };
+
+  const normalized = modeMap[mode.toLowerCase()];
+  return normalized || mode.toUpperCase();
+}