Compare commits
24 Commits
docker ... 69b33720b3

| Author | SHA1 | Date |
|---|---|---|
| | 69b33720b3 | |
| | 648cf2c5a5 | |
| | cce520a00e | |
| | d9e0e462b9 | |
| | ebdd75e03f | |
| | 205b311244 | |
| | 6bc0a2f9b2 | |
| | 8550b91255 | |
| | a93d4ff85b | |
| | f3ee1be651 | |
| | 6c9aa1efe7 | |
| | 14c7319c9e | |
| | 5792a98dca | |
| | aa25d21c6b | |
| | e14da11a93 | |
| | dc34fc20b1 | |
| | c75e55d130 | |
| | 89edd07722 | |
| | dd3beef9af | |
| | 695000e35c | |
| | bdd8aa497d | |
| | 7c209e3270 | |
| | 6d3291e331 | |
| | c0a471f7c2 | |
`.dockerignore` (deleted)

@@ -1,61 +0,0 @@
# Dependencies
node_modules
# Note: bun.lock is needed by Dockerfile for --frozen-lockfile

# Environment
.env
.env.*
!.env.example

# Database - will be in volume mount
**/*.db
**/*.db-shm
**/*.db-wal

# Build outputs - built in container
src/frontend/build/
src/frontend/.svelte-kit/
src/frontend/dist/
build/
dist/

# IDE
.vscode
.idea
*.swp
*.swo

# OS
.DS_Store
Thumbs.db

# Git
.git/
.gitignore

# Documentation (keep docs in image but don't need in build context)
# README.md
docs/
*.md

# Logs
logs/
*.log
backend.log

# Tests
*.test.js
*.test.ts
coverage/

# Docker files
Dockerfile
docker-compose.yml
.dockerignore

# CI/CD
.github/
.gitlab-ci.yml

# Data directory (for volume mount)
data/
`.env.docker.example` (deleted)

@@ -1,26 +0,0 @@
# Docker Environment Configuration
# Copy this file to .env and update with your values

# ============================================
# Application Settings
# ============================================
NODE_ENV=production
PORT=3001
LOG_LEVEL=debug

# ============================================
# Security (IMPORTANT: Change in production!)
# ============================================
# Generate a secure JWT secret with: openssl rand -base64 32
JWT_SECRET=change-this-in-production-use-openssl-rand-base64-32

# ============================================
# CORS Configuration
# ============================================
# Your application's public URL (e.g., https://awards.example.com)
VITE_APP_URL=

# Comma-separated list of allowed origins for CORS
# Only needed if not using same domain deployment
# Example: https://awards.example.com,https://www.awards.example.com
ALLOWED_ORIGINS=
41 .env.example

@@ -1,22 +1,47 @@
# Application Configuration
# Copy this file to .env and update with your values

# Hostname for the application (e.g., https://awards.dj7nt.de)
# ===================================================================
# Environment
# ===================================================================
# Development: development
# Production: production
NODE_ENV=development

# Log Level (debug, info, warn, error)
# Development: debug
# Production: info
LOG_LEVEL=debug

# Server Port (default: 3001)
PORT=3001

# ===================================================================
# URLs
# ===================================================================
# Frontend URL (e.g., https://awards.dj7nt.de)
# Leave empty for development (uses localhost)
VITE_APP_URL=

# API Base URL (in production, can be same domain or separate)
# Leave empty to use relative paths (recommended for same-domain deployment)
# API Base URL (leave empty for same-domain deployment)
# Only set if API is on different domain
VITE_API_BASE_URL=

# Allowed CORS origins for backend (comma-separated)
# Only needed for production if not using same domain
# Add all domains that should access the API
# Example: https://awards.dj7nt.de,https://www.awards.dj7nt.de
ALLOWED_ORIGINS=

# JWT Secret (for production, use a strong random string)
# Generate with: openssl rand -base64 32
# ===================================================================
# Security
# ===================================================================
# JWT Secret (REQUIRED for production)
# Development: uses default if not set
# Production: Generate with: openssl rand -base64 32
JWT_SECRET=change-this-in-production

# Node Environment
NODE_ENV=development
# ===================================================================
# Database (Optional)
# ===================================================================
# Leave empty to use default SQLite database
# DATABASE_URL=file:/path/to/custom.db
`.env.production.template` (deleted)

@@ -1,30 +0,0 @@
# Production Configuration Template
# Copy this file to .env.production and update with your production values

# Application Environment
NODE_ENV=production

# Log Level (debug, info, warn, error)
# Recommended: info for production
LOG_LEVEL=info

# Server Port (default: 3001)
PORT=3001

# Frontend URL (e.g., https://awards.dj7nt.de)
VITE_APP_URL=https://awards.dj7nt.de

# API Base URL (leave empty for same-domain deployment)
VITE_API_BASE_URL=

# Allowed CORS origins (comma-separated)
# Add all domains that should access the API
ALLOWED_ORIGINS=https://awards.dj7nt.de,https://www.awards.dj7nt.de

# JWT Secret (REQUIRED - generate a strong secret!)
# Generate with: openssl rand -base64 32
JWT_SECRET=REPLACE_WITH_SECURE_RANDOM_STRING

# Database (if using external database)
# Leave empty to use default SQLite database
# DATABASE_URL=file:/path/to/production.db
452 CLAUDE.md

@@ -77,58 +77,6 @@ test("hello world", () => {
});
```

## Docker Deployment

The application supports Docker deployment with single-port architecture and host-mounted database persistence.

**Quick Start**:
```bash
# Create environment file
cp .env.docker.example .env

# Generate JWT secret
openssl rand -base64 32  # Add to .env as JWT_SECRET

# Start application
docker-compose up -d --build

# Access at http://localhost:3001
```

**Architecture**:
- **Single Port**: Port 3001 serves both API (`/api/*`) and frontend (all other routes)
- **Database Persistence**: SQLite database stored at `./data/award.db` on host
- **Auto-initialization**: Database created from template on first startup
- **Health Checks**: Built-in health monitoring at `/api/health`

**Key Docker Files**:
- `Dockerfile`: Multi-stage build using official Bun runtime
- `docker-compose.yml`: Stack orchestration with volume mounts
- `docker-entrypoint.sh`: Database initialization logic
- `.env.docker.example`: Environment variable template
- `DOCKER.md`: Complete deployment documentation

**Environment Variables**:
- `NODE_ENV`: Environment mode (default: production)
- `PORT`: Application port (default: 3001)
- `LOG_LEVEL`: Logging level (debug/info/warn/error)
- `JWT_SECRET`: JWT signing secret (required, change in production!)
- `VITE_APP_URL`: Your application's public URL
- `ALLOWED_ORIGINS`: CORS allowed origins (comma-separated)

**Database Management**:
- Database location: `./data/award.db` (host-mounted volume)
- Backups: `cp data/award.db data/award.db.backup.$(date +%Y%m%d)`
- Reset: `docker-compose down -v && docker-compose up -d`

**Important Notes**:
- Database persists across container restarts/recreations
- Frontend dependencies are reinstalled in container to ensure correct platform binaries
- Uses custom init script (`src/backend/scripts/init-db.js`) with `bun:sqlite`
- Architecture-agnostic (works on x86, ARM64, etc.)

For detailed documentation, see `DOCKER.md`.

## Frontend

Use HTML imports with `Bun.serve()`. Don't use `vite`. HTML imports fully support React, CSS, Tailwind.

@@ -222,6 +170,8 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
1. **`entity`**: Count unique entities (DXCC countries, states, grid squares)
   - `entityType`: What to count ("dxcc", "state", "grid", "callsign")
   - `target`: Number required for award
   - `allowed_bands`: Optional array of bands that count (e.g., `["160m", "80m", "40m", "30m", "20m", "17m", "15m", "12m", "10m"]` for HF only)
   - `satellite_only`: Optional boolean to only count satellite QSOs (QSOs with `satName` field)
   - `filters`: Optional filters (band, mode, etc.)
   - `displayField`: Optional field to display

@@ -231,7 +181,6 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
   - `filters`: Optional filters (band, mode, etc.) for award variants
   - Counts unique (DOK, band, mode) combinations
   - Only DCL-confirmed QSOs count
   - Example variants: DLD 80m, DLD CW, DLD 80m CW

3. **`points`**: Point-based awards
   - `stations`: Array of {callsign, points}

@@ -244,6 +193,16 @@ The award system is JSON-driven and located in `award-definitions/` directory. E

5. **`counter`**: Count QSOs or callsigns

### Current Awards

- **DXCC**: HF bands only (160m-10m), 100 entities required
- **DXCC SAT**: Satellite QSOs only, 100 entities required
- **WAS**: Worked All States award
- **VUCC SAT**: VUCC Satellite award
- **SAT-RS44**: Special satellite award
- **73 on 73**: Special stations award
- **DLD**: Deutschland Diplom, 100 unique DOKs required

### Key Files

**Backend Award Service**: `src/backend/services/awards.service.js`

@@ -253,11 +212,13 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
- `calculatePointsAwardProgress(userId, award, options)`: Point-based calculation
- `getAwardEntityBreakdown(userId, awardId)`: Detailed entity breakdown
- `getAwardProgressDetails(userId, awardId)`: Progress with details
- Implements `allowed_bands` and `satellite_only` filtering

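As a quick illustration of the `allowed_bands` and `satellite_only` behavior listed above, here is a minimal, self-contained sketch. The helper name and the QSO object shape are assumptions for illustration, not the actual `awards.service.js` code.

```javascript
// Sketch: pre-filter QSOs for an entity-type award rule before counting unique entities.
function filterQSOsForEntityRule(qsos, rules) {
  return qsos.filter((qso) => {
    // satellite_only: keep only QSOs that carry a satName value
    if (rules.satellite_only && !qso.satName) return false;
    // allowed_bands: if present and non-empty, the QSO's band must be in the list
    if (Array.isArray(rules.allowed_bands) && rules.allowed_bands.length > 0 &&
        !rules.allowed_bands.includes(qso.band)) return false;
    return true;
  });
}

// Example: count unique DXCC entities from LoTW-confirmed QSOs on HF bands only
const rules = { allowed_bands: ['160m', '80m', '40m', '20m', '10m'], satellite_only: false };
const sampleQSOs = [
  { entityId: 230, band: '20m', satName: null, lotwQslRstatus: 'Y' },
  { entityId: 291, band: '2m', satName: 'RS-44', lotwQslRstatus: 'Y' }, // not an HF band, filtered out
];
const confirmed = sampleQSOs.filter((q) => q.lotwQslRstatus === 'Y');
const entities = new Set(filterQSOsForEntityRule(confirmed, rules).map((q) => q.entityId));
console.log(entities.size); // 1
```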
**Database Schema**: `src/backend/db/schema/index.js`
- QSO fields include: `darcDok`, `dclQslRstatus`, `dclQslRdate`
- QSO fields include: `darcDok`, `dclQslRstatus`, `dclQslRdate`, `satName`
- DOK fields support DLD award tracking
- DCL confirmation fields separate from LoTW
- `satName` field for satellite QSO tracking

**Award Definitions**: `award-definitions/*.json`
- Add new awards by creating JSON definition files

@@ -268,7 +229,6 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
- Handles case-insensitive `<EOR>` delimiters (supports `<EOR>`, `<eor>`, `<Eor>`)
- Uses `matchAll()` for reliable field parsing
- Skips header records automatically
- `parseDCLResponse(response)`: Parse DCL's JSON response format `{ "adif": "..." }`
- `normalizeBand(band)`: Standardize band names (80m, 40m, etc.)
- `normalizeMode(mode)`: Standardize mode names (CW, FT8, SSB, etc.)
- Used by both LoTW and DCL services for consistency

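A minimal sketch of the parsing approach described above: split on a case-insensitive `<eor>` and read `<FIELD:length>value` pairs with `matchAll()`. This is illustrative only and not the actual `adif-parser.js` source.

```javascript
// Sketch: parse an ADIF string into an array of record objects.
function parseADIF(adif) {
  // Drop the header (everything before <eoh>), if present
  const body = adif.split(/<eoh>/i).pop();
  const records = [];
  for (const chunk of body.split(/<eor>/i)) {
    const fields = {};
    // <CALL:5>DL1XX style fields; the length prefix says how many characters to take
    for (const m of chunk.matchAll(/<([A-Za-z0-9_]+):(\d+)(?::[^>]*)?>/g)) {
      const name = m[1].toUpperCase();
      const len = Number(m[2]);
      const start = m.index + m[0].length;
      fields[name] = chunk.slice(start, start + len);
    }
    if (Object.keys(fields).length > 0) records.push(fields);
  }
  return records;
}

console.log(parseADIF('<CALL:5>DL1XX<BAND:3>40m<MODE:2>CW<eor><CALL:4>W1AW<BAND:3>20m<eor>'));
// [{ CALL: 'DL1XX', BAND: '40m', MODE: 'CW' }, { CALL: 'W1AW', BAND: '20m' }]
```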
@@ -289,6 +249,7 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
- `POST /api/dcl/sync`: Queue DCL sync job
- `GET /api/jobs/:jobId`: Get job status
- `GET /api/jobs/active`: Get active job for current user
- `DELETE /api/qsos/all`: Delete all QSOs for authenticated user
- `GET /*`: Serves static files from `src/frontend/build/` with SPA fallback

**SPA Routing**: The backend serves the SvelteKit frontend build from `src/frontend/build/`.
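The SPA fallback can be sketched roughly as follows. The handler name is made up; the build path and the friendly 503 page mirror the description in this document, and the Bun file API is used only for illustration.

```javascript
// Sketch: serve static assets, fall back to index.html for client-side routes.
import { join, extname } from 'node:path';

const BUILD_DIR = 'src/frontend/build';

async function serveFrontend(pathname) {
  // Paths without a file extension are client-side routes, so serve index.html
  const isAsset = extname(pathname) !== '';
  const file = Bun.file(join(BUILD_DIR, isAsset ? pathname : 'index.html'));
  if (await file.exists()) return new Response(file);
  // Friendly 503 when the frontend build is missing
  return new Response('<h1>Frontend build not found</h1>', {
    status: 503,
    headers: { 'Content-Type': 'text/html' },
  });
}
```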
@@ -314,9 +275,9 @@ The award system is JSON-driven and located in `award-definitions/` directory. E
- Fully implemented and functional
- **Note**: DCL API is a custom prototype by DARC; contact DARC for API specification details

### DLD Award Implementation (COMPLETED)
### DLD Award Implementation

The DLD (Deutschland Diplom) award was recently implemented:
The DLD (Deutschland Diplom) award:

**Definition**: `award-definitions/dld.json`
```json
@@ -336,7 +297,7 @@ The DLD (Deutschland Diplom) award was recently implemented:
```

**Implementation Details**:
- Function: `calculateDOKAwardProgress()` in `src/backend/services/awards.service.js` (lines 173-268)
- Function: `calculateDOKAwardProgress()` in `src/backend/services/awards.service.js`
- Counts unique (DOK, band, mode) combinations
- Only DCL-confirmed QSOs count (`dclQslRstatus === 'Y'`)
- Each unique DOK on each unique band/mode counts separately
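The slot counting described above boils down to a set of `(DOK, band, mode)` keys. A self-contained sketch, not the real implementation:

```javascript
// Sketch: count unique (DOK, band, mode) slots from DCL-confirmed QSOs.
function countDLDProgress(qsos, target = 100) {
  const slots = new Set();
  for (const qso of qsos) {
    // Only DCL-confirmed QSOs with a DOK value count
    if (qso.dclQslRstatus !== 'Y' || !qso.darcDok) continue;
    slots.add(`${qso.darcDok}|${qso.band}|${qso.mode}`);
  }
  return { current: slots.size, target, completed: slots.size >= target };
}

console.log(countDLDProgress([
  { darcDok: 'B01', band: '80m', mode: 'CW', dclQslRstatus: 'Y' },
  { darcDok: 'B01', band: '80m', mode: 'CW', dclQslRstatus: 'Y' }, // duplicate slot, ignored
  { darcDok: 'B01', band: '40m', mode: 'CW', dclQslRstatus: 'Y' }, // same DOK, new band: counts
  { darcDok: 'F23', band: '80m', mode: 'SSB', dclQslRstatus: 'N' }, // unconfirmed, ignored
])); // { current: 2, target: 100, completed: false }
```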
@@ -349,8 +310,6 @@ The DLD (Deutschland Diplom) award was recently implemented:
- `dclQslRstatus`: DCL confirmation status ('Y' = confirmed)
- `dclQslRdate`: DCL confirmation date

**Documentation**: See `docs/DOCUMENTATION.md` for complete documentation including DLD award example.

**Frontend**: `src/frontend/src/routes/qsos/+page.svelte`
- Separate sync buttons for LoTW (blue) and DCL (orange)
- Independent progress tracking for each sync type

@@ -374,79 +333,42 @@ To add a new award:
3. If new rule type needed, add calculation function
4. Add type handling in `calculateAwardProgress()` switch statement
5. Add type handling in `getAwardEntityBreakdown()` if needed
6. Update documentation in `docs/DOCUMENTATION.md`
6. Update documentation
7. Test with sample QSO data

### Creating DLD Award Variants
### Award Rule Options

The DOK award type supports filters to create award variants. Examples:

**DLD on 80m** (`dld-80m.json`):
```json
{
  "id": "dld-80m",
  "name": "DLD 80m",
  "description": "Confirm 100 unique DOKs on 80m",
  "caption": "Contact 100 different DOKs on the 80m band.",
  "category": "darc",
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "displayField": "darcDok",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "band", "operator": "eq", "value": "80m" }
      ]
    }
  }
}
```

**DLD in CW mode** (`dld-cw.json`):
**allowed_bands**: Restrict which bands count toward an award
```json
{
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "mode", "operator": "eq", "value": "CW" }
      ]
    }
    "type": "entity",
    "allowed_bands": ["160m", "80m", "60m", "40m", "30m", "20m", "17m", "15m", "12m", "10m"]
  }
}
```
- If absent or empty, all bands are allowed (default behavior)
- Used for DXCC to restrict to HF bands only

**DLD on 80m using CW** (combined filters, `dld-80m-cw.json`):
**satellite_only**: Only count satellite QSOs
```json
{
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "band", "operator": "eq", "value": "80m" },
        { "field": "mode", "operator": "eq", "value": "CW" }
      ]
    }
    "type": "entity",
    "satellite_only": true
  }
}
```
- If `true`, only QSOs with `satName` field set are counted
- Used for DXCC SAT award

**Available filter operators**:
**filters**: Additional filtering options
- `eq`: equals
- `ne`: not equals
- `in`: in array
- `nin`: not in array
- `contains`: contains substring

**Available filter fields**: Any QSO field (band, mode, callsign, grid, state, satName, etc.)
- Can filter any QSO field (band, mode, callsign, grid, state, etc.)

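For illustration, the filter operators listed above can be evaluated against a QSO object roughly like this. It is a sketch only; an `OR` group operator is assumed alongside the documented `AND`.

```javascript
// Sketch: evaluate a single filter and a filter group against a QSO object.
function matchesFilter(qso, { field, operator, value }) {
  const v = qso[field];
  switch (operator) {
    case 'eq': return v === value;
    case 'ne': return v !== value;
    case 'in': return Array.isArray(value) && value.includes(v);
    case 'nin': return Array.isArray(value) && !value.includes(v);
    case 'contains': return typeof v === 'string' && v.includes(value);
    default: return false;
  }
}

function matchesFilterGroup(qso, group) {
  if (!group || !Array.isArray(group.filters)) return true; // no filters means everything matches
  const results = group.filters.map((f) => matchesFilter(qso, f));
  return group.operator === 'OR' ? results.some(Boolean) : results.every(Boolean);
}

const qso = { band: '80m', mode: 'CW', callsign: 'DL1ABC' };
console.log(matchesFilterGroup(qso, {
  operator: 'AND',
  filters: [
    { field: 'band', operator: 'eq', value: '80m' },
    { field: 'mode', operator: 'eq', value: 'CW' },
  ],
})); // true
```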
### Confirmation Systems

@@ -466,13 +388,8 @@ The DOK award type supports filters to create award variants. Examples:
- Required for DLD award
- German amateur radio specific
- Request format: POST JSON `{ key, limit, qsl_since, qso_since, cnf_only }`
  - `cnf_only: null` - Fetch all QSOs (confirmed + unconfirmed)
  - `cnf_only: true` - Fetch only confirmed QSOs
  - `qso_since` - QSOs since this date (YYYYMMDD)
  - `qsl_since` - QSL confirmations since this date (YYYYMMDD)
- Response format: JSON with ADIF string in `adif` field
- Syncs ALL QSOs (both confirmed and unconfirmed)
- Unconfirmed QSOs stored but don't count toward awards
- Updates QSOs only if confirmation data has changed

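A hedged sketch of the request/response shape described above. The endpoint URL is a placeholder; the real DCL API address and any authentication details beyond the `key` field are not part of this document.

```javascript
// Sketch: fetch an ADIF payload from the DCL prototype API as described above.
async function fetchDCLADIF(dclApiKey) {
  const response = await fetch('https://dcl.example/api/export', { // placeholder URL
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      key: dclApiKey,
      limit: 50000,
      qso_since: null, // YYYYMMDD, or null for a full sync
      qsl_since: null, // YYYYMMDD, or null for a full sync
      cnf_only: null,  // null = confirmed + unconfirmed
    }),
  });
  const payload = await response.json(); // { "adif": "..." }
  return payload.adif;
}
```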
### ADIF Format

@@ -491,138 +408,13 @@ Both LoTW and DCL return data in ADIF (Amateur Data Interchange Format):
- `MY_DARC_DOK`: User's own DOK
- `STATION_CALLSIGN`: User's callsign

### Recent Commits
### QSO Management

- `aeeb75c`: feat: add QSO count display to filter section
  - Shows count of QSOs matching current filters next to "Filters" heading
  - Displays "Showing X filtered QSOs" when filters are active
  - Displays "Showing X total QSOs" when no filters applied
  - Dynamically updates when filters change
- `bee02d1`: fix: count QSOs confirmed by either LoTW or DCL in stats
  - QSO stats were only counting LoTW-confirmed QSOs (`lotwQslRstatus === 'Y'`)
  - QSOs confirmed only by DCL were excluded from "confirmed" count
  - Fixed by changing filter to: `q.lotwQslRstatus === 'Y' || q.dclQslRstatus === 'Y'`
  - Now correctly shows all QSOs confirmed by at least one system
- `233888c`: fix: make ADIF parser case-insensitive for EOR delimiter
  - **Critical bug**: LoTW uses lowercase `<eor>` tags, parser was splitting on uppercase `<EOR>`
  - Caused 242K+ QSOs to be parsed as 1 giant record with fields overwriting each other
  - Changed to case-insensitive regex: `new RegExp('<eor>', 'gi')`
  - Replaced `regex.exec()` while loop with `matchAll()` for-of iteration
  - Now correctly imports all QSOs from large LoTW reports
- `645f786`: fix: add missing timeOn field to LoTW duplicate detection
  - LoTW sync was missing `timeOn` in duplicate detection query
  - Multiple QSOs with same callsign/date/band/mode but different times were treated as duplicates
  - Now matches DCL sync logic: `userId, callsign, qsoDate, timeOn, band, mode`
- `7f77c3a`: feat: add filter support for DOK awards
  - DOK award type now supports filtering by band, mode, and other QSO fields
  - Allows creating award variants like DLD 80m, DLD CW, DLD 80m CW
  - Uses existing filter system with eq, ne, in, nin, contains operators
  - Example awards created: dld-80m, dld-40m, dld-cw, dld-80m-cw
- `9e73704`: docs: update CLAUDE.md with DLD award variants documentation
- `7201446`: fix: return proper HTML for SPA routes instead of Bun error page
  - When accessing client-side routes (like /qsos) via curl or non-JS clients, the server attempted to open them as static files, causing Bun to throw an unhandled ENOENT error that showed an ugly error page
  - Now checks if a path has a file extension before attempting to serve it
  - Paths without extensions are immediately served index.html for SPA routing
  - Also improves the 503 error page with user-friendly HTML when frontend build is missing
- `223461f`: fix: enable debug logging and improve DCL sync observability
- `27d2ef1`: fix: preserve DOK data when DCL doesn't send values
  - DCL sync only updates DOK/grid fields when DCL provides non-empty values
  - Prevents accidentally clearing DOK data from manual entry or other sources
  - Preserves existing DOK when DCL syncs QSO without DOK information
- `e09ab94`: feat: skip QSOs with unchanged confirmation data
  - LoTW/DCL sync only updates QSOs if confirmation data has changed
  - Tracks added, updated, and skipped QSO counts
  - LoTW: Checks if lotwQslRstatus or lotwQslRdate changed
  - DCL: Checks if dclQslRstatus, dclQslRdate, darcDok, myDarcDok, or grid changed
- `3592dbb`: feat: add import log showing synced QSOs
  - Backend returns addedQSOs and updatedQSOs arrays in sync result
  - Frontend displays import log with callsign, date, band, mode for each QSO
  - Separate sections for "New QSOs" and "Updated QSOs"
  - Sync summary shows total, added, updated, skipped counts
- `8a1a580`: feat: implement DCL ADIF parser and service integration
  - Add shared ADIF parser utility (src/backend/utils/adif-parser.js)
  - Implement DCL service with API integration
  - Refactor LoTW service to use shared parser
  - Tested with example DCL payload (6 QSOs parsed successfully)
- `c982dcd`: feat: implement DLD (Deutschland Diplom) award
- `322ccaf`: docs: add DLD (Deutschland Diplom) award documentation

### Sync Behavior

**Import Log**: After each sync, displays a table showing:
- New QSOs: Callsign, Date, Band, Mode
- Updated QSOs: Callsign, Date, Band, Mode (only if data changed)
- Skipped QSOs: Counted but not shown (data unchanged)

**Duplicate Handling**:
- QSOs matched by: userId, callsign, qsoDate, timeOn, band, mode
- If confirmation data unchanged: Skipped (not updated)
- If confirmation data changed: Updated with new values
- Prevents unnecessary database writes and shows accurate import counts

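A rough sketch of the duplicate-matching decision described above, using plain objects instead of the real database layer. Field names follow the schema mentioned in this document; which fields count as "changed" depends on the sync source (LoTW checks the `lotw*` fields, DCL additionally checks DOK and grid).

```javascript
// Sketch: decide whether an incoming QSO is new, an update, or a skip.
function classifyIncomingQSO(existingQSOs, incoming) {
  const match = existingQSOs.find((q) =>
    q.userId === incoming.userId &&
    q.callsign === incoming.callsign &&
    q.qsoDate === incoming.qsoDate &&
    q.timeOn === incoming.timeOn &&
    q.band === incoming.band &&
    q.mode === incoming.mode
  );
  if (!match) return 'add';
  // Simplified change check; the real services compare source-specific fields
  const changed =
    match.lotwQslRstatus !== incoming.lotwQslRstatus ||
    match.dclQslRstatus !== incoming.dclQslRstatus;
  return changed ? 'update' : 'skip';
}
```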
**DOK Update Behavior**:
- If QSO imported via LoTW (no DOK) and later DCL confirms with DOK: DOK is added ✓
- If QSO already has DOK and DCL sends different DOK: DOK is updated ✓
- If QSO has DOK and DCL syncs without DOK (empty): Existing DOK is preserved ✓
- LoTW never sends DOK data; only DCL provides DOK fields

**Important**: DCL sync only updates DOK/grid fields when DCL provides non-empty values. This prevents accidentally clearing DOK data that was manually entered or imported from other sources.

### DCL Sync Strategy

**Current Behavior**: DCL syncs ALL QSOs (confirmed + unconfirmed)

The application syncs both confirmed and unconfirmed QSOs from DCL:
- **Confirmed QSOs**: `dclQslRstatus = 'Y'` - Count toward awards
- **Unconfirmed QSOs**: `dclQslRstatus = 'N'` - Stored but don't count toward awards

**Purpose of syncing unconfirmed QSOs**:
- Users can see who they've worked (via "Not Confirmed" filter)
- Track QSOs awaiting confirmation
- QSOs can get confirmed later and will be updated on next sync

**Award Calculation**: Always uses confirmed QSOs only (e.g., `dclQslRstatus === 'Y'` for DLD award)

### DCL Incremental Sync Strategy

**Challenge**: Need to fetch both new QSOs AND confirmation updates to old QSOs

**Example Scenario**:
1. Full sync on 2026-01-20 → Last QSO date: 2026-01-20
2. User works 3 new QSOs on 2026-01-25 (unconfirmed)
3. Old QSO from 2026-01-10 gets confirmed on 2026-01-26
4. Next sync needs both: new QSOs (2026-01-25) AND confirmation update (2026-01-10)

**Solution**: Use both `qso_since` and `qsl_since` parameters with OR logic

```javascript
// Proposed sync logic (requires OR logic from DCL API)
const lastQSODate = await getLastDCLQSODate(userId); // Track QSO dates
const lastQSLDate = await getLastDCLQSLDate(userId); // Track QSL dates

const requestBody = {
  key: dclApiKey,
  limit: 50000,
  qso_since: lastQSODate, // Get new QSOs since last contact
  qsl_since: lastQSLDate, // Get QSL confirmations since last sync
  cnf_only: null, // Fetch all QSOs
};
```

**Required API Behavior (OR Logic)**:
- Return QSOs where `(qso_date >= qso_since) OR (qsl_date >= qsl_since)`
- This ensures we get both new QSOs and confirmation updates

**Current DCL API Status**:
- Unknown if current API uses AND or OR logic for combined filters
- **Action Needed**: Request OR logic implementation from DARC
- Test current behavior to confirm API response pattern

**Why OR Logic is Needed**:
- With AND logic: Old QSOs getting confirmed are missed (qso_date too old)
- With OR logic: All updates captured efficiently in one API call

**Delete All QSOs**: `DELETE /api/qsos/all`
- Deletes all QSOs for authenticated user
- Also deletes related `qso_changes` records to satisfy foreign key constraints
- Invalidates stats and user caches after deletion
- Returns count of deleted QSOs

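For illustration, the deletion order required by the foreign key constraint could look like this raw-SQL sketch over `bun:sqlite`. Table and column names here are assumptions; the application itself goes through Drizzle, and cache invalidation happens separately after the transaction.

```javascript
// Sketch: delete child qso_changes rows before the parent qsos rows.
import { Database } from 'bun:sqlite';

function deleteAllQSOs(dbPath, userId) {
  const db = new Database(dbPath);
  const { n } = db.query('SELECT COUNT(*) AS n FROM qsos WHERE user_id = ?1').get(userId);
  const wipe = db.transaction(() => {
    // Child rows first: qso_changes references qsos via a foreign key
    db.prepare('DELETE FROM qso_changes WHERE qso_id IN (SELECT id FROM qsos WHERE user_id = ?1)').run(userId);
    db.prepare('DELETE FROM qsos WHERE user_id = ?1').run(userId);
  });
  wipe();
  return n; // count of deleted QSOs reported back to the client
}
```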
### QSO Page Filters

@@ -630,34 +422,45 @@ The QSO page (`src/frontend/src/routes/qsos/+page.svelte`) includes advanced fil

**Available Filters**:
- **Search Box**: Full-text search across callsign, entity (DXCC country), and grid square fields
  - Press Enter to apply search
  - Case-insensitive partial matching
- **Band Filter**: Dropdown to filter by amateur band (160m, 80m, 60m, 40m, 30m, 20m, 17m, 15m, 12m, 10m, 6m, 2m, 70cm)
- **Mode Filter**: Dropdown to filter by mode (CW, SSB, AM, FM, RTTY, PSK31, FT8, FT4, JT65, JT9)
- **Confirmation Type Filter**: Filter by confirmation status
  - "All QSOs": Shows all QSOs (no filter)
  - "LoTW Only": Shows QSOs confirmed by LoTW but NOT DCL
  - "DCL Only": Shows QSOs confirmed by DCL but NOT LoTW
  - "Both Confirmed": Shows QSOs confirmed by BOTH LoTW AND DCL
  - "Not Confirmed": Shows QSOs confirmed by NEITHER LoTW nor DCL
- **Clear Button**: Resets all filters and reloads all QSOs
  - "All QSOs", "LoTW Only", "DCL Only", "Both Confirmed", "Not Confirmed"
- **Clear Button**: Resets all filters

**Backend Implementation** (`src/backend/services/lotw.service.js`):
- `getUserQSOs(userId, filters, options)`: Main filtering function
- Supports pagination with `page` and `limit` options
- Filter logic uses Drizzle ORM query builders for safe SQL generation
- Debug logging when `LOG_LEVEL=debug` shows applied filters

**Frontend API** (`src/frontend/src/lib/api.js`):
- `qsosAPI.getAll(filters)`: Fetch QSOs with optional filters
- Filters passed as query parameters: `?band=20m&mode=CW&confirmationType=lotw&search=DL`

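A minimal sketch of how such a client helper can build the query string. It is illustrative only; it assumes the list endpoint is `GET /api/qsos` and a bearer-token auth header, neither of which is spelled out in this document.

```javascript
// Sketch: build ?band=...&mode=... style query parameters and fetch QSOs.
async function getQSOs(filters = {}) {
  const params = new URLSearchParams();
  for (const [key, value] of Object.entries(filters)) {
    if (value !== undefined && value !== null && value !== '') params.set(key, value);
  }
  const query = params.toString();
  const res = await fetch(`/api/qsos${query ? `?${query}` : ''}`, {
    headers: { Authorization: `Bearer ${localStorage.getItem('token')}` }, // assumed auth scheme
  });
  return res.json();
}

// getQSOs({ band: '20m', mode: 'CW', confirmationType: 'lotw', search: 'DL' })
// requests GET /api/qsos?band=20m&mode=CW&confirmationType=lotw&search=DL
```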
**QSO Count Display**:
- Shows count of QSOs matching current filters next to "Filters" heading
- **With filters active**: "Showing **X** filtered QSOs"
- **No filters**: "Showing **X** total QSOs"
- Dynamically updates when filters are applied or cleared
- Uses `pagination.totalCount` from backend API response

### Award Detail View

**Overview**: The award detail page (`src/frontend/src/routes/awards/[id]/+page.svelte`) displays award progress in a pivot table format.

**Key Features**:
- **Summary Cards**: Show total, confirmed, worked, needed counts for unique entities
- **Mode Filter**: Filter by specific mode or view "Mixed Mode" (aggregates all modes by band)
- **Table Columns**: Show bands (or band/mode combinations) as columns
- **QSO Counts**: Each cell shows count of confirmed QSOs for that (entity, band, mode) slot
- **Drill-Down**: Click a count to open modal showing all QSOs for that slot
- **QSO Detail**: Click any QSO to view full QSO details
- **Satellite Grouping**: Satellite QSOs grouped under "SAT" column instead of frequency band

**Column Sorting**: Bands sorted by wavelength (longest to shortest):
160m, 80m, 60m, 40m, 30m, 20m, 17m, 15m, 12m, 10m, 6m, 2m, 70cm, SAT

**Column Sums**: Show unique entity count per column (not QSO counts)

**Backend Changes** (`src/backend/services/awards.service.js`):
- `calculateDOKAwardProgress()`: Groups by (DOK, band, mode) slots, collects QSOs in `qsos` array
- `calculatePointsAwardProgress()`: Handles all count modes with `qsos` array
- `getAwardEntityBreakdown()`: Groups by (entity, band, mode) slots
- Includes `satName` in QSO data for satellite grouping
- Implements `allowed_bands` and `satellite_only` filtering

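The wavelength ordering and `SAT` grouping described above amount to a fixed column order plus a per-QSO column picker. A small sketch (names are illustrative):

```javascript
// Sketch: pick the pivot-table column for a QSO and sort columns by wavelength.
const BAND_ORDER = ['160m', '80m', '60m', '40m', '30m', '20m', '17m', '15m',
  '12m', '10m', '6m', '2m', '70cm', 'SAT'];

// Satellite QSOs are shown under a synthetic "SAT" column instead of their band
function columnForQSO(qso) {
  return qso.satName ? 'SAT' : qso.band;
}

function sortColumns(columns) {
  // Bands missing from BAND_ORDER would need extra handling; omitted here
  return [...columns].sort((a, b) => BAND_ORDER.indexOf(a) - BAND_ORDER.indexOf(b));
}

console.log(sortColumns([columnForQSO({ band: '20m' }), columnForQSO({ band: '2m', satName: 'RS-44' }), '80m']));
// ['80m', '20m', 'SAT']
```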
### DXCC Entity Priority Logic

@@ -665,58 +468,18 @@ When syncing QSOs from multiple confirmation sources, the system follows a prior

**Priority Order**: LoTW > DCL

**Implementation** (`src/backend/services/dcl.service.js`):
```javascript
// DXCC priority: LoTW > DCL
// Only update entity fields from DCL if:
// 1. QSO is NOT LoTW confirmed, AND
// 2. DCL actually sent entity data, AND
// 3. Current entity is missing
const hasLoTWConfirmation = existingQSO.lotwQslRstatus === 'Y';
const hasDCLData = dbQSO.entity || dbQSO.entityId;
const missingEntity = !existingQSO.entity || existingQSO.entity === '';

if (!hasLoTWConfirmation && hasDCLData && missingEntity) {
  // Fill in entity data from DCL (only if DCL provides it)
  updateData.entity = dbQSO.entity;
  updateData.entityId = dbQSO.entityId;
  // ... other entity fields
}
```

**Rules**:
1. **LoTW-confirmed QSOs**: Always use LoTW's DXCC data (most reliable)
2. **DCL-only QSOs**: Use DCL's DXCC data IF available in ADIF payload
3. **Empty entity fields**: If DCL doesn't send DXCC data, entity remains empty
4. **Never overwrite**: Once LoTW confirms with entity data, DCL sync won't change it

**Important Note**: DCL API currently doesn't send DXCC/entity fields in their ADIF export. This is a limitation of the DCL API, not the application. If DCL adds these fields in the future, the system will automatically use them for DCL-only QSOs.
**Important Note**: DCL API currently doesn't send DXCC/entity fields in their ADIF export.

### Recent Development Work (January 2025)

**QSO Page Enhancements**:
- Added confirmation type filter with exclusive logic (LoTW Only, DCL Only, Both Confirmed, Not Confirmed)
- Added search box for filtering by callsign, entity, or grid square
- Renamed "All Confirmation" to "All QSOs" for clarity
- Fixed filter logic to properly handle exclusive confirmation types

**Bug Fixes**:
- Fixed confirmation filter showing wrong QSOs (e.g., "LoTW Only" was also showing DCL QSOs)
- Implemented proper SQL conditions for exclusive filters using separate condition pushes
- Added debug logging to track filter application

**DXCC Entity Handling**:
- Clarified that DCL API doesn't send DXCC fields (current limitation)
- Implemented priority logic: LoTW entity data takes precedence over DCL
- System ready to auto-use DCL DXCC data if they add it in future API updates

### Critical LoTW Sync Behavior (LEARNED THE HARD WAY)
### Critical LoTW Sync Behavior

**⚠️ IMPORTANT: LoTW sync MUST only import confirmed QSOs**

After attempting to implement "QSO Delta" sync (all QSOs, confirmed + unconfirmed), we discovered:

**The Problem:**
LoTW ADIF export with `qso_qsl=no` (all QSOs mode) only includes:
- `CALL` (callsign)
- `QSL_RCVD` (confirmation status: Y/N)

@@ -724,9 +487,7 @@ LoTW ADIF export with `qso_qsl=no` (all QSOs mode) only includes:
**Missing Fields for Unconfirmed QSOs:**
- `DXCC` (entity ID) ← **CRITICAL for awards!**
- `COUNTRY` (entity name)
- `CONTINENT`
- `CQ_ZONE`
- `ITU_ZONE`
- `CONTINENT`, `CQ_ZONE`, `ITU_ZONE`

**Result:** Unconfirmed QSOs have `entityId: null` and `entity: ""`, breaking award calculations.

@@ -742,67 +503,22 @@ const params = new URLSearchParams({
});
```

**Why This Matters:**
- Awards require `entityId` to count entities
- Without `entityId`, QSOs can't be counted toward DXCC, WAS, etc.
- Users can still see "worked" stations in QSO list, but awards only count confirmed
- DCL sync can import all QSOs because it provides entity data via callsign lookup
### Recent Development Work (January 2026)

**Attempted Solution (REVERTED):**
- Tried implementing callsign prefix lookup to populate missing `entityId`
- Created `src/backend/utils/callsign-lookup.js` with basic prefix mappings
- Complexity: 1000+ DXCC entities, many special event callsigns, portable designators
- Decision: Too complex, reverted (commit 310b154)
**Award System Enhancements**:
- Added `allowed_bands` filter to restrict which bands count toward awards
- Added `satellite_only` flag for satellite-only awards
- DXCC restricted to HF bands (160m-10m) only
- Added DXCC SAT award for satellite-only QSOs
- Removed redundant award variants (DXCC CW, DLD variants)

**Takeaway:** LoTW confirmed QSOs have reliable DXCC data. Don't try to workaround this fundamental limitation.
**Award Detail View Improvements**:
- Summary shows unique entity progress instead of QSO counts
- Column sums count unique entities per column
- Satellite QSOs grouped under "SAT" column
- Bands sorted by wavelength instead of alphabetically
- Mode removed from table headers (visible in filter dropdown)

### QSO Confirmation Filters

Added "Confirmed by at least 1 service" filter to QSO view (commit 688b0fc):

**Filter Options:**
- "All QSOs" - No filter
- "Confirmed by at least 1 service" (NEW) - LoTW OR DCL confirmed
- "LoTW Only" - Confirmed by LoTW but NOT DCL
- "DCL Only" - Confirmed by DCL but NOT LoTW
- "Both Confirmed" - Confirmed by BOTH LoTW AND DCL
- "Not Confirmed" - Confirmed by NEITHER

**SQL Logic:**
```sql
-- "Confirmed by at least 1 service"
WHERE lotwQslRstatus = 'Y' OR dclQslRstatus = 'Y'

-- "LoTW Only"
WHERE lotwQslRstatus = 'Y' AND (dclQslRstatus IS NULL OR dclQslRstatus != 'Y')

-- "DCL Only"
WHERE dclQslRstatus = 'Y' AND (lotwQslRstatus IS NULL OR lotwQslRstatus != 'Y')

-- "Both Confirmed"
WHERE lotwQslRstatus = 'Y' AND dclQslRstatus = 'Y'

-- "Not Confirmed"
WHERE (lotwQslRstatus IS NULL OR lotwQslRstatus != 'Y')
  AND (dclQslRstatus IS NULL OR dclQslRstatus != 'Y')
```

### Recent Development Work (January 2025)

**Sync Type Support (ATTEMPTED & REVERTED):**
- Commit 5b78935: Added LoTW sync type support (QSL/QSO delta/full)
- Commit 310b154: Reverted - LoTW doesn't provide entity data for unconfirmed QSOs
- **Lesson:** Keep it simple - only sync confirmed QSOs from LoTW

**Dashboard Enhancements:**
- Added sync job history display with real-time polling (every 2 seconds)
- Shows job progress, status, and import logs
- Cancel button for stale/failed jobs with rollback capability
- Tracks all QSO changes in `qso_changes` table for rollback

**Rollback System:**
- `cancelJob(jobId, userId)` - Cancels and rolls back sync jobs
- Tracks added QSOs (deletes them on rollback)
- Tracks updated QSOs (restores previous state)
- Only allows canceling failed jobs or stale running jobs (>1 hour)
- Server-side validation prevents unauthorized cancellations
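A simplified sketch of the cancellation and rollback flow described above. The `db` helper methods and the change-record shape are hypothetical; only the eligibility rule (failed, or running longer than one hour) comes from the text.

```javascript
// Sketch: eligibility check plus rollback of recorded QSO changes.
function canCancel(job, now = Date.now()) {
  const STALE_MS = 60 * 60 * 1000; // running longer than 1 hour counts as stale
  return job.status === 'failed' ||
    (job.status === 'running' && now - job.startedAt > STALE_MS);
}

async function rollbackJob(job, changes, db) {
  if (!canCancel(job)) throw new Error('Job is not eligible for cancellation');
  for (const change of changes) {
    if (change.type === 'added') {
      await db.deleteQSO(change.qsoId); // QSOs the job created are removed
    } else if (change.type === 'updated') {
      await db.updateQSO(change.qsoId, change.previousValues); // restore the prior state
    }
  }
}
```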
**QSO Management**:
- Fixed DELETE /api/qsos/all to handle foreign key constraints
- Added cache invalidation after QSO deletion

219 DOCKER.md

@@ -1,219 +0,0 @@
# Docker Deployment Guide

This guide covers deploying Quickawards using Docker.

## Quick Start

1. **Create environment file:**
   ```bash
   cp .env.docker.example .env
   ```

2. **Generate secure JWT secret:**
   ```bash
   openssl rand -base64 32
   ```
   Copy the output and set it as `JWT_SECRET` in `.env`.

3. **Update `.env` with your settings:**
   - `JWT_SECRET`: Strong random string (required)
   - `VITE_APP_URL`: Your domain (e.g., `https://awards.example.com`)
   - `ALLOWED_ORIGINS`: Your domain(s) for CORS

4. **Start the application:**
   ```bash
   docker-compose up -d
   ```

5. **Access the application:**
   - URL: http://localhost:3001
   - Health check: http://localhost:3001/api/health

## Architecture

### Single Port Design

The Docker stack exposes a single port (3001) which serves both:
- **Backend API** (`/api/*`)
- **Frontend SPA** (all other routes)

### Database Persistence

- **Location**: `./data/award.db` (host-mounted volume)
- **Initialization**: Automatic on first startup
- **Persistence**: Database survives container restarts/recreations

### Startup Behavior

1. **First startup**: Database is created from template
2. **Subsequent startups**: Existing database is used
3. **Container recreation**: Database persists in volume

## Commands

### Start the application
```bash
docker-compose up -d
```

### View logs
```bash
docker-compose logs -f
```

### Stop the application
```bash
docker-compose down
```

### Rebuild after code changes
```bash
docker-compose up -d --build
```

### Stop and remove everything (including database volume)
```bash
docker-compose down -v
```

## Environment Variables

| Variable | Required | Default | Description |
|----------|----------|---------|-------------|
| `NODE_ENV` | No | `production` | Environment mode |
| `PORT` | No | `3001` | Application port |
| `LOG_LEVEL` | No | `info` | Logging level (debug/info/warn/error) |
| `JWT_SECRET` | **Yes** | - | JWT signing secret (change this!) |
| `VITE_APP_URL` | No | - | Your application's public URL |
| `ALLOWED_ORIGINS` | No | - | CORS allowed origins (comma-separated) |

## Database Management

### Backup the database
```bash
cp data/award.db data/award.db.backup.$(date +%Y%m%d)
```

### Restore from backup
```bash
docker-compose down
cp data/award.db.backup.YYYYMMDD data/award.db
docker-compose up -d
```

### Reset the database
```bash
docker-compose down -v
docker-compose up -d
```

## Troubleshooting

### Container won't start
```bash
# Check logs
docker-compose logs -f

# Check container status
docker-compose ps
```

### Database errors
```bash
# Check database file exists
ls -la data/

# Check database permissions
stat data/award.db
```

### Port already in use
Change the port mapping in `docker-compose.yml`:
```yaml
ports:
  - "8080:3001"  # Maps host port 8080 to container port 3001
```

### Health check failing
```bash
# Check if container is responding
curl http://localhost:3001/api/health

# Check container logs
docker-compose logs quickawards
```

## Production Deployment

### Using a Reverse Proxy (nginx)

Example nginx configuration:

```nginx
server {
    listen 80;
    server_name awards.example.com;

    location / {
        proxy_pass http://localhost:3001;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
```

### SSL/TLS with Let's Encrypt

Use certbot with nginx:

```bash
sudo certbot --nginx -d awards.example.com
```

### Security Checklist

- [ ] Set strong `JWT_SECRET`
- [ ] Set `NODE_ENV=production`
- [ ] Set `LOG_LEVEL=info` (or `warn` in production)
- [ ] Configure `ALLOWED_ORIGINS` to your domain only
- [ ] Use HTTPS/TLS in production
- [ ] Regular database backups
- [ ] Monitor logs for suspicious activity
- [ ] Keep Docker image updated

## File Structure After Deployment

```
project/
├── data/
│   └── award.db        # Persisted database (volume mount)
├── docker-compose.yml
├── Dockerfile
├── .dockerignore
├── .env                # Your environment variables
└── ... (source code)
```

## Building Without docker-compose

If you prefer to use `docker` directly:

```bash
# Build the image
docker build -t quickawards .

# Run the container
docker run -d \
  --name quickawards \
  -p 3001:3001 \
  -v $(pwd)/data:/data \
  -e JWT_SECRET=your-secret-here \
  -e NODE_ENV=production \
  quickawards
```
72 Dockerfile

@@ -1,72 +0,0 @@
# Multi-stage Dockerfile for Quickawards
# Uses official Bun runtime image

# ============================================
# Stage 1: Dependencies & Database Init
# ============================================
FROM oven/bun:1 AS builder

WORKDIR /app

# Install ALL dependencies (including devDependencies for drizzle-kit)
COPY package.json bun.lock ./
RUN bun install --frozen-lockfile

# Copy source code (node_modules excluded by .dockerignore)
COPY . .

# Reinstall frontend dependencies to get correct platform binaries
RUN cd src/frontend && bun install

# Initialize database using custom script
# This creates a fresh database with the correct schema using bun:sqlite
RUN bun src/backend/scripts/init-db.js

# Build frontend
RUN bun run build

# ============================================
# Stage 2: Production Image
# ============================================
FROM oven/bun:1 AS production

WORKDIR /app

# Install production dependencies only
COPY package.json bun.lock ./
RUN bun install --frozen-lockfile --production

# Copy backend source and schema files
COPY src/backend ./src/backend
COPY award-definitions ./award-definitions
COPY drizzle.config.ts ./

# Copy frontend build from builder stage
COPY --from=builder /app/src/frontend/build ./src/frontend/build

# Copy initialized database from builder (will be used as template)
COPY --from=builder /app/src/backend/award.db /app/award.db.template

# Copy drizzle migrations (if they exist)
COPY --from=builder /app/drizzle ./drizzle

# Create directory for database volume mount
RUN mkdir -p /data

# Copy entrypoint script
COPY docker-entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/docker-entrypoint.sh

# Set environment variables
ENV NODE_ENV=production \
    PORT=3001 \
    LOG_LEVEL=info

# Expose the application port
EXPOSE 3001

# Use entrypoint script to handle database initialization
ENTRYPOINT ["docker-entrypoint.sh"]

# Start the backend server
CMD ["bun", "run", "src/backend/index.js"]
56 README.md

@@ -116,7 +116,7 @@ award/
│   └── package.json
├── award-definitions/          # Award rule definitions (JSON)
├── award.db                    # SQLite database (auto-created)
├── .env.production.template    # Production configuration template
├── .env.example                # Environment configuration template
├── bunfig.toml                 # Bun configuration
├── drizzle.config.js           # Drizzle ORM configuration
├── package.json

@@ -149,20 +149,32 @@ cp .env.example .env

Edit `.env` with your configuration:
```env
# Application URL (for production deployment)
VITE_APP_URL=https://awards.dj7nt.de
# Environment (development/production)
NODE_ENV=development

# Log Level (debug/info/warn/error)
LOG_LEVEL=debug

# Server Port (default: 3001)
PORT=3001

# Frontend URL (e.g., https://awards.dj7nt.de)
# Leave empty for development (uses localhost)
VITE_APP_URL=

# API Base URL (leave empty for same-domain deployment)
VITE_API_BASE_URL=

# JWT Secret (generate with: openssl rand -base64 32)
JWT_SECRET=your-generated-secret-here
# Allowed CORS origins (comma-separated)
# Add all domains that should access the API
ALLOWED_ORIGINS=

# Environment
NODE_ENV=production
# JWT Secret (generate with: openssl rand -base64 32)
JWT_SECRET=change-this-in-production
```

**For development**: You can leave `.env` empty or use defaults.
**For development**: Use defaults above.
**For production**: Set `NODE_ENV=production`, `LOG_LEVEL=info`, and generate a strong `JWT_SECRET`.

4. Initialize the database with performance indexes:
```bash
@@ -414,20 +426,26 @@ bun run build

Create `.env` in the project root:

```bash
# Application URL
VITE_APP_URL=https://awards.dj7nt.de

# API Base URL (empty for same-domain)
VITE_API_BASE_URL=

# JWT Secret (generate with: openssl rand -base64 32)
JWT_SECRET=your-generated-secret-here

# Environment
NODE_ENV=production

# Database path (absolute path recommended)
DATABASE_PATH=/path/to/award/award.db
# Log Level (debug/info/warn/error)
LOG_LEVEL=info

# Server Port (default: 3001)
PORT=3001

# Frontend URL
VITE_APP_URL=https://awards.dj7nt.de

# API Base URL (leave empty for same-domain deployment)
VITE_API_BASE_URL=

# Allowed CORS origins (comma-separated)
ALLOWED_ORIGINS=https://awards.dj7nt.de,https://www.awards.dj7nt.de

# JWT Secret (generate with: openssl rand -base64 32)
JWT_SECRET=your-generated-secret-here
```

**Security**: Ensure `.env` has restricted permissions:

`award-definitions/dld-40m.json` (deleted)
@@ -1,19 +0,0 @@
{
  "id": "dld-40m",
  "name": "DLD 40m",
  "description": "Confirm 100 unique DOKs on 40m",
  "caption": "Contact and confirm stations with 100 unique DOKs (DARC Ortsverband Kennung) on the 40m band. Only DCL-confirmed QSOs with valid DOK information on 40m count toward this award.",
  "category": "darc",
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "displayField": "darcDok",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "band", "operator": "eq", "value": "40m" }
      ]
    }
  }
}

`award-definitions/dld-80m-cw.json` (deleted)
@@ -1,20 +0,0 @@
{
  "id": "dld-80m-cw",
  "name": "DLD 80m CW",
  "description": "Confirm 100 unique DOKs on 80m using CW",
  "caption": "Contact and confirm stations with 100 unique DOKs (DARC Ortsverband Kennung) on the 80m band using CW mode. Only DCL-confirmed QSOs with valid DOK information on 80m CW count toward this award.",
  "category": "darc",
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "displayField": "darcDok",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "band", "operator": "eq", "value": "80m" },
        { "field": "mode", "operator": "eq", "value": "CW" }
      ]
    }
  }
}

`award-definitions/dld-80m.json` (deleted)
@@ -1,19 +0,0 @@
{
  "id": "dld-80m",
  "name": "DLD 80m",
  "description": "Confirm 100 unique DOKs on 80m",
  "caption": "Contact and confirm stations with 100 unique DOKs (DARC Ortsverband Kennung) on the 80m band. Only DCL-confirmed QSOs with valid DOK information on 80m count toward this award.",
  "category": "darc",
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "displayField": "darcDok",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "band", "operator": "eq", "value": "80m" }
      ]
    }
  }
}

`award-definitions/dld-cw.json` (deleted)
@@ -1,19 +0,0 @@
{
  "id": "dld-cw",
  "name": "DLD CW",
  "description": "Confirm 100 unique DOKs using CW mode",
  "caption": "Contact and confirm stations with 100 unique DOKs (DARC Ortsverband Kennung) using CW (Morse code). Each unique DOK on CW counts separately. Only DCL-confirmed QSOs with valid DOK information count toward this award.",
  "category": "darc",
  "rules": {
    "type": "dok",
    "target": 100,
    "confirmationType": "dcl",
    "displayField": "darcDok",
    "filters": {
      "operator": "AND",
      "filters": [
        { "field": "mode", "operator": "eq", "value": "CW" }
      ]
    }
  }
}

`award-definitions/dxcc-cw.json` (deleted)
@@ -1,27 +0,0 @@
{
  "id": "dxcc-cw",
  "name": "DXCC CW",
  "description": "Confirm 100 DXCC entities using CW mode",
  "caption": "Contact and confirm 100 different DXCC entities using CW mode only. Only QSOs made with CW mode count toward this award. QSOs are confirmed when LoTW QSL is received.",
  "category": "dxcc",
  "rules": {
    "target": 100,
    "type": "filtered",
    "baseRule": {
      "type": "entity",
      "entityType": "dxcc",
      "target": 100,
      "displayField": "entity"
    },
    "filters": {
      "operator": "AND",
      "filters": [
        {
          "field": "mode",
          "operator": "eq",
          "value": "CW"
        }
      ]
    }
  }
}
14 award-definitions/dxcc-sat.json (new file)

@@ -0,0 +1,14 @@
{
  "id": "dxcc-sat",
  "name": "DXCC SAT",
  "description": "Confirm 100 DXCC entities via satellite",
  "caption": "Contact and confirm 100 different DXCC entities using satellite communications. Only satellite QSOs count toward this award. QSOs are confirmed when LoTW QSL is received.",
  "category": "dxcc",
  "rules": {
    "type": "entity",
    "entityType": "dxcc",
    "target": 100,
    "displayField": "entity",
    "satellite_only": true
  }
}
@@ -1,13 +1,14 @@
{
  "id": "dxcc-mixed",
  "name": "DXCC Mixed Mode",
  "description": "Confirm 100 DXCC entities on any band/mode",
  "caption": "Contact and confirm 100 different DXCC entities. Any band and mode combination counts. QSOs are confirmed when LoTW QSL is received.",
  "id": "dxcc",
  "name": "DXCC",
  "description": "Confirm 100 DXCC entities on HF bands",
  "caption": "Contact and confirm 100 different DXCC entities on HF bands (160m-10m). Only HF band QSOs count toward this award. QSOs are confirmed when LoTW QSL is received.",
  "category": "dxcc",
  "rules": {
    "type": "entity",
    "entityType": "dxcc",
    "target": 100,
    "displayField": "entity"
    "displayField": "entity",
    "allowed_bands": ["160m", "80m", "60m", "40m", "30m", "20m", "17m", "15m", "12m", "10m"]
  }
}

`docker-compose.yml` (deleted)

@@ -1,31 +0,0 @@
services:
  quickawards:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: quickawards
    restart: unless-stopped
    ports:
      - "3001:3001"
    environment:
      # Application settings
      NODE_ENV: production
      PORT: 3001
      LOG_LEVEL: info

      # Security - IMPORTANT: Change these in production!
      JWT_SECRET: ${JWT_SECRET:-change-this-in-production}

      # CORS - Set to your domain in production
      VITE_APP_URL: ${VITE_APP_URL:-}
      ALLOWED_ORIGINS: ${ALLOWED_ORIGINS:-}
    volumes:
      # Host-mounted database directory
      # Database will be created at ./data/award.db on first startup
      - ./data:/data
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3001/api/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
`docker-entrypoint.sh` (deleted)

@@ -1,62 +0,0 @@
#!/bin/sh
set -e

# Docker container entrypoint script
# Handles database initialization on first startup

echo "=========================================="
echo "Quickawards - Docker Entrypoint"
echo "=========================================="

# Database location in volume mount
DB_PATH="/data/award.db"
TEMPLATE_DB="/app/award.db.template"
APP_DB_PATH="/app/src/backend/award.db"

# Check if database exists in the volume
if [ ! -f "$DB_PATH" ]; then
    echo ""
    echo "📦 Database not found in volume mount."
    echo "   Initializing from template database..."
    echo ""

    # Copy the template database (created during build with drizzle-kit push)
    cp "$TEMPLATE_DB" "$DB_PATH"

    # Ensure proper permissions
    chmod 644 "$DB_PATH"

    echo "✅ Database initialized at: $DB_PATH"
    echo "   This database will persist in the Docker volume."
else
    echo ""
    echo "✅ Existing database found at: $DB_PATH"
    echo "   Using existing database from volume mount."
fi

# Create symlink from app's expected db location to volume mount
# The app expects the database at src/backend/award.db
# We create a symlink so it points to the volume-mounted database
if [ -L "$APP_DB_PATH" ]; then
    # Symlink already exists, remove it to refresh
    rm "$APP_DB_PATH"
elif [ -e "$APP_DB_PATH" ]; then
    # File or directory exists (shouldn't happen in production, but handle it)
    echo "⚠ Warning: Found existing database at $APP_DB_PATH, removing..."
    rm -f "$APP_DB_PATH"
fi

# Create symlink to the volume-mounted database
ln -s "$DB_PATH" "$APP_DB_PATH"
echo "✅ Created symlink: $APP_DB_PATH -> $DB_PATH"

echo ""
echo "=========================================="
echo "Starting Quickawards application..."
echo "Port: ${PORT:-3001}"
echo "Environment: ${NODE_ENV:-production}"
echo "=========================================="
echo ""

# Execute the main command (passed as CMD in Dockerfile)
exec "$@"
1051
docs/AWARD-SYSTEM-SPECIFICATION.md
Normal file
File diff suppressed because it is too large
@@ -223,5 +223,39 @@ export const adminActions = sqliteTable('admin_actions', {
|
||||
createdAt: integer('created_at', { mode: 'timestamp' }).notNull().$defaultFn(() => new Date()),
|
||||
});
|
||||
|
||||
/**
|
||||
* @typedef {Object} AutoSyncSettings
|
||||
* @property {number} userId
|
||||
* @property {boolean} lotwEnabled
|
||||
* @property {number} lotwIntervalHours
|
||||
* @property {Date|null} lotwLastSyncAt
|
||||
* @property {Date|null} lotwNextSyncAt
|
||||
* @property {boolean} dclEnabled
|
||||
* @property {number} dclIntervalHours
|
||||
* @property {Date|null} dclLastSyncAt
|
||||
* @property {Date|null} dclNextSyncAt
|
||||
* @property {Date} createdAt
|
||||
* @property {Date} updatedAt
|
||||
*/
|
||||
|
||||
export const autoSyncSettings = sqliteTable('auto_sync_settings', {
|
||||
userId: integer('user_id').primaryKey().references(() => users.id),
|
||||
|
||||
// LoTW auto-sync settings
|
||||
lotwEnabled: integer('lotw_enabled', { mode: 'boolean' }).notNull().default(false),
|
||||
lotwIntervalHours: integer('lotw_interval_hours').notNull().default(24),
|
||||
lotwLastSyncAt: integer('lotw_last_sync_at', { mode: 'timestamp' }),
|
||||
lotwNextSyncAt: integer('lotw_next_sync_at', { mode: 'timestamp' }),
|
||||
|
||||
// DCL auto-sync settings
|
||||
dclEnabled: integer('dcl_enabled', { mode: 'boolean' }).notNull().default(false),
|
||||
dclIntervalHours: integer('dcl_interval_hours').notNull().default(24),
|
||||
dclLastSyncAt: integer('dcl_last_sync_at', { mode: 'timestamp' }),
|
||||
dclNextSyncAt: integer('dcl_next_sync_at', { mode: 'timestamp' }),
|
||||
|
||||
createdAt: integer('created_at', { mode: 'timestamp' }).notNull().$defaultFn(() => new Date()),
|
||||
updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull().$defaultFn(() => new Date()),
|
||||
});
|
||||
|
||||
// Export all schemas
|
||||
export const schema = { users, qsos, awards, awardProgress, syncJobs, qsoChanges, adminActions };
|
||||
export const schema = { users, qsos, awards, awardProgress, syncJobs, qsoChanges, adminActions, autoSyncSettings };
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
import {
|
||||
getSystemStats,
|
||||
getUserStats,
|
||||
getAdminActions,
|
||||
impersonateUser,
|
||||
verifyImpersonation,
|
||||
stopImpersonation,
|
||||
@@ -42,6 +43,16 @@ import {
|
||||
getAwardProgressDetails,
|
||||
getAwardEntityBreakdown,
|
||||
} from './services/awards.service.js';
|
||||
import {
|
||||
getAutoSyncSettings,
|
||||
updateAutoSyncSettings,
|
||||
} from './services/auto-sync.service.js';
|
||||
import {
|
||||
startScheduler,
|
||||
stopScheduler,
|
||||
getSchedulerStatus,
|
||||
triggerSchedulerTick,
|
||||
} from './services/scheduler.service.js';
|
||||
|
||||
/**
|
||||
* Main backend application
|
||||
@@ -434,9 +445,15 @@ const app = new Elysia()
|
||||
return { success: false, error: 'User not found' };
|
||||
}
|
||||
|
||||
// Include impersonatedBy from JWT if present (not stored in database)
|
||||
const responseUser = {
|
||||
...userData,
|
||||
impersonatedBy: user.impersonatedBy,
|
||||
};
|
||||
|
||||
return {
|
||||
success: true,
|
||||
user: userData,
|
||||
user: responseUser,
|
||||
};
|
||||
})
|
||||
|
||||
@@ -860,6 +877,7 @@ const app = new Elysia()
|
||||
message: `Deleted ${deleted} QSO(s)`,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete QSOs', { error: error.message, stack: error.stack });
|
||||
set.status = 500;
|
||||
return {
|
||||
success: false,
|
||||
@@ -1390,6 +1408,133 @@ const app = new Elysia()
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* ================================================================
|
||||
* AUTO-SYNC SETTINGS ROUTES
|
||||
* ================================================================
|
||||
* All auto-sync routes require authentication
|
||||
*/
|
||||
|
||||
/**
|
||||
* GET /api/auto-sync/settings
|
||||
* Get user's auto-sync settings (requires authentication)
|
||||
*/
|
||||
.get('/api/auto-sync/settings', async ({ user, set }) => {
|
||||
if (!user) {
|
||||
set.status = 401;
|
||||
return { success: false, error: 'Unauthorized' };
|
||||
}
|
||||
|
||||
try {
|
||||
const settings = await getAutoSyncSettings(user.id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
settings,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Error fetching auto-sync settings', { error: error.message, userId: user.id });
|
||||
set.status = 500;
|
||||
return {
|
||||
success: false,
|
||||
error: 'Failed to fetch auto-sync settings',
|
||||
};
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* PUT /api/auto-sync/settings
|
||||
* Update user's auto-sync settings (requires authentication)
|
||||
*/
|
||||
.put(
|
||||
'/api/auto-sync/settings',
|
||||
async ({ user, body, set }) => {
|
||||
if (!user) {
|
||||
set.status = 401;
|
||||
return { success: false, error: 'Unauthorized' };
|
||||
}
|
||||
|
||||
try {
|
||||
const settings = await updateAutoSyncSettings(user.id, body);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
settings,
|
||||
message: 'Auto-sync settings updated successfully',
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Error updating auto-sync settings', { error: error.message, userId: user.id });
|
||||
set.status = 400;
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
},
|
||||
{
|
||||
body: t.Object({
|
||||
lotwEnabled: t.Optional(t.Boolean()),
|
||||
lotwIntervalHours: t.Optional(t.Number()),
|
||||
dclEnabled: t.Optional(t.Boolean()),
|
||||
dclIntervalHours: t.Optional(t.Number()),
|
||||
}),
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* GET /api/auto-sync/scheduler/status
|
||||
* Get scheduler status (admin only)
|
||||
*/
|
||||
.get('/api/auto-sync/scheduler/status', async ({ user, set }) => {
|
||||
if (!user || !user.isAdmin) {
|
||||
set.status = !user ? 401 : 403;
|
||||
return { success: false, error: !user ? 'Unauthorized' : 'Admin access required' };
|
||||
}
|
||||
|
||||
try {
|
||||
const status = getSchedulerStatus();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
scheduler: status,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Error fetching scheduler status', { error: error.message, userId: user.id });
|
||||
set.status = 500;
|
||||
return {
|
||||
success: false,
|
||||
error: 'Failed to fetch scheduler status',
|
||||
};
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/auto-sync/scheduler/trigger
|
||||
* Manually trigger scheduler tick (admin only, for testing)
|
||||
*/
|
||||
.post('/api/auto-sync/scheduler/trigger', async ({ user, set }) => {
|
||||
if (!user || !user.isAdmin) {
|
||||
set.status = !user ? 401 : 403;
|
||||
return { success: false, error: !user ? 'Unauthorized' : 'Admin access required' };
|
||||
}
|
||||
|
||||
try {
|
||||
await triggerSchedulerTick();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Scheduler tick triggered successfully',
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Error triggering scheduler tick', { error: error.message, userId: user.id });
|
||||
set.status = 500;
|
||||
return {
|
||||
success: false,
|
||||
error: 'Failed to trigger scheduler tick',
|
||||
};
|
||||
}
|
||||
})
|
||||
|
||||
// Serve static files and SPA fallback for all non-API routes
|
||||
.get('/*', ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
@@ -1546,3 +1691,21 @@ logger.info('Server started', {
|
||||
nodeEnv: process.env.NODE_ENV || 'unknown',
|
||||
logLevel: LOG_LEVEL,
|
||||
});
|
||||
|
||||
// Start the auto-sync scheduler
|
||||
startScheduler();
|
||||
|
||||
// Graceful shutdown handlers
|
||||
const gracefulShutdown = async (signal) => {
|
||||
logger.info(`Received ${signal}, shutting down gracefully...`);
|
||||
|
||||
// Stop the scheduler
|
||||
await stopScheduler();
|
||||
|
||||
logger.info('Graceful shutdown complete');
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
// Handle shutdown signals
|
||||
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
|
||||
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
|
||||
|
||||
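The auto-sync routes added above are consumed from the frontend through the autoSyncAPI helper introduced later in this changeset. A minimal usage sketch, assuming apiRequest resolves to the parsed JSON body returned by the handlers above and that the user is already authenticated:

import { autoSyncAPI } from '$lib/api.js';

// Enable daily LoTW auto-sync for the current user (illustrative values)
const res = await autoSyncAPI.updateSettings({ lotwEnabled: true, lotwIntervalHours: 24 });
if (res.success) {
  console.log('Next LoTW sync scheduled for', res.settings.lotwNextSyncAt);
} else {
  console.error(res.error);
}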
111
src/backend/migrations/add-auto-sync-settings.js
Normal file
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* Migration: Add auto_sync_settings table
|
||||
*
|
||||
* This script creates the auto_sync_settings table for managing
|
||||
* automatic sync intervals for DCL and LoTW services.
|
||||
* Users can enable/disable auto-sync and configure sync intervals.
|
||||
*/
|
||||
|
||||
import Database from 'bun:sqlite';
|
||||
import { join, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// ES module equivalent of __dirname
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
const dbPath = join(__dirname, '../award.db');
|
||||
const sqlite = new Database(dbPath);
|
||||
|
||||
async function migrate() {
|
||||
console.log('Starting migration: Add auto-sync settings...');
|
||||
|
||||
try {
|
||||
// Check if auto_sync_settings table already exists
|
||||
const tableExists = sqlite.query(`
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='auto_sync_settings'
|
||||
`).get();
|
||||
|
||||
if (tableExists) {
|
||||
console.log('Table auto_sync_settings already exists. Skipping...');
|
||||
} else {
|
||||
// Create auto_sync_settings table
|
||||
sqlite.exec(`
|
||||
CREATE TABLE auto_sync_settings (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
lotw_enabled INTEGER NOT NULL DEFAULT 0,
|
||||
lotw_interval_hours INTEGER NOT NULL DEFAULT 24,
|
||||
lotw_last_sync_at INTEGER,
|
||||
lotw_next_sync_at INTEGER,
|
||||
dcl_enabled INTEGER NOT NULL DEFAULT 0,
|
||||
dcl_interval_hours INTEGER NOT NULL DEFAULT 24,
|
||||
dcl_last_sync_at INTEGER,
|
||||
dcl_next_sync_at INTEGER,
|
||||
created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
|
||||
updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
)
|
||||
`);
|
||||
|
||||
// Create index for faster queries on next_sync_at
|
||||
sqlite.exec(`
|
||||
CREATE INDEX idx_auto_sync_settings_lotw_next_sync_at
|
||||
ON auto_sync_settings(lotw_next_sync_at)
|
||||
WHERE lotw_enabled = 1
|
||||
`);
|
||||
|
||||
sqlite.exec(`
|
||||
CREATE INDEX idx_auto_sync_settings_dcl_next_sync_at
|
||||
ON auto_sync_settings(dcl_next_sync_at)
|
||||
WHERE dcl_enabled = 1
|
||||
`);
|
||||
|
||||
console.log('Created auto_sync_settings table with indexes');
|
||||
}
|
||||
|
||||
console.log('Migration complete! Auto-sync settings table added to database.');
|
||||
} catch (error) {
|
||||
console.error('Migration failed:', error);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
sqlite.close();
|
||||
}
|
||||
|
||||
async function rollback() {
|
||||
console.log('Starting rollback: Remove auto-sync settings...');
|
||||
|
||||
try {
|
||||
// Drop indexes first
|
||||
sqlite.exec(`DROP INDEX IF EXISTS idx_auto_sync_settings_lotw_next_sync_at`);
|
||||
sqlite.exec(`DROP INDEX IF EXISTS idx_auto_sync_settings_dcl_next_sync_at`);
|
||||
|
||||
// Drop table
|
||||
sqlite.exec(`DROP TABLE IF EXISTS auto_sync_settings`);
|
||||
|
||||
console.log('Rollback complete! Auto-sync settings table removed from database.');
|
||||
} catch (error) {
|
||||
console.error('Rollback failed:', error);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
sqlite.close();
|
||||
}
|
||||
|
||||
// Check if this is a rollback
|
||||
const args = process.argv.slice(2);
|
||||
if (args.includes('--rollback') || args.includes('-r')) {
|
||||
rollback().then(() => {
|
||||
console.log('Rollback script completed successfully');
|
||||
process.exit(0);
|
||||
});
|
||||
} else {
|
||||
// Run migration
|
||||
migrate().then(() => {
|
||||
console.log('Migration script completed successfully');
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
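Once the migration has run, the new table can be verified with the same bun:sqlite API the script uses. A minimal check, assuming it is executed from the repository root so the relative database path matches the one above:

import Database from 'bun:sqlite';

const db = new Database('src/backend/award.db');
const row = db
  .query("SELECT name FROM sqlite_master WHERE type='table' AND name='auto_sync_settings'")
  .get();
console.log(row ? 'auto_sync_settings table present' : 'migration not applied yet');
db.close();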
@@ -34,31 +34,35 @@ export async function logAdminAction(adminId, actionType, targetUserId = null, d
|
||||
* @returns {Promise<Array>} Array of admin actions
|
||||
*/
|
||||
export async function getAdminActions(adminId = null, { limit = 50, offset = 0 } = {}) {
|
||||
let query = db
|
||||
.select({
|
||||
id: adminActions.id,
|
||||
adminId: adminActions.adminId,
|
||||
adminEmail: users.email,
|
||||
adminCallsign: users.callsign,
|
||||
actionType: adminActions.actionType,
|
||||
targetUserId: adminActions.targetUserId,
|
||||
targetEmail: sql`target_users.email`.as('targetEmail'),
|
||||
targetCallsign: sql`target_users.callsign`.as('targetCallsign'),
|
||||
details: adminActions.details,
|
||||
createdAt: adminActions.createdAt,
|
||||
})
|
||||
.from(adminActions)
|
||||
.leftJoin(users, eq(adminActions.adminId, users.id))
|
||||
.leftJoin(sql`${users} as target_users`, eq(adminActions.targetUserId, sql.raw('target_users.id')))
|
||||
.orderBy(desc(adminActions.createdAt))
|
||||
.limit(limit)
|
||||
.offset(offset);
|
||||
// Use raw SQL for the self-join (admin users and target users from same users table)
|
||||
// Using bun:sqlite prepared statements for raw SQL
|
||||
let query = `
|
||||
SELECT
|
||||
aa.id as id,
|
||||
aa.admin_id as adminId,
|
||||
admin_user.email as adminEmail,
|
||||
admin_user.callsign as adminCallsign,
|
||||
aa.action_type as actionType,
|
||||
aa.target_user_id as targetUserId,
|
||||
target_user.email as targetEmail,
|
||||
target_user.callsign as targetCallsign,
|
||||
aa.details as details,
|
||||
aa.created_at as createdAt
|
||||
FROM admin_actions aa
|
||||
LEFT JOIN users admin_user ON admin_user.id = aa.admin_id
|
||||
LEFT JOIN users target_user ON target_user.id = aa.target_user_id
|
||||
`;
|
||||
|
||||
if (adminId) {
|
||||
query = query.where(eq(adminActions.adminId, adminId));
|
||||
const params = [];
|
||||
if (adminId !== null) {
|
||||
query += ` WHERE aa.admin_id = ?`;
|
||||
params.push(adminId);
|
||||
}
|
||||
|
||||
return await query;
|
||||
query += ` ORDER BY aa.created_at DESC LIMIT ? OFFSET ?`;
|
||||
params.push(limit, offset);
|
||||
|
||||
return sqlite.prepare(query).all(...params);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -127,7 +131,12 @@ export async function getUserStats() {
|
||||
lotwConfirmed: sql`CAST(SUM(CASE WHEN ${qsos.lotwQslRstatus} = 'Y' THEN 1 ELSE 0 END) AS INTEGER)`,
|
||||
dclConfirmed: sql`CAST(SUM(CASE WHEN ${qsos.dclQslRstatus} = 'Y' THEN 1 ELSE 0 END) AS INTEGER)`,
|
||||
totalConfirmed: sql`CAST(SUM(CASE WHEN ${qsos.lotwQslRstatus} = 'Y' OR ${qsos.dclQslRstatus} = 'Y' THEN 1 ELSE 0 END) AS INTEGER)`,
|
||||
lastSync: sql`MAX(${qsos.createdAt})`,
|
||||
lastSync: sql`(
|
||||
SELECT MAX(${syncJobs.completedAt})
|
||||
FROM ${syncJobs}
|
||||
WHERE ${syncJobs.userId} = ${users.id}
|
||||
AND ${syncJobs.status} = 'completed'
|
||||
)`.mapWith(Number),
|
||||
createdAt: users.createdAt,
|
||||
})
|
||||
.from(users)
|
||||
@@ -135,7 +144,11 @@ export async function getUserStats() {
|
||||
.groupBy(users.id)
|
||||
.orderBy(sql`COUNT(${qsos.id}) DESC`);
|
||||
|
||||
return stats;
|
||||
// Convert lastSync timestamps (seconds) to Date objects for JSON serialization
|
||||
return stats.map(stat => ({
|
||||
...stat,
|
||||
lastSync: stat.lastSync ? new Date(stat.lastSync * 1000) : null,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -228,24 +241,26 @@ export async function stopImpersonation(adminId, targetUserId) {
|
||||
* @returns {Promise<Array>} Array of recent impersonation actions
|
||||
*/
|
||||
export async function getImpersonationStatus(adminId, { limit = 10 } = {}) {
|
||||
const impersonations = await db
|
||||
.select({
|
||||
id: adminActions.id,
|
||||
actionType: adminActions.actionType,
|
||||
targetUserId: adminActions.targetUserId,
|
||||
targetEmail: sql`target_users.email`,
|
||||
targetCallsign: sql`target_users.callsign`,
|
||||
details: adminActions.details,
|
||||
createdAt: adminActions.createdAt,
|
||||
})
|
||||
.from(adminActions)
|
||||
.leftJoin(sql`${users} as target_users`, eq(adminActions.targetUserId, sql.raw('target_users.id')))
|
||||
.where(eq(adminActions.adminId, adminId))
|
||||
.where(sql`${adminActions.actionType} LIKE 'impersonate%'`)
|
||||
.orderBy(desc(adminActions.createdAt))
|
||||
.limit(limit);
|
||||
// Use raw SQL for the self-join to avoid Drizzle alias issues
|
||||
// Using bun:sqlite prepared statements for raw SQL
|
||||
const query = `
|
||||
SELECT
|
||||
aa.id as id,
|
||||
aa.action_type as actionType,
|
||||
aa.target_user_id as targetUserId,
|
||||
u.email as targetEmail,
|
||||
u.callsign as targetCallsign,
|
||||
aa.details as details,
|
||||
aa.created_at as createdAt
|
||||
FROM admin_actions aa
|
||||
LEFT JOIN users u ON u.id = aa.target_user_id
|
||||
WHERE aa.admin_id = ?
|
||||
AND aa.action_type LIKE 'impersonate%'
|
||||
ORDER BY aa.created_at DESC
|
||||
LIMIT ?
|
||||
`;
|
||||
|
||||
return impersonations;
|
||||
return sqlite.prepare(query).all(adminId, limit);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
373
src/backend/services/auto-sync.service.js
Normal file
@@ -0,0 +1,373 @@
|
||||
import { db, logger } from '../config.js';
|
||||
import { autoSyncSettings, users } from '../db/schema/index.js';
|
||||
import { eq, and, lte, or } from 'drizzle-orm';
|
||||
|
||||
/**
|
||||
* Auto-Sync Settings Service
|
||||
* Manages user preferences for automatic DCL and LoTW synchronization
|
||||
*/
|
||||
|
||||
// Validation constants
|
||||
export const MIN_INTERVAL_HOURS = 1;
|
||||
export const MAX_INTERVAL_HOURS = 720; // 30 days
|
||||
export const DEFAULT_INTERVAL_HOURS = 24;
|
||||
|
||||
/**
|
||||
* Get auto-sync settings for a user
|
||||
* Creates default settings if they don't exist
|
||||
*
|
||||
* @param {number} userId - User ID
|
||||
* @returns {Promise<Object>} Auto-sync settings
|
||||
*/
|
||||
export async function getAutoSyncSettings(userId) {
|
||||
try {
|
||||
let [settings] = await db
|
||||
.select()
|
||||
.from(autoSyncSettings)
|
||||
.where(eq(autoSyncSettings.userId, userId));
|
||||
|
||||
// Create default settings if they don't exist
|
||||
if (!settings) {
|
||||
logger.debug('Creating default auto-sync settings for user', { userId });
|
||||
[settings] = await db
|
||||
.insert(autoSyncSettings)
|
||||
.values({
|
||||
userId,
|
||||
lotwEnabled: false,
|
||||
lotwIntervalHours: DEFAULT_INTERVAL_HOURS,
|
||||
dclEnabled: false,
|
||||
dclIntervalHours: DEFAULT_INTERVAL_HOURS,
|
||||
})
|
||||
.returning();
|
||||
}
|
||||
|
||||
return {
|
||||
lotwEnabled: settings.lotwEnabled,
|
||||
lotwIntervalHours: settings.lotwIntervalHours,
|
||||
lotwLastSyncAt: settings.lotwLastSyncAt,
|
||||
lotwNextSyncAt: settings.lotwNextSyncAt,
|
||||
dclEnabled: settings.dclEnabled,
|
||||
dclIntervalHours: settings.dclIntervalHours,
|
||||
dclLastSyncAt: settings.dclLastSyncAt,
|
||||
dclNextSyncAt: settings.dclNextSyncAt,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to get auto-sync settings', { error: error.message, userId });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate interval hours
|
||||
* @param {number} hours - Interval hours to validate
|
||||
* @returns {Object} Validation result
|
||||
*/
|
||||
function validateIntervalHours(hours) {
|
||||
if (typeof hours !== 'number' || isNaN(hours)) {
|
||||
return { valid: false, error: 'Interval must be a number' };
|
||||
}
|
||||
if (!Number.isInteger(hours)) {
|
||||
return { valid: false, error: 'Interval must be a whole number of hours' };
|
||||
}
|
||||
if (hours < MIN_INTERVAL_HOURS) {
|
||||
return { valid: false, error: `Interval must be at least ${MIN_INTERVAL_HOURS} hour` };
|
||||
}
|
||||
if (hours > MAX_INTERVAL_HOURS) {
|
||||
return { valid: false, error: `Interval must be at most ${MAX_INTERVAL_HOURS} hours (30 days)` };
|
||||
}
|
||||
return { valid: true };
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate next sync time based on interval
|
||||
* @param {number} intervalHours - Interval in hours
|
||||
* @returns {Date} Next sync time
|
||||
*/
|
||||
function calculateNextSyncTime(intervalHours) {
|
||||
const nextSync = new Date();
|
||||
nextSync.setHours(nextSync.getHours() + intervalHours);
|
||||
return nextSync;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update auto-sync settings for a user
|
||||
*
|
||||
* @param {number} userId - User ID
|
||||
* @param {Object} settings - Settings to update
|
||||
* @returns {Promise<Object>} Updated settings
|
||||
*/
|
||||
export async function updateAutoSyncSettings(userId, settings) {
|
||||
try {
|
||||
// Get current settings
|
||||
let [currentSettings] = await db
|
||||
.select()
|
||||
.from(autoSyncSettings)
|
||||
.where(eq(autoSyncSettings.userId, userId));
|
||||
|
||||
// Create default settings if they don't exist
|
||||
if (!currentSettings) {
|
||||
[currentSettings] = await db
|
||||
.insert(autoSyncSettings)
|
||||
.values({
|
||||
userId,
|
||||
lotwEnabled: false,
|
||||
lotwIntervalHours: DEFAULT_INTERVAL_HOURS,
|
||||
dclEnabled: false,
|
||||
dclIntervalHours: DEFAULT_INTERVAL_HOURS,
|
||||
})
|
||||
.returning();
|
||||
}
|
||||
|
||||
// Prepare update data
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
// Validate and update LoTW settings
|
||||
if (settings.lotwEnabled !== undefined) {
|
||||
if (typeof settings.lotwEnabled !== 'boolean') {
|
||||
throw new Error('lotwEnabled must be a boolean');
|
||||
}
|
||||
updateData.lotwEnabled = settings.lotwEnabled;
|
||||
|
||||
// If enabling for the first time or interval changed, set next sync time
|
||||
if (settings.lotwEnabled && (!currentSettings.lotwEnabled || settings.lotwIntervalHours)) {
|
||||
const intervalHours = settings.lotwIntervalHours || currentSettings.lotwIntervalHours;
|
||||
const validation = validateIntervalHours(intervalHours);
|
||||
if (!validation.valid) {
|
||||
throw new Error(`LoTW interval: ${validation.error}`);
|
||||
}
|
||||
updateData.lotwNextSyncAt = calculateNextSyncTime(intervalHours);
|
||||
} else if (!settings.lotwEnabled) {
|
||||
// Clear next sync when disabling
|
||||
updateData.lotwNextSyncAt = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (settings.lotwIntervalHours !== undefined) {
|
||||
const validation = validateIntervalHours(settings.lotwIntervalHours);
|
||||
if (!validation.valid) {
|
||||
throw new Error(`LoTW interval: ${validation.error}`);
|
||||
}
|
||||
updateData.lotwIntervalHours = settings.lotwIntervalHours;
|
||||
|
||||
// Update next sync time if LoTW is enabled
|
||||
if (currentSettings.lotwEnabled || settings.lotwEnabled) {
|
||||
updateData.lotwNextSyncAt = calculateNextSyncTime(settings.lotwIntervalHours);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate and update DCL settings
|
||||
if (settings.dclEnabled !== undefined) {
|
||||
if (typeof settings.dclEnabled !== 'boolean') {
|
||||
throw new Error('dclEnabled must be a boolean');
|
||||
}
|
||||
updateData.dclEnabled = settings.dclEnabled;
|
||||
|
||||
// If enabling for the first time or interval changed, set next sync time
|
||||
if (settings.dclEnabled && (!currentSettings.dclEnabled || settings.dclIntervalHours)) {
|
||||
const intervalHours = settings.dclIntervalHours || currentSettings.dclIntervalHours;
|
||||
const validation = validateIntervalHours(intervalHours);
|
||||
if (!validation.valid) {
|
||||
throw new Error(`DCL interval: ${validation.error}`);
|
||||
}
|
||||
updateData.dclNextSyncAt = calculateNextSyncTime(intervalHours);
|
||||
} else if (!settings.dclEnabled) {
|
||||
// Clear next sync when disabling
|
||||
updateData.dclNextSyncAt = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (settings.dclIntervalHours !== undefined) {
|
||||
const validation = validateIntervalHours(settings.dclIntervalHours);
|
||||
if (!validation.valid) {
|
||||
throw new Error(`DCL interval: ${validation.error}`);
|
||||
}
|
||||
updateData.dclIntervalHours = settings.dclIntervalHours;
|
||||
|
||||
// Update next sync time if DCL is enabled
|
||||
if (currentSettings.dclEnabled || settings.dclEnabled) {
|
||||
updateData.dclNextSyncAt = calculateNextSyncTime(settings.dclIntervalHours);
|
||||
}
|
||||
}
|
||||
|
||||
// Update settings in database
|
||||
const [updated] = await db
|
||||
.update(autoSyncSettings)
|
||||
.set(updateData)
|
||||
.where(eq(autoSyncSettings.userId, userId))
|
||||
.returning();
|
||||
|
||||
logger.info('Updated auto-sync settings', {
|
||||
userId,
|
||||
lotwEnabled: updated.lotwEnabled,
|
||||
lotwIntervalHours: updated.lotwIntervalHours,
|
||||
dclEnabled: updated.dclEnabled,
|
||||
dclIntervalHours: updated.dclIntervalHours,
|
||||
});
|
||||
|
||||
return {
|
||||
lotwEnabled: updated.lotwEnabled,
|
||||
lotwIntervalHours: updated.lotwIntervalHours,
|
||||
lotwLastSyncAt: updated.lotwLastSyncAt,
|
||||
lotwNextSyncAt: updated.lotwNextSyncAt,
|
||||
dclEnabled: updated.dclEnabled,
|
||||
dclIntervalHours: updated.dclIntervalHours,
|
||||
dclLastSyncAt: updated.dclLastSyncAt,
|
||||
dclNextSyncAt: updated.dclNextSyncAt,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to update auto-sync settings', { error: error.message, userId });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get users with pending syncs for a specific service
|
||||
*
|
||||
* @param {string} service - 'lotw' or 'dcl'
|
||||
* @returns {Promise<Array>} List of users with pending syncs
|
||||
*/
|
||||
export async function getPendingSyncUsers(service) {
|
||||
try {
|
||||
if (service !== 'lotw' && service !== 'dcl') {
|
||||
throw new Error('Service must be "lotw" or "dcl"');
|
||||
}
|
||||
|
||||
const enabledField = service === 'lotw' ? autoSyncSettings.lotwEnabled : autoSyncSettings.dclEnabled;
|
||||
const nextSyncField = service === 'lotw' ? autoSyncSettings.lotwNextSyncAt : autoSyncSettings.dclNextSyncAt;
|
||||
const credentialField = service === 'lotw' ? users.lotwUsername : users.dclApiKey;
|
||||
const intervalField = service === 'lotw' ? autoSyncSettings.lotwIntervalHours : autoSyncSettings.dclIntervalHours;
|
||||
|
||||
const now = new Date();
|
||||
|
||||
// Get users with auto-sync enabled and next sync time in the past
|
||||
const results = await db
|
||||
.select({
|
||||
userId: autoSyncSettings.userId,
|
||||
lotwEnabled: autoSyncSettings.lotwEnabled,
|
||||
lotwIntervalHours: autoSyncSettings.lotwIntervalHours,
|
||||
lotwNextSyncAt: autoSyncSettings.lotwNextSyncAt,
|
||||
dclEnabled: autoSyncSettings.dclEnabled,
|
||||
dclIntervalHours: autoSyncSettings.dclIntervalHours,
|
||||
dclNextSyncAt: autoSyncSettings.dclNextSyncAt,
|
||||
hasCredentials: credentialField, // Just check if field exists (not null/empty)
|
||||
})
|
||||
.from(autoSyncSettings)
|
||||
.innerJoin(users, eq(autoSyncSettings.userId, users.id))
|
||||
.where(
|
||||
and(
|
||||
eq(enabledField, true),
|
||||
lte(nextSyncField, now)
|
||||
)
|
||||
);
|
||||
|
||||
// Split into users with and without credentials
|
||||
const withCredentials = results.filter(r => r.hasCredentials);
|
||||
const withoutCredentials = results.filter(r => !r.hasCredentials);
|
||||
|
||||
// For users without credentials, update their next sync time to retry in 24 hours
|
||||
// This prevents them from being continuously retried every minute
|
||||
if (withoutCredentials.length > 0) {
|
||||
const retryDate = new Date();
|
||||
retryDate.setHours(retryDate.getHours() + 24);
|
||||
|
||||
for (const user of withoutCredentials) {
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
if (service === 'lotw') {
|
||||
updateData.lotwNextSyncAt = retryDate;
|
||||
} else {
|
||||
updateData.dclNextSyncAt = retryDate;
|
||||
}
|
||||
|
||||
await db
|
||||
.update(autoSyncSettings)
|
||||
.set(updateData)
|
||||
.where(eq(autoSyncSettings.userId, user.userId));
|
||||
}
|
||||
|
||||
logger.warn('Skipped auto-sync for users without credentials, will retry in 24 hours', {
|
||||
service,
|
||||
count: withoutCredentials.length,
|
||||
userIds: withoutCredentials.map(u => u.userId),
|
||||
});
|
||||
}
|
||||
|
||||
logger.debug('Found pending sync users', {
|
||||
service,
|
||||
total: results.length,
|
||||
withCredentials: withCredentials.length,
|
||||
withoutCredentials: withoutCredentials.length,
|
||||
});
|
||||
|
||||
return withCredentials;
|
||||
} catch (error) {
|
||||
logger.error('Failed to get pending sync users', { error: error.message, service });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update sync timestamps after a successful sync
|
||||
*
|
||||
* @param {number} userId - User ID
|
||||
* @param {string} service - 'lotw' or 'dcl'
|
||||
* @param {Date} lastSyncDate - Date of last sync
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
export async function updateSyncTimestamps(userId, service, lastSyncDate) {
|
||||
try {
|
||||
if (service !== 'lotw' && service !== 'dcl') {
|
||||
throw new Error('Service must be "lotw" or "dcl"');
|
||||
}
|
||||
|
||||
// Get current settings to find the interval
|
||||
const [currentSettings] = await db
|
||||
.select()
|
||||
.from(autoSyncSettings)
|
||||
.where(eq(autoSyncSettings.userId, userId));
|
||||
|
||||
if (!currentSettings) {
|
||||
logger.warn('No auto-sync settings found for user', { userId, service });
|
||||
return;
|
||||
}
|
||||
|
||||
const intervalHours = service === 'lotw'
|
||||
? currentSettings.lotwIntervalHours
|
||||
: currentSettings.dclIntervalHours;
|
||||
|
||||
// Calculate next sync time
|
||||
const nextSyncAt = calculateNextSyncTime(intervalHours);
|
||||
|
||||
// Update timestamps
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
if (service === 'lotw') {
|
||||
updateData.lotwLastSyncAt = lastSyncDate;
|
||||
updateData.lotwNextSyncAt = nextSyncAt;
|
||||
} else {
|
||||
updateData.dclLastSyncAt = lastSyncDate;
|
||||
updateData.dclNextSyncAt = nextSyncAt;
|
||||
}
|
||||
|
||||
await db
|
||||
.update(autoSyncSettings)
|
||||
.set(updateData)
|
||||
.where(eq(autoSyncSettings.userId, userId));
|
||||
|
||||
logger.debug('Updated sync timestamps', {
|
||||
userId,
|
||||
service,
|
||||
lastSyncAt: lastSyncDate.toISOString(),
|
||||
nextSyncAt: nextSyncAt.toISOString(),
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to update sync timestamps', { error: error.message, userId, service });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
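A short sketch of how these service functions fit together on the backend, using the import path from index.js above; the user id is hypothetical and validation failures surface as thrown Errors, as implemented in updateAutoSyncSettings:

import {
  getAutoSyncSettings,
  updateAutoSyncSettings,
  getPendingSyncUsers,
} from './services/auto-sync.service.js';

const userId = 42; // hypothetical user id for illustration

await getAutoSyncSettings(userId); // creates default settings on first call
await updateAutoSyncSettings(userId, { dclEnabled: true, dclIntervalHours: 12 });

// Users whose dclNextSyncAt is in the past and who have DCL credentials configured
const due = await getPendingSyncUsers('dcl');
console.log(due.map(u => u.userId));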
@@ -1,7 +1,7 @@
|
||||
import { db, logger } from '../config.js';
|
||||
import { qsos } from '../db/schema/index.js';
|
||||
import { eq, and, or, desc, sql } from 'drizzle-orm';
|
||||
import { readFileSync } from 'fs';
|
||||
import { readFileSync, readdirSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { getCachedAwardProgress, setCachedAwardProgress } from './cache.service.js';
|
||||
|
||||
@@ -13,27 +13,25 @@ import { getCachedAwardProgress, setCachedAwardProgress } from './cache.service.
|
||||
// Load award definitions from files
|
||||
const AWARD_DEFINITIONS_DIR = join(process.cwd(), 'award-definitions');
|
||||
|
||||
// In-memory cache for award definitions (static, never changes at runtime)
|
||||
let cachedAwardDefinitions = null;
|
||||
|
||||
/**
|
||||
* Load all award definitions
|
||||
* Load all award definitions (cached in memory)
|
||||
*/
|
||||
function loadAwardDefinitions() {
|
||||
// Return cached definitions if available
|
||||
if (cachedAwardDefinitions) {
|
||||
return cachedAwardDefinitions;
|
||||
}
|
||||
|
||||
const definitions = [];
|
||||
|
||||
try {
|
||||
const files = [
|
||||
'dxcc.json',
|
||||
'dxcc-cw.json',
|
||||
'was.json',
|
||||
'vucc-sat.json',
|
||||
'sat-rs44.json',
|
||||
'special-stations.json',
|
||||
'dld.json',
|
||||
'dld-80m.json',
|
||||
'dld-40m.json',
|
||||
'dld-cw.json',
|
||||
'dld-80m-cw.json',
|
||||
'73-on-73.json',
|
||||
];
|
||||
// Auto-discover all JSON files in the award-definitions directory
|
||||
const files = readdirSync(AWARD_DEFINITIONS_DIR)
|
||||
.filter(f => f.endsWith('.json'))
|
||||
.sort();
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
@@ -49,6 +47,9 @@ function loadAwardDefinitions() {
|
||||
logger.error('Error loading award definitions', { error: error.message });
|
||||
}
|
||||
|
||||
// Cache the definitions for future calls
|
||||
cachedAwardDefinitions = definitions;
|
||||
|
||||
return definitions;
|
||||
}
|
||||
|
||||
@@ -140,11 +141,27 @@ export async function calculateAwardProgress(userId, award, options = {}) {
|
||||
logger.debug('QSOs after filters', { count: filteredQSOs.length });
|
||||
}
|
||||
|
||||
// Apply allowed_bands filter if present
|
||||
let finalQSOs = filteredQSOs;
|
||||
if (rules.allowed_bands && Array.isArray(rules.allowed_bands) && rules.allowed_bands.length > 0) {
|
||||
finalQSOs = filteredQSOs.filter(qso => {
|
||||
const band = qso.band;
|
||||
return rules.allowed_bands.includes(band);
|
||||
});
|
||||
logger.debug('QSOs after allowed_bands filter', { count: finalQSOs.length });
|
||||
}
|
||||
|
||||
// Apply satellite_only filter if present
|
||||
if (rules.satellite_only) {
|
||||
finalQSOs = finalQSOs.filter(qso => qso.satName);
|
||||
logger.debug('QSOs after satellite_only filter', { count: finalQSOs.length });
|
||||
}
|
||||
|
||||
// Calculate worked and confirmed entities
|
||||
const workedEntities = new Set();
|
||||
const confirmedEntities = new Set();
|
||||
|
||||
for (const qso of filteredQSOs) {
|
||||
for (const qso of finalQSOs) {
|
||||
const entity = getEntityValue(qso, rules.entityType);
|
||||
|
||||
if (entity) {
|
||||
@@ -199,7 +216,7 @@ async function calculateDOKAwardProgress(userId, award, options = {}) {
|
||||
}
|
||||
|
||||
// Track unique (DOK, band, mode) combinations
|
||||
const dokCombinations = new Map(); // Key: "DOK/band/mode" -> detail object
|
||||
const dokCombinations = new Map(); // Key: "DOK/band/mode" -> detail object with qsos array
|
||||
|
||||
for (const qso of filteredQSOs) {
|
||||
const dok = qso.darcDok;
|
||||
@@ -212,29 +229,36 @@ async function calculateDOKAwardProgress(userId, award, options = {}) {
|
||||
// Initialize combination if not exists
|
||||
if (!dokCombinations.has(combinationKey)) {
|
||||
dokCombinations.set(combinationKey, {
|
||||
qsoId: qso.id,
|
||||
entity: dok,
|
||||
entityId: null,
|
||||
entityName: dok,
|
||||
band,
|
||||
mode,
|
||||
callsign: qso.callsign,
|
||||
worked: false,
|
||||
confirmed: false,
|
||||
qsoDate: qso.qsoDate,
|
||||
dclQslRdate: null,
|
||||
qsos: [], // Array of confirmed QSOs for this slot
|
||||
});
|
||||
}
|
||||
|
||||
const detail = dokCombinations.get(combinationKey);
|
||||
detail.worked = true;
|
||||
|
||||
// Check for DCL confirmation
|
||||
// Check for DCL confirmation and add to qsos array
|
||||
if (qso.dclQslRstatus === 'Y') {
|
||||
if (!detail.confirmed) {
|
||||
detail.confirmed = true;
|
||||
detail.dclQslRdate = qso.dclQslRdate;
|
||||
}
|
||||
// Add this confirmed QSO to the qsos array
|
||||
detail.qsos.push({
|
||||
qsoId: qso.id,
|
||||
callsign: qso.callsign,
|
||||
mode: qso.mode,
|
||||
qsoDate: qso.qsoDate,
|
||||
timeOn: qso.timeOn,
|
||||
band: qso.band,
|
||||
satName: qso.satName,
|
||||
confirmed: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -339,15 +363,13 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
|
||||
if (!combinationMap.has(combinationKey)) {
|
||||
combinationMap.set(combinationKey, {
|
||||
qsoId: qso.id,
|
||||
callsign,
|
||||
band,
|
||||
mode,
|
||||
points,
|
||||
worked: true,
|
||||
confirmed: false,
|
||||
qsoDate: qso.qsoDate,
|
||||
lotwQslRdate: null,
|
||||
qsos: [], // Array of confirmed QSOs for this slot
|
||||
});
|
||||
}
|
||||
|
||||
@@ -355,8 +377,18 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
const detail = combinationMap.get(combinationKey);
|
||||
if (!detail.confirmed) {
|
||||
detail.confirmed = true;
|
||||
detail.lotwQslRdate = qso.lotwQslRdate;
|
||||
}
|
||||
// Add this confirmed QSO to the qsos array
|
||||
detail.qsos.push({
|
||||
qsoId: qso.id,
|
||||
callsign: qso.callsign,
|
||||
mode: qso.mode,
|
||||
qsoDate: qso.qsoDate,
|
||||
timeOn: qso.timeOn,
|
||||
band: qso.band,
|
||||
satName: qso.satName,
|
||||
confirmed: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -378,15 +410,11 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
|
||||
if (!stationMap.has(callsign)) {
|
||||
stationMap.set(callsign, {
|
||||
qsoId: qso.id,
|
||||
callsign,
|
||||
points,
|
||||
worked: true,
|
||||
confirmed: false,
|
||||
qsoDate: qso.qsoDate,
|
||||
band: qso.band,
|
||||
mode: qso.mode,
|
||||
lotwQslRdate: null,
|
||||
qsos: [], // Array of confirmed QSOs for this station
|
||||
});
|
||||
}
|
||||
|
||||
@@ -394,8 +422,18 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
const detail = stationMap.get(callsign);
|
||||
if (!detail.confirmed) {
|
||||
detail.confirmed = true;
|
||||
detail.lotwQslRdate = qso.lotwQslRdate;
|
||||
}
|
||||
// Add this confirmed QSO to the qsos array
|
||||
detail.qsos.push({
|
||||
qsoId: qso.id,
|
||||
callsign: qso.callsign,
|
||||
mode: qso.mode,
|
||||
qsoDate: qso.qsoDate,
|
||||
timeOn: qso.timeOn,
|
||||
band: qso.band,
|
||||
satName: qso.satName,
|
||||
confirmed: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -415,6 +453,7 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
|
||||
if (qso.lotwQslRstatus === 'Y') {
|
||||
totalPoints += points;
|
||||
// For perQso mode, each QSO is its own slot with a qsos array containing just itself
|
||||
stationDetails.push({
|
||||
qsoId: qso.id,
|
||||
callsign,
|
||||
@@ -424,7 +463,16 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
qsoDate: qso.qsoDate,
|
||||
band: qso.band,
|
||||
mode: qso.mode,
|
||||
lotwQslRdate: qso.lotwQslRdate,
|
||||
qsos: [{
|
||||
qsoId: qso.id,
|
||||
callsign: qso.callsign,
|
||||
mode: qso.mode,
|
||||
qsoDate: qso.qsoDate,
|
||||
timeOn: qso.timeOn,
|
||||
band: qso.band,
|
||||
satName: qso.satName,
|
||||
confirmed: true,
|
||||
}],
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -465,6 +513,7 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
mode: detail.mode,
|
||||
callsign: detail.callsign,
|
||||
lotwQslRdate: detail.lotwQslRdate,
|
||||
qsos: detail.qsos || [], // All confirmed QSOs for this slot
|
||||
};
|
||||
} else if (countMode === 'perStation') {
|
||||
return {
|
||||
@@ -480,6 +529,7 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
mode: detail.mode,
|
||||
callsign: detail.callsign,
|
||||
lotwQslRdate: detail.lotwQslRdate,
|
||||
qsos: detail.qsos || [], // All confirmed QSOs for this station
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
@@ -495,6 +545,7 @@ async function calculatePointsAwardProgress(userId, award, options = {}) {
|
||||
mode: detail.mode,
|
||||
callsign: detail.callsign,
|
||||
lotwQslRdate: detail.lotwQslRdate,
|
||||
qsos: detail.qsos || [], // All confirmed QSOs for this slot (just this one QSO)
|
||||
};
|
||||
}
|
||||
});
|
||||
@@ -675,48 +726,77 @@ export async function getAwardEntityBreakdown(userId, awardId) {
|
||||
// Apply filters
|
||||
const filteredQSOs = applyFilters(allQSOs, rules.filters);
|
||||
|
||||
// Group by entity
|
||||
const entityMap = new Map();
|
||||
// Apply allowed_bands filter if present
|
||||
let finalQSOs = filteredQSOs;
|
||||
if (rules.allowed_bands && Array.isArray(rules.allowed_bands) && rules.allowed_bands.length > 0) {
|
||||
finalQSOs = filteredQSOs.filter(qso => {
|
||||
const band = qso.band;
|
||||
return rules.allowed_bands.includes(band);
|
||||
});
|
||||
}
|
||||
|
||||
for (const qso of filteredQSOs) {
|
||||
// Apply satellite_only filter if present
|
||||
if (rules.satellite_only) {
|
||||
finalQSOs = finalQSOs.filter(qso => qso.satName);
|
||||
}
|
||||
|
||||
// Group by (entity, band, mode) slot for entity awards
|
||||
// This allows showing multiple QSOs per entity on different bands/modes
|
||||
const slotMap = new Map(); // Key: "entity/band/mode" -> slot object
|
||||
|
||||
for (const qso of finalQSOs) {
|
||||
const entity = getEntityValue(qso, rules.entityType);
|
||||
|
||||
if (!entity) continue;
|
||||
|
||||
if (!entityMap.has(entity)) {
|
||||
// Determine what to display as the entity name
|
||||
let displayName = String(entity);
|
||||
if (rules.displayField) {
|
||||
let rawValue = qso[rules.displayField];
|
||||
if (rules.displayField === 'grid' && rawValue && rawValue.length > 4) {
|
||||
rawValue = rawValue.substring(0, 4);
|
||||
}
|
||||
displayName = String(rawValue || entity);
|
||||
} else {
|
||||
displayName = qso.entity || qso.state || qso.grid || qso.callsign || String(entity);
|
||||
}
|
||||
const band = qso.band || 'Unknown';
|
||||
const mode = qso.mode || 'Unknown';
|
||||
const slotKey = `${entity}/${band}/${mode}`;
|
||||
|
||||
entityMap.set(entity, {
|
||||
qsoId: qso.id,
|
||||
// Determine what to display as the entity name (only on first create)
|
||||
let displayName = String(entity);
|
||||
if (rules.displayField) {
|
||||
let rawValue = qso[rules.displayField];
|
||||
if (rules.displayField === 'grid' && rawValue && rawValue.length > 4) {
|
||||
rawValue = rawValue.substring(0, 4);
|
||||
}
|
||||
displayName = String(rawValue || entity);
|
||||
} else {
|
||||
displayName = qso.entity || qso.state || qso.grid || qso.callsign || String(entity);
|
||||
}
|
||||
|
||||
if (!slotMap.has(slotKey)) {
|
||||
slotMap.set(slotKey, {
|
||||
entity,
|
||||
entityId: qso.entityId,
|
||||
entityName: displayName,
|
||||
band,
|
||||
mode,
|
||||
worked: false,
|
||||
confirmed: false,
|
||||
qsoDate: qso.qsoDate,
|
||||
band: qso.band,
|
||||
mode: qso.mode,
|
||||
callsign: qso.callsign,
|
||||
satName: qso.satName,
|
||||
qsos: [], // Array of confirmed QSOs for this slot
|
||||
});
|
||||
}
|
||||
|
||||
const entityData = entityMap.get(entity);
|
||||
entityData.worked = true;
|
||||
const slotData = slotMap.get(slotKey);
|
||||
slotData.worked = true;
|
||||
|
||||
// Check for LoTW confirmation and add to qsos array
|
||||
if (qso.lotwQslRstatus === 'Y') {
|
||||
entityData.confirmed = true;
|
||||
entityData.lotwQslRdate = qso.lotwQslRdate;
|
||||
if (!slotData.confirmed) {
|
||||
slotData.confirmed = true;
|
||||
}
|
||||
// Add this confirmed QSO to the qsos array
|
||||
slotData.qsos.push({
|
||||
qsoId: qso.id,
|
||||
callsign: qso.callsign,
|
||||
mode: qso.mode,
|
||||
qsoDate: qso.qsoDate,
|
||||
timeOn: qso.timeOn,
|
||||
band: qso.band,
|
||||
satName: qso.satName,
|
||||
confirmed: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -728,8 +808,8 @@ export async function getAwardEntityBreakdown(userId, awardId) {
|
||||
caption: award.caption,
|
||||
target: rules.target || 0,
|
||||
},
|
||||
entities: Array.from(entityMap.values()),
|
||||
total: entityMap.size,
|
||||
confirmed: Array.from(entityMap.values()).filter((e) => e.confirmed).length,
|
||||
entities: Array.from(slotMap.values()),
|
||||
total: slotMap.size,
|
||||
confirmed: Array.from(slotMap.values()).filter((e) => e.confirmed).length,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -86,32 +86,6 @@ export function clearAllCache() {
|
||||
return size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cache statistics (for monitoring/debugging)
|
||||
* @returns {object} Cache stats
|
||||
*/
|
||||
export function getCacheStats() {
|
||||
const now = Date.now();
|
||||
let expired = 0;
|
||||
let valid = 0;
|
||||
|
||||
for (const [, value] of awardCache) {
|
||||
const age = now - value.timestamp;
|
||||
if (age > CACHE_TTL) {
|
||||
expired++;
|
||||
} else {
|
||||
valid++;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
total: awardCache.size,
|
||||
valid,
|
||||
expired,
|
||||
ttl: CACHE_TTL
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up expired cache entries (maintenance function)
|
||||
* Can be called periodically to free memory
|
||||
|
||||
@@ -4,6 +4,7 @@ import { max, sql, eq, and, desc } from 'drizzle-orm';
|
||||
import { updateJobProgress } from './job-queue.service.js';
|
||||
import { parseDCLResponse, normalizeBand, normalizeMode } from '../utils/adif-parser.js';
|
||||
import { invalidateUserCache, invalidateStatsCache } from './cache.service.js';
|
||||
import { yieldToEventLoop, getQSOKey } from '../utils/sync-helpers.js';
|
||||
|
||||
/**
|
||||
* DCL (DARC Community Logbook) Service
|
||||
@@ -122,17 +123,6 @@ export async function fetchQSOsFromDCL(dclApiKey, sinceDate = null) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse DCL API response from JSON
|
||||
* Can be used for testing with example payloads
|
||||
*
|
||||
* @param {Object} jsonResponse - JSON response in DCL format
|
||||
* @returns {Array} Array of parsed QSO records
|
||||
*/
|
||||
export function parseDCLJSONResponse(jsonResponse) {
|
||||
return parseDCLResponse(jsonResponse);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert DCL ADIF QSO to database format
|
||||
* @param {Object} adifQSO - Parsed ADIF QSO record
|
||||
@@ -169,21 +159,6 @@ function convertQSODatabaseFormat(adifQSO, userId) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Yield to event loop to allow other requests to be processed
|
||||
* This prevents blocking the server during long-running sync operations
|
||||
*/
|
||||
function yieldToEventLoop() {
|
||||
return new Promise(resolve => setImmediate(resolve));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get QSO key for duplicate detection
|
||||
*/
|
||||
function getQSOKey(qso) {
|
||||
return `${qso.callsign}|${qso.qsoDate}|${qso.timeOn}|${qso.band}|${qso.mode}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync QSOs from DCL to database (optimized with batch operations)
|
||||
* Updates existing QSOs with DCL confirmation data
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { db, logger } from '../config.js';
|
||||
import { qsos, qsoChanges } from '../db/schema/index.js';
|
||||
import { qsos, qsoChanges, syncJobs, awardProgress } from '../db/schema/index.js';
|
||||
import { max, sql, eq, and, or, desc, like } from 'drizzle-orm';
|
||||
import { updateJobProgress } from './job-queue.service.js';
|
||||
import { parseADIF, normalizeBand, normalizeMode } from '../utils/adif-parser.js';
|
||||
import { invalidateUserCache, getCachedStats, setCachedStats, invalidateStatsCache } from './cache.service.js';
|
||||
import { trackQueryPerformance, getPerformanceSummary, resetPerformanceMetrics } from './performance.service.js';
|
||||
import { trackQueryPerformance } from './performance.service.js';
|
||||
import { yieldToEventLoop, getQSOKey } from '../utils/sync-helpers.js';
|
||||
|
||||
/**
|
||||
* LoTW (Logbook of the World) Service
|
||||
@@ -81,6 +82,7 @@ const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
* Fetch QSOs from LoTW with retry support
|
||||
*/
|
||||
async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
|
||||
const startTime = Date.now();
|
||||
const url = 'https://lotw.arrl.org/lotwuser/lotwreport.adi';
|
||||
|
||||
const params = new URLSearchParams({
|
||||
@@ -176,7 +178,7 @@ async function fetchQSOsFromLoTW(lotwUsername, lotwPassword, sinceDate = null) {
|
||||
}
|
||||
}
|
||||
|
||||
const totalTime = Math.round((Date.now() - Date.now()) / 1000);
|
||||
const totalTime = Math.round((Date.now() - startTime) / 1000);
|
||||
return {
|
||||
error: `LoTW sync failed: Report not ready after ${MAX_RETRIES} attempts (${totalTime}s). LoTW may be experiencing high load. Please try again later.`
|
||||
};
|
||||
@@ -210,21 +212,6 @@ function convertQSODatabaseFormat(adifQSO, userId) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Yield to event loop to allow other requests to be processed
|
||||
* This prevents blocking the server during long-running sync operations
|
||||
*/
|
||||
function yieldToEventLoop() {
|
||||
return new Promise(resolve => setImmediate(resolve));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get QSO key for duplicate detection
|
||||
*/
|
||||
function getQSOKey(qso) {
|
||||
return `${qso.callsign}|${qso.qsoDate}|${qso.timeOn}|${qso.band}|${qso.mode}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync QSOs from LoTW to database (optimized with batch operations)
|
||||
* @param {number} userId - User ID
|
||||
@@ -609,10 +596,58 @@ export async function getLastLoTWQSLDate(userId) {
|
||||
|
||||
/**
|
||||
* Delete all QSOs for a user
|
||||
* Also deletes related qso_changes records to satisfy foreign key constraints
|
||||
*/
|
||||
export async function deleteQSOs(userId) {
|
||||
logger.debug('Deleting all QSOs for user', { userId });
|
||||
|
||||
// Step 1: Delete qso_changes that reference QSOs for this user
|
||||
// Need to use a subquery since qso_changes doesn't have userId directly
|
||||
const qsoIdsResult = await db
|
||||
.select({ id: qsos.id })
|
||||
.from(qsos)
|
||||
.where(eq(qsos.userId, userId));
|
||||
|
||||
const qsoIds = qsoIdsResult.map(r => r.id);
|
||||
|
||||
let deletedChanges = 0;
|
||||
if (qsoIds.length > 0) {
|
||||
// Delete qso_changes where qsoId is in the list of QSO IDs
|
||||
const changesResult = await db
|
||||
.delete(qsoChanges)
|
||||
.where(sql`${qsoChanges.qsoId} IN ${sql.raw(`(${qsoIds.join(',')})`)}`);
|
||||
|
||||
deletedChanges = changesResult.changes || changesResult || 0;
|
||||
logger.debug('Deleted qso_changes', { count: deletedChanges });
|
||||
}
|
||||
|
||||
// Step 2: Delete the QSOs
|
||||
const result = await db.delete(qsos).where(eq(qsos.userId, userId));
|
||||
return result;
|
||||
logger.debug('Delete result', { result, type: typeof result, keys: Object.keys(result || {}) });
|
||||
|
||||
// Drizzle with SQLite/bun:sqlite returns various formats depending on driver
|
||||
let count = 0;
|
||||
if (result) {
|
||||
if (typeof result === 'number') {
|
||||
count = result;
|
||||
} else if (result.changes !== undefined) {
|
||||
count = result.changes;
|
||||
} else if (result.rows !== undefined) {
|
||||
count = result.rows;
|
||||
} else if (result.meta?.changes !== undefined) {
|
||||
count = result.meta.changes;
|
||||
} else if (result.meta?.rows !== undefined) {
|
||||
count = result.meta.rows;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Deleted QSOs', { userId, count, deletedChanges });
|
||||
|
||||
// Invalidate caches for this user
|
||||
await invalidateStatsCache(userId);
|
||||
await invalidateUserCache(userId);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
234
src/backend/services/scheduler.service.js
Normal file
@@ -0,0 +1,234 @@
|
||||
import { logger } from '../config.js';
|
||||
import {
|
||||
getPendingSyncUsers,
|
||||
updateSyncTimestamps,
|
||||
} from './auto-sync.service.js';
|
||||
import {
|
||||
enqueueJob,
|
||||
getUserActiveJob,
|
||||
} from './job-queue.service.js';
|
||||
import { getUserById } from './auth.service.js';
|
||||
|
||||
/**
|
||||
* Auto-Sync Scheduler Service
|
||||
* Manages automatic synchronization of DCL and LoTW data
|
||||
* Runs every minute to check for due syncs and enqueues jobs
|
||||
*/
|
||||
|
||||
// Scheduler state
|
||||
let schedulerInterval = null;
|
||||
let isRunning = false;
|
||||
let isShuttingDown = false;
|
||||
|
||||
// Scheduler configuration
|
||||
const SCHEDULER_TICK_INTERVAL_MS = 60 * 1000; // 1 minute
|
||||
const INITIAL_DELAY_MS = 5000; // 5 seconds after server start
|
||||
|
||||
// Allow faster tick interval for testing (set via environment variable)
|
||||
const TEST_MODE = process.env.SCHEDULER_TEST_MODE === 'true';
|
||||
const TEST_TICK_INTERVAL_MS = 10 * 1000; // 10 seconds in test mode
|
||||
|
||||
/**
|
||||
* Get scheduler status
|
||||
* @returns {Object} Scheduler status
|
||||
*/
|
||||
export function getSchedulerStatus() {
|
||||
return {
|
||||
isRunning,
|
||||
isShuttingDown,
|
||||
tickIntervalMs: TEST_MODE ? TEST_TICK_INTERVAL_MS : SCHEDULER_TICK_INTERVAL_MS,
|
||||
activeInterval: !!schedulerInterval,
|
||||
testMode: TEST_MODE,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Process pending syncs for a specific service
|
||||
* @param {string} service - 'lotw' or 'dcl'
|
||||
*/
|
||||
async function processServiceSyncs(service) {
|
||||
try {
|
||||
const pendingUsers = await getPendingSyncUsers(service);
|
||||
|
||||
if (pendingUsers.length === 0) {
|
||||
logger.debug('No pending syncs', { service });
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info('Processing pending syncs', {
|
||||
service,
|
||||
count: pendingUsers.length,
|
||||
});
|
||||
|
||||
    for (const user of pendingUsers) {
      if (isShuttingDown) {
        logger.info('Scheduler shutting down, skipping pending sync', {
          service,
          userId: user.userId,
        });
        break;
      }

      try {
        // Check if there's already an active job for this user and service
        const activeJob = await getUserActiveJob(user.userId, `${service}_sync`);

        if (activeJob) {
          logger.debug('User already has active job, skipping', {
            service,
            userId: user.userId,
            activeJobId: activeJob.id,
          });

          // Update the next sync time to try again later
          // This prevents continuous checking while a job is running
          await updateSyncTimestamps(user.userId, service, new Date());
          continue;
        }

        // Enqueue the sync job
        logger.info('Enqueuing auto-sync job', {
          service,
          userId: user.userId,
        });

        const result = await enqueueJob(user.userId, `${service}_sync`);

        if (result.success) {
          // Update timestamps immediately on successful enqueue
          await updateSyncTimestamps(user.userId, service, new Date());
        } else {
          logger.warn('Failed to enqueue auto-sync job', {
            service,
            userId: user.userId,
            reason: result.error,
          });
        }
      } catch (error) {
        logger.error('Error processing user sync', {
          service,
          userId: user.userId,
          error: error.message,
        });
      }
    }
  } catch (error) {
    logger.error('Error processing service syncs', {
      service,
      error: error.message,
    });
  }
}

/**
 * Main scheduler tick function
 * Checks for pending LoTW and DCL syncs and processes them
 */
async function schedulerTick() {
  if (isShuttingDown) {
    logger.debug('Scheduler shutdown in progress, skipping tick');
    return;
  }

  try {
    logger.debug('Scheduler tick started');

    // Process LoTW syncs
    await processServiceSyncs('lotw');

    // Process DCL syncs
    await processServiceSyncs('dcl');

    logger.debug('Scheduler tick completed');
  } catch (error) {
    logger.error('Scheduler tick error', {
      error: error.message,
      stack: error.stack,
    });
  }
}

/**
 * Start the scheduler
 * Begins periodic checks for pending syncs
 */
export function startScheduler() {
  if (isRunning) {
    logger.warn('Scheduler already running');
    return;
  }

  // Check if scheduler is disabled via environment variable
  if (process.env.DISABLE_SCHEDULER === 'true') {
    logger.info('Scheduler disabled via DISABLE_SCHEDULER environment variable');
    return;
  }

  isRunning = true;
  isShuttingDown = false;

  const tickInterval = TEST_MODE ? TEST_TICK_INTERVAL_MS : SCHEDULER_TICK_INTERVAL_MS;

  // Initial delay to allow server to fully start
  logger.info('Scheduler starting, initial tick in 5 seconds', {
    testMode: TEST_MODE,
    tickIntervalMs: tickInterval,
  });

  // Schedule first tick
  setTimeout(() => {
    if (!isShuttingDown) {
      schedulerTick();

      // Set up recurring interval
      schedulerInterval = setInterval(() => {
        if (!isShuttingDown) {
          schedulerTick();
        }
      }, tickInterval);

      logger.info('Scheduler started', {
        tickIntervalMs: tickInterval,
        testMode: TEST_MODE,
      });
    }
  }, INITIAL_DELAY_MS);
}

/**
 * Stop the scheduler gracefully
 * Waits for current tick to complete before stopping
 */
export async function stopScheduler() {
  if (!isRunning) {
    logger.debug('Scheduler not running');
    return;
  }

  logger.info('Stopping scheduler...');
  isShuttingDown = true;

  // Clear the interval
  if (schedulerInterval) {
    clearInterval(schedulerInterval);
    schedulerInterval = null;
  }

  // Wait a moment for any in-progress tick to complete
  await new Promise(resolve => setTimeout(resolve, 100));

  isRunning = false;
  logger.info('Scheduler stopped');
}

/**
 * Trigger an immediate scheduler tick (for testing or manual sync)
 */
export async function triggerSchedulerTick() {
  if (!isRunning) {
    throw new Error('Scheduler is not running');
  }

  logger.info('Manual scheduler tick triggered');
  await schedulerTick();
}
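
Taken together, startScheduler(), stopScheduler(), and triggerSchedulerTick() are the module's public surface. A minimal sketch of how a server entry point might wire them into startup and graceful shutdown is shown below; the import path and the signal handling are assumptions for illustration, not part of this change.

// Hypothetical entry-point wiring (illustration only; path is assumed).
import { startScheduler, stopScheduler } from './jobs/scheduler.js';

// Begin periodic auto-sync checks once the HTTP server is listening.
startScheduler();

// Stop enqueuing new sync jobs before the process exits.
for (const signal of ['SIGINT', 'SIGTERM']) {
  process.on(signal, async () => {
    await stopScheduler();
    process.exit(0);
  });
}
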
23 src/backend/utils/sync-helpers.js (Normal file)
@@ -0,0 +1,23 @@
/**
 * Sync Helper Utilities
 *
 * Shared utilities for LoTW and DCL sync operations
 */

/**
 * Yield to event loop to allow other requests to be processed
 * This prevents blocking the server during long-running sync operations
 * @returns {Promise<void>}
 */
export function yieldToEventLoop() {
  return new Promise(resolve => setImmediate(resolve));
}

/**
 * Get QSO key for duplicate detection
 * @param {object} qso - QSO object
 * @returns {string} Unique key for the QSO
 */
export function getQSOKey(qso) {
  return `${qso.callsign}|${qso.qsoDate}|${qso.timeOn}|${qso.band}|${qso.mode}`;
}
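
Both helpers are written to be shared by the LoTW and DCL sync paths. A sketch of the intended usage pattern follows; the function name and the batch size of 500 are illustrative, not taken from the diff.

import { yieldToEventLoop, getQSOKey } from './sync-helpers.js';

// Deduplicate a parsed QSO batch without blocking the event loop.
async function dedupeQSOs(qsos) {
  const seen = new Set();
  const unique = [];

  for (let i = 0; i < qsos.length; i++) {
    const key = getQSOKey(qsos[i]);
    if (!seen.has(key)) {
      seen.add(key);
      unique.push(qsos[i]);
    }
    // Yield periodically so other requests keep being served.
    if (i > 0 && i % 500 === 0) {
      await yieldToEventLoop();
    }
  }

  return unique;
}
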
@@ -118,3 +118,15 @@ export const adminAPI = {

  getMyActions: (limit = 50, offset = 0) => apiRequest(`/admin/actions/my?limit=${limit}&offset=${offset}`),
};

// Auto-Sync API
export const autoSyncAPI = {
  getSettings: () => apiRequest('/auto-sync/settings'),

  updateSettings: (settings) => apiRequest('/auto-sync/settings', {
    method: 'PUT',
    body: JSON.stringify(settings),
  }),

  getSchedulerStatus: () => apiRequest('/auto-sync/scheduler/status'),
};

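A sketch of how a frontend component might call the new wrapper; the settings keys mirror what the settings page later in this change sends, but the helper function itself is illustrative.

import { autoSyncAPI } from '$lib/api.js';

// Enable daily LoTW auto-sync while leaving DCL settings untouched.
async function enableDailyLotwSync() {
  const { settings } = await autoSyncAPI.getSettings();

  await autoSyncAPI.updateSettings({
    lotwEnabled: true,
    lotwIntervalHours: 24,
    dclEnabled: settings?.dclEnabled ?? false,
    dclIntervalHours: settings?.dclIntervalHours ?? 24,
  });

  // Admins can also poll the scheduler itself.
  return autoSyncAPI.getSchedulerStatus();
}
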
@@ -103,6 +103,15 @@ function createAuthStore() {
    clearError: () => {
      update((s) => ({ ...s, error: null }));
    },

    // Direct login with user object and token (for impersonation)
    loginWithToken: (user, token) => {
      if (browser) {
        localStorage.setItem('auth_token', token);
        localStorage.setItem('auth_user', JSON.stringify(user));
      }
      set({ user, token, loading: false, error: null });
    },
  };
}

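loginWithToken() is what the impersonation flow elsewhere in this change relies on: once adminAPI.impersonate() or adminAPI.stopImpersonation() returns a fresh token, the store is swapped in one call instead of going through the normal login path. A condensed sketch of the admin-page flow (error handling omitted) is below.

// Condensed impersonation flow as used by the admin page in this change.
const data = await adminAPI.impersonate(userId);  // returns a fresh token on success
localStorage.setItem('auth_token', data.token);   // so the next request uses it
const profile = await authAPI.getProfile();       // profile now includes impersonatedBy
auth.loginWithToken(profile.user, data.token);    // swap user + token in one call
window.location.href = '/';                       // hard redirect into the new session
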
@@ -2,10 +2,42 @@
  import { browser } from '$app/environment';
  import { auth } from '$lib/stores.js';
  import { goto } from '$app/navigation';
  import { adminAPI, authAPI } from '$lib/api.js';

  let stoppingImpersonation = false;

  function handleLogout() {
    auth.logout();
    goto('/auth/login');
    // Use hard redirect to ensure proper navigation after logout
    // goto() may not work properly due to SvelteKit client-side routing
    if (browser) {
      window.location.href = '/auth/login';
    }
  }

  async function handleStopImpersonation() {
    if (stoppingImpersonation) return;

    try {
      stoppingImpersonation = true;
      const data = await adminAPI.stopImpersonation();

      if (data.success) {
        // Update auth store with admin user data and new token
        auth.loginWithToken(data.user, data.token);

        // Hard redirect to home page
        if (browser) {
          window.location.href = '/';
        }
      } else {
        alert('Failed to stop impersonation: ' + (data.error || 'Unknown error'));
      }
    } catch (err) {
      alert('Failed to stop impersonation: ' + err.message);
    } finally {
      stoppingImpersonation = false;
    }
  }
</script>

@@ -35,6 +67,26 @@
    </div>
  </nav>
{/if}

<!-- Impersonation Banner -->
{#if $auth.user?.impersonatedBy}
  <div class="impersonation-banner">
    <div class="impersonation-content">
      <span class="warning-icon">⚠️</span>
      <span class="impersonation-text">
        You are currently impersonating <strong>{$auth.user.email}</strong>
      </span>
      <button
        class="stop-impersonation-btn"
        on:click={handleStopImpersonation}
        disabled={stoppingImpersonation}
      >
        {stoppingImpersonation ? 'Stopping...' : 'Stop Impersonation'}
      </button>
    </div>
  </div>
{/if}

<main>
  <slot />
</main>

@@ -152,4 +204,51 @@
|
||||
margin: 0;
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
/* Impersonation Banner */
|
||||
.impersonation-banner {
|
||||
background-color: #fff3cd;
|
||||
border: 2px solid #ffc107;
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
|
||||
.impersonation-content {
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.warning-icon {
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
.impersonation-text {
|
||||
flex: 1;
|
||||
font-size: 0.95rem;
|
||||
color: #856404;
|
||||
}
|
||||
|
||||
.stop-impersonation-btn {
|
||||
background-color: #ffc107;
|
||||
color: #000;
|
||||
border: none;
|
||||
padding: 0.5rem 1rem;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
font-size: 0.9rem;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.stop-impersonation-btn:hover:not(:disabled) {
|
||||
background-color: #e0a800;
|
||||
}
|
||||
|
||||
.stop-impersonation-btn:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<script>
|
||||
import { onMount, onDestroy, tick } from 'svelte';
|
||||
import { auth } from '$lib/stores.js';
|
||||
import { jobsAPI } from '$lib/api.js';
|
||||
import { jobsAPI, autoSyncAPI } from '$lib/api.js';
|
||||
import { browser } from '$app/environment';
|
||||
|
||||
let jobs = [];
|
||||
@@ -9,6 +9,18 @@
|
||||
let cancellingJobs = new Map(); // Track cancelling state per job
|
||||
let pollingInterval = null;
|
||||
|
||||
// Auto-sync settings state
|
||||
let autoSyncSettings = null;
|
||||
let loadingAutoSync = false;
|
||||
|
||||
// Reactive: scheduled jobs derived from settings
|
||||
// Note: Explicitly reference autoSyncSettings to ensure Svelte tracks it as a dependency
|
||||
let scheduledJobs = [];
|
||||
$: {
|
||||
autoSyncSettings; // Touch variable so Svelte tracks reactivity
|
||||
scheduledJobs = getScheduledJobs();
|
||||
}
|
||||
|
||||
async function loadJobs() {
|
||||
try {
|
||||
const response = await jobsAPI.getRecent(5);
|
||||
@@ -22,6 +34,81 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function loadAutoSyncSettings() {
|
||||
try {
|
||||
loadingAutoSync = true;
|
||||
const response = await autoSyncAPI.getSettings();
|
||||
autoSyncSettings = response.settings || null;
|
||||
} catch (error) {
|
||||
console.error('Failed to load auto-sync settings:', error);
|
||||
// Don't show error, auto-sync is optional
|
||||
} finally {
|
||||
loadingAutoSync = false;
|
||||
}
|
||||
}
|
||||
|
||||
function getScheduledJobs() {
|
||||
if (!autoSyncSettings) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scheduled = [];
|
||||
|
||||
if (autoSyncSettings.lotwEnabled) {
|
||||
scheduled.push({
|
||||
type: 'lotw_sync',
|
||||
icon: '📡',
|
||||
name: 'LoTW Auto-Sync',
|
||||
interval: autoSyncSettings.lotwIntervalHours,
|
||||
nextSyncAt: autoSyncSettings.lotwNextSyncAt,
|
||||
lastSyncAt: autoSyncSettings.lotwLastSyncAt,
|
||||
enabled: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (autoSyncSettings.dclEnabled) {
|
||||
scheduled.push({
|
||||
type: 'dcl_sync',
|
||||
icon: '🛰️',
|
||||
name: 'DCL Auto-Sync',
|
||||
interval: autoSyncSettings.dclIntervalHours,
|
||||
nextSyncAt: autoSyncSettings.dclNextSyncAt,
|
||||
lastSyncAt: autoSyncSettings.dclLastSyncAt,
|
||||
enabled: true,
|
||||
});
|
||||
}
|
||||
|
||||
return scheduled;
|
||||
}
|
||||
|
||||
function getNextSyncLabel(nextSyncAt, interval) {
|
||||
if (!nextSyncAt) return 'Pending...';
|
||||
|
||||
const now = new Date();
|
||||
const nextSync = new Date(nextSyncAt);
|
||||
const diffMs = nextSync - now;
|
||||
const diffMins = Math.floor(diffMs / 60000);
|
||||
const diffHours = Math.floor(diffMs / 3600000);
|
||||
const diffDays = Math.floor(diffMs / 86400000);
|
||||
|
||||
if (diffMs < 0) return 'Due now';
|
||||
if (diffMins < 60) return `In ${diffMins} minute${diffMins !== 1 ? 's' : ''}`;
|
||||
if (diffHours < 24) return `In ${diffHours} hour${diffHours !== 1 ? 's' : ''}`;
|
||||
return `In ${diffDays} day${diffDays !== 1 ? 's' : ''}`;
|
||||
}
|
||||
|
||||
function formatNextSyncTime(nextSyncAt) {
|
||||
if (!nextSyncAt) return null;
|
||||
const date = new Date(nextSyncAt);
|
||||
return date.toLocaleString();
|
||||
}
|
||||
|
||||
function formatLastSyncTime(lastSyncAt) {
|
||||
if (!lastSyncAt) return 'Never';
|
||||
const date = new Date(lastSyncAt);
|
||||
return formatDate(date);
|
||||
}
|
||||
|
||||
function hasActiveJobs() {
|
||||
return jobs.some(job => job.status === 'pending' || job.status === 'running');
|
||||
}
|
||||
@@ -58,6 +145,7 @@
|
||||
// Load recent jobs if authenticated
|
||||
if ($auth.user) {
|
||||
await loadJobs();
|
||||
await loadAutoSyncSettings();
|
||||
loading = false;
|
||||
}
|
||||
});
|
||||
@@ -187,6 +275,47 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Scheduled Auto-Sync Jobs -->
|
||||
{#if scheduledJobs.length > 0}
|
||||
<div class="scheduled-section">
|
||||
<h2 class="section-title">⏰ Upcoming Auto-Sync</h2>
|
||||
<div class="jobs-list">
|
||||
{#each scheduledJobs as scheduled (scheduled.type)}
|
||||
<div class="job-card job-card-scheduled">
|
||||
<div class="job-header">
|
||||
<div class="job-title">
|
||||
<span class="job-icon">{scheduled.icon}</span>
|
||||
<span class="job-name">{scheduled.name}</span>
|
||||
<span class="job-badge scheduled-badge">Scheduled</span>
|
||||
</div>
|
||||
<span class="scheduled-interval">Every {scheduled.interval}h</span>
|
||||
</div>
|
||||
|
||||
<div class="job-meta">
|
||||
<span class="job-date">
|
||||
Next: <strong title={formatNextSyncTime(scheduled.nextSyncAt)}>
|
||||
{getNextSyncLabel(scheduled.nextSyncAt, scheduled.interval)}
|
||||
</strong>
|
||||
</span>
|
||||
<span class="job-time">
|
||||
Last: {formatLastSyncTime(scheduled.lastSyncAt)}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="scheduled-countdown">
|
||||
<div class="countdown-bar">
|
||||
<div class="countdown-progress"></div>
|
||||
</div>
|
||||
<p class="countdown-text">
|
||||
{formatNextSyncTime(scheduled.nextSyncAt)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Recent Sync Jobs -->
|
||||
<div class="jobs-section">
|
||||
<h2 class="section-title">🔄 Recent Sync Jobs</h2>
|
||||
@@ -484,6 +613,11 @@
|
||||
border-left: 4px solid #dc3545;
|
||||
}
|
||||
|
||||
.job-card-scheduled {
|
||||
border-left: 4px solid #8b5cf6;
|
||||
background: linear-gradient(to right, #f8f7ff, white);
|
||||
}
|
||||
|
||||
.job-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -561,6 +695,20 @@
|
||||
color: #6b21a8;
|
||||
}
|
||||
|
||||
.job-badge {
|
||||
padding: 0.2rem 0.6rem;
|
||||
border-radius: 10px;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
|
||||
.scheduled-badge {
|
||||
background-color: #8b5cf6;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.job-meta {
|
||||
display: flex;
|
||||
gap: 0.75rem;
|
||||
@@ -659,4 +807,53 @@
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Scheduled Jobs Section */
|
||||
.scheduled-section {
|
||||
margin-bottom: 2rem;
|
||||
}
|
||||
|
||||
/* Scheduled job countdown */
|
||||
.scheduled-countdown {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.countdown-bar {
|
||||
height: 6px;
|
||||
background: #e5e7eb;
|
||||
border-radius: 3px;
|
||||
overflow: hidden;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.countdown-progress {
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, #8b5cf6, #a78bfa);
|
||||
border-radius: 3px;
|
||||
width: 100%;
|
||||
animation: pulse-countdown 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse-countdown {
|
||||
0%, 100% {
|
||||
opacity: 1;
|
||||
}
|
||||
50% {
|
||||
opacity: 0.7;
|
||||
}
|
||||
}
|
||||
|
||||
.countdown-text {
|
||||
margin: 0;
|
||||
font-size: 0.85rem;
|
||||
color: #8b5cf6;
|
||||
text-align: center;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.scheduled-list {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<script>
|
||||
import { onMount } from 'svelte';
|
||||
import { auth } from '$lib/stores.js';
|
||||
import { adminAPI } from '$lib/api.js';
|
||||
import { adminAPI, authAPI } from '$lib/api.js';
|
||||
import { browser } from '$app/environment';
|
||||
|
||||
let loading = true;
|
||||
@@ -90,16 +90,16 @@
|
||||
const data = await adminAPI.impersonate(userId);
|
||||
|
||||
if (data.success) {
|
||||
// Store new token
|
||||
// Store the new impersonation token
|
||||
if (browser) {
|
||||
localStorage.setItem('auth_token', data.token);
|
||||
}
|
||||
|
||||
// Update auth store with new user data
|
||||
auth.login({
|
||||
...data.impersonating,
|
||||
impersonatedBy: $auth.user.id,
|
||||
});
|
||||
// Fetch the full user profile (which includes impersonatedBy)
|
||||
const profileData = await authAPI.getProfile();
|
||||
|
||||
// Update auth store with complete user data
|
||||
auth.loginWithToken(profileData.user, data.token);
|
||||
|
||||
// Redirect to home page
|
||||
window.location.href = '/';
|
||||
@@ -114,32 +114,6 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function handleStopImpersonation() {
|
||||
try {
|
||||
loading = true;
|
||||
const data = await adminAPI.stopImpersonation();
|
||||
|
||||
if (data.success) {
|
||||
// Store admin token
|
||||
if (browser) {
|
||||
localStorage.setItem('auth_token', data.token);
|
||||
}
|
||||
|
||||
// Update auth store
|
||||
auth.login(data.user);
|
||||
|
||||
alert(data.message);
|
||||
window.location.reload();
|
||||
} else {
|
||||
alert('Failed to stop impersonation: ' + (data.error || 'Unknown error'));
|
||||
}
|
||||
} catch (err) {
|
||||
alert('Failed to stop impersonation: ' + err.message);
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleDeleteUser(userId) {
|
||||
const user = users.find(u => u.id === userId);
|
||||
if (!user) return;
|
||||
@@ -203,7 +177,11 @@

  function formatDate(dateString) {
    if (!dateString) return 'N/A';
    return new Date(dateString).toLocaleDateString('en-US', {
    // Handle Unix timestamps (seconds) by converting to milliseconds
    const date = typeof dateString === 'number'
      ? new Date(dateString * 1000)
      : new Date(dateString);
    return date.toLocaleDateString('en-US', {
      year: 'numeric',
      month: 'short',
      day: 'numeric',
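The formatDate() change above guards against Unix timestamps delivered in seconds: JavaScript's Date constructor expects milliseconds, so a bare seconds value would render as a date in early 1970. A two-line illustration of the difference:

new Date(1700000000);        // Jan 1970 – the value is read as milliseconds
new Date(1700000000 * 1000); // Nov 2023 – seconds converted to milliseconds first
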
@@ -232,21 +210,6 @@
|
||||
<div class="error">{error}</div>
|
||||
{:else}
|
||||
<div class="admin-dashboard">
|
||||
<!-- Impersonation Banner -->
|
||||
{#if $auth.user?.impersonatedBy}
|
||||
<div class="impersonation-banner">
|
||||
<div class="impersonation-content">
|
||||
<span class="warning-icon">⚠️</span>
|
||||
<span class="impersonation-text">
|
||||
You are currently impersonating <strong>{$auth.user.email}</strong>
|
||||
</span>
|
||||
<button class="stop-impersonation-btn" on:click={handleStopImpersonation}>
|
||||
Stop Impersonation
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<h1>Admin Dashboard</h1>
|
||||
|
||||
<!-- Tab Navigation -->
|
||||
@@ -573,45 +536,6 @@
|
||||
color: #c00;
|
||||
}
|
||||
|
||||
/* Impersonation Banner */
|
||||
.impersonation-banner {
|
||||
background-color: #fff3cd;
|
||||
border: 2px solid #ffc107;
|
||||
border-radius: 4px;
|
||||
padding: 1rem;
|
||||
margin-bottom: 2rem;
|
||||
}
|
||||
|
||||
.impersonation-content {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.warning-icon {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
|
||||
.impersonation-text {
|
||||
flex: 1;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.stop-impersonation-btn {
|
||||
background-color: #ffc107;
|
||||
color: #000;
|
||||
border: none;
|
||||
padding: 0.5rem 1rem;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.stop-impersonation-btn:hover {
|
||||
background-color: #e0a800;
|
||||
}
|
||||
|
||||
h1 {
|
||||
margin-bottom: 1.5rem;
|
||||
color: #333;
|
||||
@@ -863,12 +787,12 @@
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.action-type.impostor_start {
|
||||
.action-type.impersonate_start {
|
||||
background-color: #ffc107;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.action-type.impostor_stop {
|
||||
.action-type.impersonate_stop {
|
||||
background-color: #28a745;
|
||||
color: white;
|
||||
}
|
||||
|
||||
@@ -8,13 +8,53 @@
|
||||
let loading = true;
|
||||
let error = null;
|
||||
let groupedData = [];
|
||||
let bands = [];
|
||||
let columns = []; // Array of {band, mode?} - mode is undefined for mixed mode
|
||||
let selectedMode = 'Mixed Mode'; // Mode filter, default is all modes aggregated
|
||||
|
||||
// QSO detail modal state
|
||||
let selectedQSO = null;
|
||||
let showQSODetailModal = false;
|
||||
let loadingQSO = false;
|
||||
|
||||
// QSO list modal state
|
||||
let showQSOListModal = false;
|
||||
let selectedSlotQSOs = [];
|
||||
let selectedSlotInfo = null; // { entityName, band, mode }
|
||||
|
||||
// Get available modes from entities
|
||||
$: availableModes = ['Mixed Mode', ...new Set(entities.map(e => e.mode).filter(Boolean).sort())];
|
||||
|
||||
// Band order by wavelength (longest to shortest), SAT at the end
|
||||
const bandOrder = ['160m', '80m', '60m', '40m', '30m', '20m', '17m', '15m', '12m', '10m', '6m', '2m', '70cm', 'SAT', '23cm', '13cm', '9cm', '6cm', '3cm'];
|
||||
|
||||
// Filter entities by selected mode for summary calculations
|
||||
$: filteredEntities = selectedMode === 'Mixed Mode'
|
||||
? entities
|
||||
: entities.filter(e => e.mode === selectedMode);
|
||||
|
||||
// Calculate unique entity progress (for DXCC, DLD, etc.)
|
||||
$: uniqueEntityProgress = (() => {
|
||||
const uniqueEntities = new Map();
|
||||
|
||||
filteredEntities.forEach(e => {
|
||||
const entityName = e.entityName || e.entity || 'Unknown';
|
||||
|
||||
if (!uniqueEntities.has(entityName)) {
|
||||
uniqueEntities.set(entityName, { worked: false, confirmed: false });
|
||||
}
|
||||
|
||||
const status = uniqueEntities.get(entityName);
|
||||
if (e.worked) status.worked = true;
|
||||
if (e.confirmed) status.confirmed = true;
|
||||
});
|
||||
|
||||
return {
|
||||
total: uniqueEntities.size,
|
||||
worked: Array.from(uniqueEntities.values()).filter(s => s.worked).length,
|
||||
confirmed: Array.from(uniqueEntities.values()).filter(s => s.confirmed).length
|
||||
};
|
||||
})();
|
||||
|
||||
onMount(async () => {
|
||||
await loadAwardData();
|
||||
});
|
||||
@@ -56,17 +96,24 @@
|
||||
}
|
||||
|
||||
function groupDataForTable() {
|
||||
// Group by entity name, then create band columns
|
||||
// Group by entity name, then create columns based on mode filter
|
||||
const entityMap = new Map();
|
||||
const bandsSet = new Set();
|
||||
const columnSet = new Set();
|
||||
|
||||
const isMixedMode = selectedMode === 'Mixed Mode';
|
||||
|
||||
entities.forEach((entity) => {
|
||||
// Skip if mode filter is set and entity doesn't match
|
||||
if (!isMixedMode && entity.mode !== selectedMode) {
|
||||
return;
|
||||
}
|
||||
|
||||
const entityName = entity.entityName || entity.entity || 'Unknown';
|
||||
|
||||
if (!entityMap.has(entityName)) {
|
||||
entityMap.set(entityName, {
|
||||
entityName,
|
||||
bands: new Map(),
|
||||
slots: new Map(),
|
||||
worked: entity.worked,
|
||||
confirmed: entity.confirmed,
|
||||
});
|
||||
@@ -74,27 +121,69 @@
|
||||
|
||||
const entityData = entityMap.get(entityName);
|
||||
|
||||
if (entity.band) {
|
||||
bandsSet.add(entity.band);
|
||||
// Check if this is a satellite QSO - use "SAT" instead of band
|
||||
const isSatellite = entity.qsos && entity.qsos.some(qso => qso.satName);
|
||||
const band = isSatellite ? 'SAT' : (entity.band || 'Unknown');
|
||||
|
||||
if (!entityData.bands.has(entity.band)) {
|
||||
entityData.bands.set(entity.band, []);
|
||||
if (isMixedMode) {
|
||||
// Mixed Mode: aggregate by band only, collect all QSOs across modes
|
||||
columnSet.add(band);
|
||||
|
||||
if (!entityData.slots.has(band)) {
|
||||
entityData.slots.set(band, {
|
||||
band,
|
||||
mode: null, // No specific mode in mixed mode
|
||||
qsos: [], // Will be aggregated
|
||||
confirmed: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Add QSO info to this band
|
||||
entityData.bands.get(entity.band).push({
|
||||
qsoId: entity.qsoId,
|
||||
callsign: entity.callsign,
|
||||
mode: entity.mode,
|
||||
band: entity.band,
|
||||
const slot = entityData.slots.get(band);
|
||||
// Add QSOs from this entity to the aggregated slot
|
||||
if (entity.qsos && entity.qsos.length > 0) {
|
||||
slot.qsos.push(...entity.qsos);
|
||||
if (entity.confirmed) slot.confirmed = true;
|
||||
}
|
||||
} else {
|
||||
// Specific Mode: group by (band, mode)
|
||||
const mode = entity.mode || 'Unknown';
|
||||
const columnKey = `${band}/${mode}`;
|
||||
columnSet.add(columnKey);
|
||||
|
||||
entityData.slots.set(columnKey, {
|
||||
band,
|
||||
mode,
|
||||
qsos: entity.qsos || [],
|
||||
confirmed: entity.confirmed,
|
||||
qsoDate: entity.qsoDate,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Convert bands Set to sorted array
|
||||
bands = Array.from(bandsSet).sort();
|
||||
// Convert columnSet to sorted array of column objects
|
||||
columns = Array.from(columnSet)
|
||||
.map(key => {
|
||||
if (isMixedMode) {
|
||||
return { band: key, mode: null }; // key is just the band name
|
||||
} else {
|
||||
const [band, mode] = key.split('/');
|
||||
return { band, mode };
|
||||
}
|
||||
})
|
||||
.sort((a, b) => {
|
||||
// Sort by band order (by wavelength), then by mode
|
||||
const aBandIndex = bandOrder.indexOf(a.band);
|
||||
const bBandIndex = bandOrder.indexOf(b.band);
|
||||
const aIndex = aBandIndex === -1 ? 999 : aBandIndex;
|
||||
const bIndex = bBandIndex === -1 ? 999 : bBandIndex;
|
||||
|
||||
if (aIndex !== bIndex) return aIndex - bIndex;
|
||||
|
||||
// Same band, sort by mode if present
|
||||
if (a.mode !== undefined && b.mode !== undefined) {
|
||||
return (a.mode || '').localeCompare(b.mode || '');
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
|
||||
// Convert Map to array
|
||||
groupedData = Array.from(entityMap.values());
|
||||
@@ -108,15 +197,22 @@
|
||||
const filteredEntities = getFilteredEntities();
|
||||
|
||||
const entityMap = new Map();
|
||||
const bandsSet = new Set();
|
||||
const columnSet = new Set();
|
||||
|
||||
const isMixedMode = selectedMode === 'Mixed Mode';
|
||||
|
||||
filteredEntities.forEach((entity) => {
|
||||
// Skip if mode filter is set and entity doesn't match
|
||||
if (!isMixedMode && entity.mode !== selectedMode) {
|
||||
return;
|
||||
}
|
||||
|
||||
const entityName = entity.entityName || entity.entity || 'Unknown';
|
||||
|
||||
if (!entityMap.has(entityName)) {
|
||||
entityMap.set(entityName, {
|
||||
entityName,
|
||||
bands: new Map(),
|
||||
slots: new Map(),
|
||||
worked: entity.worked,
|
||||
confirmed: entity.confirmed,
|
||||
});
|
||||
@@ -124,25 +220,68 @@
|
||||
|
||||
const entityData = entityMap.get(entityName);
|
||||
|
||||
if (entity.band) {
|
||||
bandsSet.add(entity.band);
|
||||
// Check if this is a satellite QSO - use "SAT" instead of band
|
||||
const isSatellite = entity.qsos && entity.qsos.some(qso => qso.satName);
|
||||
const band = isSatellite ? 'SAT' : (entity.band || 'Unknown');
|
||||
|
||||
if (!entityData.bands.has(entity.band)) {
|
||||
entityData.bands.set(entity.band, []);
|
||||
if (isMixedMode) {
|
||||
// Mixed Mode: aggregate by band only
|
||||
columnSet.add(band);
|
||||
|
||||
if (!entityData.slots.has(band)) {
|
||||
entityData.slots.set(band, {
|
||||
band,
|
||||
mode: null,
|
||||
qsos: [],
|
||||
confirmed: false,
|
||||
});
|
||||
}
|
||||
|
||||
entityData.bands.get(entity.band).push({
|
||||
qsoId: entity.qsoId,
|
||||
callsign: entity.callsign,
|
||||
mode: entity.mode,
|
||||
band: entity.band,
|
||||
const slot = entityData.slots.get(band);
|
||||
if (entity.qsos && entity.qsos.length > 0) {
|
||||
slot.qsos.push(...entity.qsos);
|
||||
if (entity.confirmed) slot.confirmed = true;
|
||||
}
|
||||
} else {
|
||||
// Specific Mode: group by (band, mode)
|
||||
const mode = entity.mode || 'Unknown';
|
||||
const columnKey = `${band}/${mode}`;
|
||||
columnSet.add(columnKey);
|
||||
|
||||
entityData.slots.set(columnKey, {
|
||||
band,
|
||||
mode,
|
||||
qsos: entity.qsos || [],
|
||||
confirmed: entity.confirmed,
|
||||
qsoDate: entity.qsoDate,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
bands = Array.from(bandsSet).sort();
|
||||
columns = Array.from(columnSet)
|
||||
.map(key => {
|
||||
if (isMixedMode) {
|
||||
return { band: key, mode: null };
|
||||
} else {
|
||||
const [band, mode] = key.split('/');
|
||||
return { band, mode };
|
||||
}
|
||||
})
|
||||
.sort((a, b) => {
|
||||
// Sort by band order (by wavelength), then by mode
|
||||
const aBandIndex = bandOrder.indexOf(a.band);
|
||||
const bBandIndex = bandOrder.indexOf(b.band);
|
||||
const aIndex = aBandIndex === -1 ? 999 : aBandIndex;
|
||||
const bIndex = bBandIndex === -1 ? 999 : bBandIndex;
|
||||
|
||||
if (aIndex !== bIndex) return aIndex - bIndex;
|
||||
|
||||
// Same band, sort by mode if present
|
||||
if (a.mode !== undefined && b.mode !== undefined) {
|
||||
return (a.mode || '').localeCompare(b.mode || '');
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
|
||||
groupedData = Array.from(entityMap.values());
|
||||
}
|
||||
|
||||
@@ -159,27 +298,69 @@
|
||||
return filtered;
|
||||
}
|
||||
|
||||
// Re-apply sort when entities or sort changes
|
||||
$: if (entities.length > 0) {
|
||||
// Re-apply sort when entities or mode changes
|
||||
$: if (entities.length > 0 || selectedMode) {
|
||||
applyFilter();
|
||||
}
|
||||
|
||||
// Calculate band sums
|
||||
$: bandSums = (() => {
|
||||
// Calculate column sums - counts unique entities per column (not QSO counts)
|
||||
$: columnSums = (() => {
|
||||
const sums = new Map();
|
||||
const hasPoints = entities.length > 0 && entities[0].points !== undefined;
|
||||
const isMixedMode = selectedMode === 'Mixed Mode';
|
||||
|
||||
bands.forEach(band => {
|
||||
columns.forEach(({ band, mode }) => {
|
||||
const key = isMixedMode ? band : `${band}/${mode}`;
|
||||
if (hasPoints) {
|
||||
// Sum points for confirmed QSOs in this band
|
||||
const sum = entities
|
||||
.filter(e => e.band === band && e.confirmed)
|
||||
.reduce((total, e) => total + (e.points || 0), 0);
|
||||
sums.set(band, sum);
|
||||
// Sum points for confirmed QSOs in this column
|
||||
if (isMixedMode) {
|
||||
const sum = entities
|
||||
.filter(e => {
|
||||
// For SAT column, check if entity has satellite QSOs
|
||||
if (band === 'SAT') {
|
||||
return e.qsos && e.qsos.some(qso => qso.satName) && e.confirmed;
|
||||
}
|
||||
return e.band === band && e.confirmed;
|
||||
})
|
||||
.reduce((total, e) => total + (e.points || 0), 0);
|
||||
sums.set(key, sum);
|
||||
} else {
|
||||
const sum = entities
|
||||
.filter(e => {
|
||||
// For SAT column, check if entity has satellite QSOs
|
||||
if (band === 'SAT') {
|
||||
return e.qsos && e.qsos.some(qso => qso.satName) && e.mode === mode && e.confirmed;
|
||||
}
|
||||
return e.band === band && e.mode === mode && e.confirmed;
|
||||
})
|
||||
.reduce((total, e) => total + (e.points || 0), 0);
|
||||
sums.set(key, sum);
|
||||
}
|
||||
} else {
|
||||
// Count confirmed QSOs in this band
|
||||
const count = entities.filter(e => e.band === band && e.confirmed).length;
|
||||
sums.set(band, count);
|
||||
// Count unique entities in this column (not QSO counts)
|
||||
if (isMixedMode) {
|
||||
const matchedEntities = entities.filter(e => {
|
||||
// For SAT column, check if entity has satellite QSOs
|
||||
if (band === 'SAT') {
|
||||
return e.qsos && e.qsos.some(qso => qso.satName) && e.confirmed;
|
||||
}
|
||||
return e.band === band && e.confirmed;
|
||||
});
|
||||
// Count unique entity names
|
||||
const uniqueEntities = new Set(matchedEntities.map(e => e.entityName || e.entity || 'Unknown'));
|
||||
sums.set(key, uniqueEntities.size);
|
||||
} else {
|
||||
const matchedEntities = entities.filter(e => {
|
||||
// For SAT column, check if entity has satellite QSOs
|
||||
if (band === 'SAT') {
|
||||
return e.qsos && e.qsos.some(qso => qso.satName) && e.mode === mode && e.confirmed;
|
||||
}
|
||||
return e.band === band && e.mode === mode && e.confirmed;
|
||||
});
|
||||
// Count unique entity names
|
||||
const uniqueEntities = new Set(matchedEntities.map(e => e.entityName || e.entity || 'Unknown'));
|
||||
sums.set(key, uniqueEntities.size);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -229,6 +410,23 @@
|
||||
showQSODetailModal = false;
|
||||
}
|
||||
|
||||
// QSO List Modal Functions
|
||||
function openQSOListModal(slotData, entityName, band, mode) {
|
||||
selectedSlotInfo = {
|
||||
entityName,
|
||||
band,
|
||||
mode,
|
||||
};
|
||||
selectedSlotQSOs = slotData.qsos || [];
|
||||
showQSOListModal = true;
|
||||
}
|
||||
|
||||
function closeQSOListModal() {
|
||||
selectedSlotInfo = null;
|
||||
selectedSlotQSOs = [];
|
||||
showQSOListModal = false;
|
||||
}
|
||||
|
||||
function formatDate(dateStr) {
|
||||
if (!dateStr) return '-';
|
||||
// ADIF format: YYYYMMDD
|
||||
@@ -272,16 +470,16 @@
|
||||
|
||||
<div class="summary">
|
||||
{#if entities.length > 0 && entities[0].points !== undefined}
|
||||
{@const earnedPoints = entities.reduce((sum, e) => sum + (e.confirmed ? e.points : 0), 0)}
|
||||
{@const earnedPoints = filteredEntities.reduce((sum, e) => sum + (e.confirmed ? e.points : 0), 0)}
|
||||
{@const targetPoints = award.target}
|
||||
{@const neededPoints = Math.max(0, targetPoints - earnedPoints)}
|
||||
<div class="summary-card">
|
||||
<span class="summary-label">Total Combinations:</span>
|
||||
<span class="summary-value">{entities.length}</span>
|
||||
<span class="summary-value">{filteredEntities.length}</span>
|
||||
</div>
|
||||
<div class="summary-card confirmed">
|
||||
<span class="summary-label">Confirmed:</span>
|
||||
<span class="summary-value">{entities.filter((e) => e.confirmed).length}</span>
|
||||
<span class="summary-value">{filteredEntities.filter((e) => e.confirmed).length}</span>
|
||||
</div>
|
||||
<div class="summary-card" style="background-color: #fff3cd; border-color: #ffc107;">
|
||||
<span class="summary-label">Points:</span>
|
||||
@@ -296,20 +494,18 @@
|
||||
<span class="summary-value">{targetPoints}</span>
|
||||
</div>
|
||||
{:else}
|
||||
{@const workedCount = entities.filter((e) => e.worked).length}
|
||||
{@const confirmedCount = entities.filter((e) => e.confirmed).length}
|
||||
{@const neededCount = award.target ? Math.max(0, award.target - workedCount) : entities.filter((e) => !e.worked).length}
|
||||
{@const neededCount = award.target ? Math.max(0, award.target - uniqueEntityProgress.worked) : uniqueEntityProgress.total - uniqueEntityProgress.worked}
|
||||
<div class="summary-card">
|
||||
<span class="summary-label">Total:</span>
|
||||
<span class="summary-value">{entities.length}</span>
|
||||
<span class="summary-value">{uniqueEntityProgress.total}</span>
|
||||
</div>
|
||||
<div class="summary-card confirmed">
|
||||
<span class="summary-label">Confirmed:</span>
|
||||
<span class="summary-value">{confirmedCount}</span>
|
||||
<span class="summary-value">{uniqueEntityProgress.confirmed}</span>
|
||||
</div>
|
||||
<div class="summary-card worked">
|
||||
<span class="summary-label">Worked:</span>
|
||||
<span class="summary-value">{workedCount}</span>
|
||||
<span class="summary-value">{uniqueEntityProgress.worked}</span>
|
||||
</div>
|
||||
<div class="summary-card unworked">
|
||||
<span class="summary-label">Needed:</span>
|
||||
@@ -318,6 +514,18 @@
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="mode-filter">
|
||||
<label for="mode-select">Filter by mode:</label>
|
||||
<select id="mode-select" bind:value={selectedMode}>
|
||||
{#each availableModes as mode}
|
||||
<option value={mode}>{mode}</option>
|
||||
{/each}
|
||||
</select>
|
||||
{#if selectedMode !== 'Mixed Mode'}
|
||||
<button class="clear-filter-btn" on:click={() => selectedMode = 'Mixed Mode'}>Clear</button>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="table-container">
|
||||
{#if groupedData.length === 0}
|
||||
<div class="empty">No entities match the current filter.</div>
|
||||
@@ -326,7 +534,7 @@
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="entity-column">Entity</th>
|
||||
{#each bands as band}
|
||||
{#each columns as { band }}
|
||||
<th class="band-column">{band}</th>
|
||||
{/each}
|
||||
</tr>
|
||||
@@ -337,26 +545,23 @@
|
||||
<td class="entity-cell">
|
||||
<div class="entity-name">{row.entityName}</div>
|
||||
</td>
|
||||
{#each bands as band}
|
||||
{@const qsos = row.bands.get(band) || []}
|
||||
{#each columns as { band, mode }}
|
||||
{@const columnKey = mode ? `${band}/${mode}` : band}
|
||||
{@const slotData = row.slots.get(columnKey)}
|
||||
<td class="band-cell">
|
||||
{#if qsos.length > 0}
|
||||
<div class="qso-list">
|
||||
{#each qsos as qso}
|
||||
<div
|
||||
class="qso-entry {qso.confirmed ? 'qso-confirmed' : 'qso-worked'}"
|
||||
on:click={() => openQSODetailModal(qso)}
|
||||
on:keydown={(e) => e.key === 'Enter' && openQSODetailModal(qso)}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
>
|
||||
<span class="callsign">{qso.callsign}</span>
|
||||
<span class="mode">{qso.mode}</span>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{#if slotData && slotData.qsos && slotData.qsos.length > 0}
|
||||
<span
|
||||
class="qso-count-link"
|
||||
on:click={() => openQSOListModal(slotData, row.entityName, band, mode)}
|
||||
on:keydown={(e) => e.key === 'Enter' && openQSOListModal(slotData, row.entityName, band, mode)}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
title="{slotData.qsos.length} QSO{slotData.qsos.length === 1 ? '' : 's'}"
|
||||
>
|
||||
{slotData.qsos.length}
|
||||
</span>
|
||||
{:else}
|
||||
<div class="no-qso">-</div>
|
||||
<span class="no-qso">-</span>
|
||||
{/if}
|
||||
</td>
|
||||
{/each}
|
||||
@@ -368,8 +573,9 @@
|
||||
<td class="sum-label">
|
||||
<strong>Sum</strong>
|
||||
</td>
|
||||
{#each bands as band}
|
||||
{@const sum = bandSums.get(band) ?? 0}
|
||||
{#each columns as { band, mode }}
|
||||
{@const columnKey = mode ? `${band}/${mode}` : band}
|
||||
{@const sum = columnSums.get(columnKey) ?? 0}
|
||||
<td class="sum-cell">
|
||||
<strong>{sum}</strong>
|
||||
</td>
|
||||
@@ -564,6 +770,51 @@
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- QSO List Modal -->
|
||||
{#if showQSOListModal && selectedSlotInfo}
|
||||
<div class="modal-backdrop" on:click={closeQSOListModal} on:keydown={(e) => e.key === 'Escape' && closeQSOListModal()} role="dialog" aria-modal="true">
|
||||
<div class="modal-content qso-list-modal" on:click|stopPropagation>
|
||||
<div class="modal-header">
|
||||
<h2>QSOs for {selectedSlotInfo.entityName} ({selectedSlotInfo.band}{#if selectedSlotInfo.mode} {selectedSlotInfo.mode}{/if})</h2>
|
||||
<button class="modal-close" on:click={closeQSOListModal} aria-label="Close modal">×</button>
|
||||
</div>
|
||||
|
||||
<div class="modal-body">
|
||||
{#if selectedSlotQSOs.length === 0}
|
||||
<div class="empty">No QSOs found for this slot.</div>
|
||||
{:else}
|
||||
<table class="qso-list-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Callsign</th>
|
||||
<th>Date</th>
|
||||
<th>Time</th>
|
||||
<th>Mode</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each selectedSlotQSOs as qso}
|
||||
<tr
|
||||
class="qso-list-row"
|
||||
on:click={() => { openQSODetailModal(qso); closeQSOListModal(); }}
|
||||
on:keydown={(e) => e.key === 'Enter' && (openQSODetailModal(qso), closeQSOListModal())}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
>
|
||||
<td class="callsign-cell">{qso.callsign}</td>
|
||||
<td>{formatDate(qso.qsoDate)}</td>
|
||||
<td>{formatTime(qso.timeOn)}</td>
|
||||
<td>{qso.mode || '-'}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.container {
|
||||
max-width: 1200px;
|
||||
@@ -1037,4 +1288,129 @@
|
||||
.modal-content::-webkit-scrollbar-thumb:hover {
|
||||
background: #555;
|
||||
}
|
||||
|
||||
/* Mode Filter */
|
||||
.mode-filter {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
margin-bottom: 1.5rem;
|
||||
padding: 1rem;
|
||||
background-color: #f8f9fa;
|
||||
border-radius: 8px;
|
||||
border: 1px solid #e0e0e0;
|
||||
}
|
||||
|
||||
.mode-filter label {
|
||||
font-weight: 600;
|
||||
color: #333;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.mode-filter select {
|
||||
padding: 0.5rem 2rem 0.5rem 1rem;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 4px;
|
||||
background-color: white;
|
||||
font-size: 0.95rem;
|
||||
color: #333;
|
||||
cursor: pointer;
|
||||
min-width: 150px;
|
||||
}
|
||||
|
||||
.mode-filter select:hover {
|
||||
border-color: #4a90e2;
|
||||
}
|
||||
|
||||
.mode-filter select:focus {
|
||||
outline: none;
|
||||
border-color: #4a90e2;
|
||||
box-shadow: 0 0 0 2px rgba(74, 144, 226, 0.2);
|
||||
}
|
||||
|
||||
.clear-filter-btn {
|
||||
padding: 0.5rem 1rem;
|
||||
background-color: #e0e0e0;
|
||||
color: #333;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-size: 0.9rem;
|
||||
font-weight: 500;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.clear-filter-btn:hover {
|
||||
background-color: #d0d0d0;
|
||||
}
|
||||
|
||||
/* QSO Count Link */
|
||||
.qso-count-link {
|
||||
cursor: pointer;
|
||||
color: #4a90e2;
|
||||
font-weight: 500;
|
||||
text-decoration: none;
|
||||
padding: 0.25rem 0.5rem;
|
||||
border-radius: 4px;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.qso-count-link:hover {
|
||||
background-color: #f0f7ff;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.qso-count-link:focus {
|
||||
outline: 2px solid #4a90e2;
|
||||
outline-offset: -2px;
|
||||
}
|
||||
|
||||
.no-qso {
|
||||
color: #999;
|
||||
}
|
||||
|
||||
/* QSO List Modal */
|
||||
.qso-list-modal {
|
||||
max-width: 500px;
|
||||
}
|
||||
|
||||
.qso-list-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
.qso-list-table th,
|
||||
.qso-list-table td {
|
||||
padding: 0.75rem;
|
||||
text-align: left;
|
||||
border-bottom: 1px solid #e0e0e0;
|
||||
}
|
||||
|
||||
.qso-list-table th {
|
||||
background-color: #f8f9fa;
|
||||
font-weight: 600;
|
||||
color: #333;
|
||||
font-size: 0.85rem;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.qso-list-row {
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.qso-list-row:hover {
|
||||
background-color: #f0f7ff;
|
||||
}
|
||||
|
||||
.qso-list-row:focus {
|
||||
outline: 2px solid #4a90e2;
|
||||
outline-offset: -2px;
|
||||
}
|
||||
|
||||
.callsign-cell {
|
||||
font-family: monospace;
|
||||
font-weight: 600;
|
||||
color: #333;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
<script>
|
||||
import { onMount } from 'svelte';
|
||||
import { authAPI } from '$lib/api.js';
|
||||
import { browser } from '$app/environment';
|
||||
import { authAPI, autoSyncAPI } from '$lib/api.js';
|
||||
import { auth } from '$lib/stores.js';
|
||||
import { goto } from '$app/navigation';
|
||||
|
||||
@@ -16,9 +17,23 @@
|
||||
let hasLoTWCredentials = false;
|
||||
let hasDCLCredentials = false;
|
||||
|
||||
// Auto-sync settings
|
||||
let autoSyncSettings = {
|
||||
lotwEnabled: false,
|
||||
lotwIntervalHours: 24,
|
||||
lotwNextSyncAt: null,
|
||||
dclEnabled: false,
|
||||
dclIntervalHours: 24,
|
||||
dclNextSyncAt: null,
|
||||
};
|
||||
let loadingAutoSync = false;
|
||||
let savingAutoSync = false;
|
||||
let successAutoSync = false;
|
||||
|
||||
onMount(async () => {
|
||||
// Load user profile to check if credentials exist
|
||||
await loadProfile();
|
||||
await loadAutoSyncSettings();
|
||||
});
|
||||
|
||||
async function loadProfile() {
|
||||
@@ -40,6 +55,21 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function loadAutoSyncSettings() {
|
||||
try {
|
||||
loadingAutoSync = true;
|
||||
const response = await autoSyncAPI.getSettings();
|
||||
if (response.settings) {
|
||||
autoSyncSettings = response.settings;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('Failed to load auto-sync settings:', err);
|
||||
// Don't show error for auto-sync, it's optional
|
||||
} finally {
|
||||
loadingAutoSync = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSaveLoTW(e) {
|
||||
e.preventDefault();
|
||||
|
||||
@@ -91,9 +121,46 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSaveAutoSync(e) {
|
||||
e.preventDefault();
|
||||
|
||||
try {
|
||||
savingAutoSync = true;
|
||||
error = null;
|
||||
successAutoSync = false;
|
||||
|
||||
await autoSyncAPI.updateSettings({
|
||||
lotwEnabled: autoSyncSettings.lotwEnabled,
|
||||
lotwIntervalHours: parseInt(autoSyncSettings.lotwIntervalHours),
|
||||
dclEnabled: autoSyncSettings.dclEnabled,
|
||||
dclIntervalHours: parseInt(autoSyncSettings.dclIntervalHours),
|
||||
});
|
||||
|
||||
console.log('Auto-sync settings saved successfully!');
|
||||
|
||||
// Reload settings to get updated next sync times
|
||||
await loadAutoSyncSettings();
|
||||
successAutoSync = true;
|
||||
} catch (err) {
|
||||
console.error('Auto-sync save failed:', err);
|
||||
error = err.message;
|
||||
} finally {
|
||||
savingAutoSync = false;
|
||||
}
|
||||
}
|
||||
|
||||
function formatNextSyncTime(dateString) {
|
||||
if (!dateString) return 'Not scheduled';
|
||||
const date = new Date(dateString);
|
||||
return date.toLocaleString();
|
||||
}
|
||||
|
||||
function handleLogout() {
|
||||
auth.logout();
|
||||
goto('/auth/login');
|
||||
// Use hard redirect to ensure proper navigation after logout
|
||||
if (browser) {
|
||||
window.location.href = '/auth/login';
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -237,6 +304,116 @@
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="settings-section">
|
||||
<h2>Automatic Sync Settings</h2>
|
||||
<p class="help-text">
|
||||
Configure automatic synchronization for LoTW and DCL. The server will automatically
|
||||
sync your QSOs at the specified interval. Credentials must be configured above.
|
||||
</p>
|
||||
|
||||
{#if !hasLoTWCredentials && !hasDCLCredentials}
|
||||
<div class="alert alert-info">
|
||||
<strong>Note:</strong> Configure LoTW or DCL credentials above to enable automatic sync.
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<form on:submit={handleSaveAutoSync} class="settings-form">
|
||||
{#if error}
|
||||
<div class="alert alert-error">{error}</div>
|
||||
{/if}
|
||||
|
||||
{#if successAutoSync}
|
||||
<div class="alert alert-success">
|
||||
Auto-sync settings saved successfully!
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<h3>LoTW Auto-Sync</h3>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group checkbox-group">
|
||||
<label>
|
||||
<input
|
||||
type="checkbox"
|
||||
bind:checked={autoSyncSettings.lotwEnabled}
|
||||
disabled={!hasLoTWCredentials || savingAutoSync}
|
||||
/>
|
||||
Enable LoTW auto-sync
|
||||
</label>
|
||||
{#if !hasLoTWCredentials}
|
||||
<p class="hint">Configure LoTW credentials above first</p>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="lotwIntervalHours">Sync interval (hours)</label>
|
||||
<input
|
||||
id="lotwIntervalHours"
|
||||
type="number"
|
||||
min="1"
|
||||
max="720"
|
||||
bind:value={autoSyncSettings.lotwIntervalHours}
|
||||
disabled={!autoSyncSettings.lotwEnabled || savingAutoSync}
|
||||
/>
|
||||
<p class="hint">
|
||||
Minimum 1 hour, maximum 720 hours (30 days). Default: 24 hours.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if autoSyncSettings.lotwEnabled && autoSyncSettings.lotwNextSyncAt}
|
||||
<p class="next-sync-info">
|
||||
Next scheduled sync: <strong>{formatNextSyncTime(autoSyncSettings.lotwNextSyncAt)}</strong>
|
||||
</p>
|
||||
{/if}
|
||||
|
||||
<hr class="divider" />
|
||||
|
||||
<h3>DCL Auto-Sync</h3>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group checkbox-group">
|
||||
<label>
|
||||
<input
|
||||
type="checkbox"
|
||||
bind:checked={autoSyncSettings.dclEnabled}
|
||||
disabled={!hasDCLCredentials || savingAutoSync}
|
||||
/>
|
||||
Enable DCL auto-sync
|
||||
</label>
|
||||
{#if !hasDCLCredentials}
|
||||
<p class="hint">Configure DCL credentials above first</p>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="dclIntervalHours">Sync interval (hours)</label>
|
||||
<input
|
||||
id="dclIntervalHours"
|
||||
type="number"
|
||||
min="1"
|
||||
max="720"
|
||||
bind:value={autoSyncSettings.dclIntervalHours}
|
||||
disabled={!autoSyncSettings.dclEnabled || savingAutoSync}
|
||||
/>
|
||||
<p class="hint">
|
||||
Minimum 1 hour, maximum 720 hours (30 days). Default: 24 hours.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if autoSyncSettings.dclEnabled && autoSyncSettings.dclNextSyncAt}
|
||||
<p class="next-sync-info">
|
||||
Next scheduled sync: <strong>{formatNextSyncTime(autoSyncSettings.dclNextSyncAt)}</strong>
|
||||
</p>
|
||||
{/if}
|
||||
|
||||
<button type="submit" class="btn btn-primary" disabled={savingAutoSync}>
|
||||
{savingAutoSync ? 'Saving...' : 'Save Auto-Sync Settings'}
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
@@ -438,4 +615,58 @@
|
||||
.info-box a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* Auto-sync specific styles */
|
||||
.form-row {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 1.5rem;
|
||||
align-items: start;
|
||||
}
|
||||
|
||||
.checkbox-group {
|
||||
padding-top: 0.75rem;
|
||||
}
|
||||
|
||||
.checkbox-group label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.checkbox-group input[type="checkbox"] {
|
||||
width: 1.25rem;
|
||||
height: 1.25rem;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.checkbox-group input[type="checkbox"]:disabled {
|
||||
cursor: not-allowed;
|
||||
opacity: 0.5;
|
||||
}
|
||||
|
||||
.divider {
|
||||
border: none;
|
||||
border-top: 1px solid #e0e0e0;
|
||||
margin: 2rem 0;
|
||||
}
|
||||
|
||||
.next-sync-info {
|
||||
padding: 0.75rem 1rem;
|
||||
background-color: #e3f2fd;
|
||||
border-left: 4px solid #4a90e2;
|
||||
border-radius: 4px;
|
||||
margin-top: 1rem;
|
||||
font-size: 0.9rem;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.form-row {
|
||||
grid-template-columns: 1fr;
|
||||
gap: 1rem;
|
||||
}
|
||||
}
|
||||
</style>