major: kestrel is now a TAK server (#6)
All checks were successful
ci/woodpecker/push/push Pipeline was successful
All checks were successful
ci/woodpecker/push/push Pipeline was successful
## Added
- CoT (Cursor on Target) server on port 8089 enabling ATAK/iTAK device connectivity
- Support for TAK stream protocol and traditional XML CoT messages
- TLS/SSL support with automatic fallback to plain TCP
- Username/password authentication for CoT connections
- Real-time device position tracking with TTL-based expiration (90s default)
- API endpoints: `/api/cot/config`, `/api/cot/server-package`, `/api/cot/truststore`, `/api/me/cot-password`
- TAK Server section in Settings with QR code for iTAK setup
- ATAK password management in Account page for OIDC users
- CoT device markers on map showing real-time positions
- Comprehensive documentation in `docs/` directory
- Environment variables: `COT_PORT`, `COT_TTL_MS`, `COT_REQUIRE_AUTH`, `COT_SSL_CERT`, `COT_SSL_KEY`, `COT_DEBUG`
- Dependencies: `fast-xml-parser`, `jszip`, `qrcode`

## Changed
- Authentication system supports CoT password management for OIDC users
- Database schema includes `cot_password_hash` field
- Test suite refactored to follow functional design principles

## Removed
- Consolidated utility modules: `authConfig.js`, `authSkipPaths.js`, `bootstrap.js`, `poiConstants.js`, `session.js`

## Security
- XML entity expansion protection in CoT parser
- Enhanced input validation and SQL injection prevention
- Authentication timeout to prevent hanging connections

## Breaking Changes
- Port 8089 must be exposed for CoT server. Update firewall rules and Docker/Kubernetes configurations.

## Migration Notes
- OIDC users must set ATAK password via Account settings before connecting
- Docker: expose port 8089 (`-p 8089:8089`)
- Kubernetes: update Helm values to expose port 8089

Co-authored-by: Madison Grubb <madison@elastiflow.com>
Reviewed-on: #6
This commit was merged in pull request #6.
This commit is contained in:
102
test/nuxt/logger.spec.js
Normal file
102
test/nuxt/logger.spec.js
Normal file
@@ -0,0 +1,102 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
import { registerEndpoint } from '@nuxt/test-utils/runtime'
import { readBody } from 'h3'
import { initLogger, logError, logWarn, logInfo, logDebug } from '../../app/utils/logger.js'

// Give the logger's fire-and-forget POST to /api/log a moment to reach the stub.
const wait = (ms = 10) => new Promise((resolve) => setTimeout(resolve, ms))

describe('app/utils/logger', () => {
  // Per-method console spies, installed in beforeEach and torn down in afterEach.
  const consoleSpies = {}
  // Original console methods, saved so they can be restored after each test.
  const savedConsole = {}
  // Mutable holder for the payloads captured by the stubbed /api/log endpoint.
  const state = {
    serverCalls: [],
  }

  beforeEach(() => {
    state.serverCalls = []
    const recorded = { log: [], error: [], warn: [], debug: [] }

    // Swap each console method for a spy that records its arguments.
    for (const method of Object.keys(recorded)) {
      savedConsole[method] = console[method]
      consoleSpies[method] = vi.fn((...args) => recorded[method].push(args))
      console[method] = consoleSpies[method]
    }

    // Stub the server-side log sink and capture every payload it receives.
    registerEndpoint('/api/log', async (event) => {
      const payload = event.body || (await readBody(event).catch(() => ({})))
      state.serverCalls.push(payload)
      return { ok: true }
    }, { method: 'POST' })
  })

  afterEach(() => {
    // Put the real console methods back before clearing any remaining mocks.
    for (const method of Object.keys(savedConsole)) {
      console[method] = savedConsole[method]
    }
    vi.restoreAllMocks()
  })

  describe('initLogger', () => {
    it('sets sessionId and userId for server calls', async () => {
      initLogger('session-123', 'user-456')
      logError('Test message')
      await wait()

      expect(state.serverCalls[0]).toMatchObject({
        sessionId: 'session-123',
        userId: 'user-456',
      })
    })
  })

  describe('log functions', () => {
    it.each([
      ['logError', logError, 'error', 'error'],
      ['logWarn', logWarn, 'warn', 'warn'],
      ['logInfo', logInfo, 'info', 'log'],
      ['logDebug', logDebug, 'debug', 'log'],
    ])('%s logs to console and sends to server', async (name, logFn, level, consoleKey) => {
      initLogger('session-123', 'user-456')
      logFn('Test message', { key: 'value' })
      await wait()

      // Each level writes to its console method and ships a structured payload.
      expect(consoleSpies[consoleKey]).toHaveBeenCalledWith('[Test message]', { key: 'value' })
      expect(state.serverCalls[0]).toMatchObject({
        level,
        message: 'Test message',
        data: { key: 'value' },
      })
    })

    it('handles server fetch failure gracefully', async () => {
      // Replace the log sink with one that always fails.
      registerEndpoint('/api/log', () => {
        throw new Error('Network error')
      }, { method: 'POST' })

      initLogger('session-123', 'user-456')
      // A failing transport must never propagate out of the logging call.
      expect(() => logError('Test error')).not.toThrow()
      await wait()
      expect(consoleSpies.error).toHaveBeenCalled()
    })
  })

  describe('sendToServer', () => {
    it('includes timestamp in server request', async () => {
      initLogger('session-123', 'user-456')
      logError('Test message')
      await wait()

      // ISO-8601 prefix: YYYY-MM-DDTHH:MM:SS
      expect(state.serverCalls[0].timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/)
    })

    it('handles null sessionId and userId', async () => {
      initLogger(null, null)
      logError('Test message')
      await wait()

      const { sessionId, userId } = state.serverCalls[0]
      // Either null or undefined is acceptable when no identity was provided.
      expect(sessionId == null).toBe(true)
      expect(userId == null).toBe(true)
    })
  })
})
Reference in New Issue
Block a user