gigafibre-fsm/services/targo-hub/lib/devices.js
louispaulb 607ea54b5c refactor: reduce token count, DRY code, consolidate docs
Backend services:
- targo-hub: extract deepGetValue to helpers.js, DRY disconnect reasons
  lookup map, compact CAPABILITIES, consolidate vision.js prompts/schemas,
  extract dispatch scoring weights, trim section dividers across 9 files
- modem-bridge: extract getSession() helper (6 occurrences), resetIdleTimer(),
  consolidate DM query factory, fix duplicate username fill bug, trim headers
  (server.js -36%, tplink-session.js -47%, docker-compose.yml -57%)

Frontend:
- useWifiDiagnostic: extract THRESHOLDS const, split processDiagnostic into
  6 focused helpers (processOnlineStatus, processWanIPs, processRadios,
  processMeshNodes, processClients, checkRadioIssues)
- EquipmentDetail: merge duplicate ROLE_LABELS, remove verbose comments

Documentation (17 → 13 files, -1,400 lines):
- New consolidated README.md (architecture, services, dependencies, auth)
- Merge ECOSYSTEM-OVERVIEW into ARCHITECTURE.md
- Merge MIGRATION-PLAN + ARCHITECTURE-COMPARE + FIELD-GAP + CHANGELOG → MIGRATION.md
- Merge COMPETITIVE-ANALYSIS into PLATFORM-STRATEGY.md
- Update ROADMAP.md with current phase status
- Delete CONTEXT.md (absorbed into README)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-13 08:39:58 -04:00

552 lines
24 KiB
JavaScript

'use strict'
const cfg = require('./config')
const { log, json, parseBody, nbiRequest, deepGetValue } = require('./helpers')
const { summarizeDevice } = require('./device-extractors')
const createHostsHandler = require('./device-hosts')
// In-memory caches, mirrored to Postgres when CACHE_DB_URL is configured.
const deviceCache = new Map() // serial -> { summary, previous, updatedAt, history }
const hostsDbCache = new Map() // serial -> { data, updatedAt }
const MAX_HISTORY = 60 // max history snapshots retained per device
// Shared poller/DB state for this module.
let pollerTimer = null, lastPollAt = null, pollCount = 0, dbPool = null, dbReady = false
const serialAliases = new Map() // alternate serial -> canonical ACS serial
// Lazily build (and then reuse) the shared pg connection pool.
// Returns null when CACHE_DB_URL is not configured. Pool errors are
// logged so an idle-connection failure cannot crash the process.
function getPool () {
  if (!dbPool) {
    if (!cfg.CACHE_DB_URL) return null
    const { Pool } = require('pg')
    const pool = new Pool({ connectionString: cfg.CACHE_DB_URL, max: 3 })
    pool.on('error', err => log('Cache DB pool error:', err.message))
    dbPool = pool
  }
  return dbPool
}
async function initCacheDb () {
const pool = getPool()
if (!pool) { log('No CACHE_DB_URL — device cache is memory-only'); return }
try {
await pool.query(`CREATE TABLE IF NOT EXISTS device_cache (serial TEXT PRIMARY KEY, summary JSONB NOT NULL DEFAULT '{}', previous JSONB, history JSONB NOT NULL DEFAULT '[]', updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
await pool.query(`CREATE TABLE IF NOT EXISTS hosts_cache (serial TEXT PRIMARY KEY, data JSONB NOT NULL DEFAULT '{}', updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
await pool.query(`CREATE TABLE IF NOT EXISTS device_events (id SERIAL PRIMARY KEY, serial TEXT NOT NULL, event TEXT NOT NULL, reason TEXT, details JSONB, created_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
await pool.query(`CREATE INDEX IF NOT EXISTS idx_device_events_serial ON device_events (serial, created_at DESC)`)
const { rows } = await pool.query('SELECT serial, summary, previous, history, updated_at FROM device_cache')
for (const row of rows) {
deviceCache.set(row.serial, {
summary: row.summary, previous: row.previous,
updatedAt: row.updated_at?.toISOString() || new Date().toISOString(),
history: Array.isArray(row.history) ? row.history : [],
})
}
const hostsRows = await pool.query('SELECT serial, data, updated_at FROM hosts_cache')
for (const row of hostsRows.rows) hostsDbCache.set(row.serial, { data: row.data, updatedAt: row.updated_at?.toISOString() })
dbReady = true
log(`Device cache loaded from DB: ${rows.length} devices, ${hostsRows.rows.length} hosts entries`)
} catch (e) { log('Failed to init cache DB:', e.message) }
}
// Factory for a debounced, batched DB writer. Keys passed to the returned
// scheduler accumulate for 2s, then each is turned into { sql, params } by
// `serialize(key)` and executed against the shared pool. Flushes are
// skipped entirely until initCacheDb() has set dbReady.
function createDebouncedWriter (table, serialize) {
  const dirty = new Set()
  let flushTimer = null
  const flush = async () => {
    flushTimer = null
    if (!dbReady || dirty.size === 0) return
    const pool = getPool()
    if (!pool) return
    // Snapshot and clear so keys scheduled mid-flush land in the next batch.
    const batch = [...dirty]
    dirty.clear()
    for (const key of batch) {
      const stmt = serialize(key)
      if (!stmt) continue
      try {
        await pool.query(stmt.sql, stmt.params)
      } catch (e) {
        log(`DB write error (${table}) for ${key}: ${e.message}`)
      }
    }
  }
  return function schedule (key) {
    dirty.add(key)
    if (!flushTimer) flushTimer = setTimeout(flush, 2000)
  }
}
// Debounced persistence of one deviceCache entry into device_cache
// (upsert keyed by serial; summary/previous/history stored as JSONB).
const scheduleDbWrite = createDebouncedWriter('device_cache', serial => {
  const entry = deviceCache.get(serial)
  if (!entry) return null
  const sql = `INSERT INTO device_cache (serial, summary, previous, history, updated_at) VALUES ($1, $2, $3, $4, NOW()) ON CONFLICT (serial) DO UPDATE SET summary = EXCLUDED.summary, previous = EXCLUDED.previous, history = EXCLUDED.history, updated_at = NOW()`
  const params = [serial, JSON.stringify(entry.summary), JSON.stringify(entry.previous), JSON.stringify(entry.history)]
  return { sql, params }
})
// Debounced persistence of one hostsDbCache entry into hosts_cache
// (upsert keyed by serial; the hosts payload stored as JSONB).
const scheduleHostsDbWrite = createDebouncedWriter('hosts_cache', serial => {
  const entry = hostsDbCache.get(serial)
  if (!entry) return null
  const sql = `INSERT INTO hosts_cache (serial, data, updated_at) VALUES ($1, $2, NOW()) ON CONFLICT (serial) DO UPDATE SET data = EXCLUDED.data, updated_at = NOW()`
  const params = [serial, JSON.stringify(entry.data)]
  return { sql, params }
})
// Best-effort append to the device_events audit log. No-op until the cache
// DB is ready; reason and details are optional (details stored as JSONB).
// Failures are logged, never thrown.
async function logDeviceEvent (serial, event, reason, details) {
  if (!dbReady) return
  const pool = getPool()
  if (!pool) return
  const params = [serial, event, reason || null, details ? JSON.stringify(details) : null]
  try {
    await pool.query('INSERT INTO device_events (serial, event, reason, details) VALUES ($1, $2, $3, $4)', params)
  } catch (e) {
    log(`Event log error for ${serial}: ${e.message}`)
  }
}
// Ordered [regex, label] pairs that classify a fault message into a
// human-readable disconnect reason; the first matching pattern wins.
const DISCONNECT_REASONS = [
[/dying.?gasp/i, 'Dying Gasp (power failure)'],
[/LOS/i, 'LOS (loss of signal)'],
[/LOF/i, 'LOF (loss of frame)'],
]
// Best-effort lookup of why a device went offline: fetch its GenieACS
// faults, take the newest one, and map its message to a friendly label via
// DISCONNECT_REASONS. Falls back to the raw message truncated to 200 chars,
// or null on no faults / any failure.
async function fetchDisconnectReason (deviceId) {
  if (!deviceId) return null
  try {
    const query = encodeURIComponent(`device = "${deviceId}"`)
    const resp = await nbiRequest(`/faults/?query=${query}`)
    const faults = Array.isArray(resp.data) ? resp.data : []
    if (!faults.length) return null
    // Newest first — ISO timestamps compare correctly as strings.
    faults.sort((a, b) => (b.timestamp || '').localeCompare(a.timestamp || ''))
    const newest = faults[0]
    const msg = (newest.detail?.message || newest.message || newest.code || '').toString()
    const hit = DISCONNECT_REASONS.find(([re]) => re.test(msg))
    if (hit) return hit[1]
    return msg ? msg.slice(0, 200) : null
  } catch {
    return null
  }
}
// Fetch up to `limit` recent device_events rows for a serial, newest first.
// Returns [] when the DB is not ready, unavailable, or the query fails.
async function getDeviceEvents (serial, limit = 50) {
  if (!dbReady) return []
  const pool = getPool()
  if (!pool) return []
  try {
    const result = await pool.query(
      'SELECT id, serial, event, reason, details, created_at FROM device_events WHERE serial = $1 ORDER BY created_at DESC LIMIT $2',
      [serial, limit]
    )
    return result.rows
  } catch {
    return []
  }
}
// Store/refresh the in-memory cache entry for a device summary, appending a
// history snapshot when something meaningful changed (online flag, client
// count, rx power, IP) or the last point is older than 60s. History is
// capped at MAX_HISTORY points; the entry is persisted via the debounced
// DB writer. No-op for summaries without a serial.
function cacheDevice (summary) {
  if (!summary?.serial) return
  const existing = deviceCache.get(summary.serial)
  const now = new Date().toISOString()
  const snapshot = {
    ts: now,
    // online = informed within the last 15 minutes
    online: summary.lastInform ? (Date.now() - new Date(summary.lastInform).getTime()) < 900000 : false,
    rxPower: summary.rxPower, txPower: summary.txPower,
    wifiClients: summary.wifi?.totalClients || 0, hostsCount: summary.hostsCount,
    uptime: summary.uptime, ip: summary.ip,
  }
  const history = existing?.history || []
  const last = history[history.length - 1]
  if (!last || last.online !== snapshot.online || last.wifiClients !== snapshot.wifiClients || last.rxPower !== snapshot.rxPower || last.ip !== snapshot.ip || (Date.now() - new Date(last.ts).getTime()) > 60000) {
    history.push(snapshot)
    if (history.length > MAX_HISTORY) history.shift()
  }
  if (existing) {
    // BUGFIX: capture the old summary as `previous` BEFORE overwriting it.
    // The previous code assigned `existing.summary = summary` first and then
    // copied it into `previous`, so `previous` always equaled the brand-new
    // summary and change comparison against it was meaningless.
    existing.previous = existing.summary
    existing.summary = summary
    existing.updatedAt = now
  } else {
    deviceCache.set(summary.serial, { summary, previous: null, updatedAt: now, history })
  }
  scheduleDbWrite(summary.serial)
}
// Register `alias` as an alternate lookup key for `realSerial` and mirror
// the existing cache entry under the alias so both keys resolve to the
// same object. No-op for empty or identical serials.
function aliasSerial (alias, realSerial) {
  if (!alias || !realSerial) return
  if (alias === realSerial) return
  serialAliases.set(alias, realSerial)
  const target = deviceCache.get(realSerial)
  if (target) deviceCache.set(alias, target)
}
// Resolve a cache entry by serial, following serialAliases on a direct
// miss and memoizing an alias hit back into deviceCache for next time.
// Returns null when the serial is unknown under either key.
function getCacheEntry (serial) {
  const direct = deviceCache.get(serial)
  if (direct) return direct
  const realSerial = serialAliases.get(serial)
  if (!realSerial) return null
  const resolved = deviceCache.get(realSerial)
  if (resolved) deviceCache.set(serial, resolved)
  return resolved || null
}
// Cache a device's LAN hosts payload in memory and persist it (debounced).
// No-op when serial or data is missing.
function cacheHosts (serial, data) {
  if (!serial) return
  if (!data) return
  hostsDbCache.set(serial, { data, updatedAt: new Date().toISOString() })
  scheduleHostsDbWrite(serial)
}
// Read-only hosts-cache lookup; null when the serial has no cached hosts.
function getCachedHosts (serial) {
  return hostsDbCache.get(serial) || null
}
// Public alias-aware cache accessor; null when the serial is unknown.
function getCached (serial) {
  return getCacheEntry(serial) || null
}
// Build a lightweight overview of the cache: per-device online flag
// (informed within the last 10 minutes), model label, wifi client count and
// history depth, plus poller counters. Online devices sort first.
// NOTE(review): the 600000ms (10 min) online window here differs from the
// 900000ms (15 min) window used elsewhere in this file — confirm intended.
function getCacheStats () {
  const entries = []
  for (const [serial, entry] of deviceCache) {
    const s = entry.summary
    const lastInform = s?.lastInform ? new Date(s.lastInform) : null
    entries.push({
      serial,
      online: lastInform ? (Date.now() - lastInform.getTime()) < 600000 : false,
      model: `${s?.manufacturer || ''} ${s?.model || ''}`.trim(),
      wifiClients: s?.wifi?.totalClients || 0,
      updatedAt: entry.updatedAt,
      historyPoints: entry.history?.length || 0,
    })
  }
  entries.sort((a, b) => (b.online ? 1 : 0) - (a.online ? 1 : 0))
  return {
    devices: deviceCache.size,
    hostsEntries: hostsDbCache.size,
    lastPollAt, pollCount, dbReady,
    entries,
  }
}
// Minimal projection for the poller: ids, last-inform time, and the serial
// leaves for both TR-069 (InternetGatewayDevice) and TR-181 (Device) models.
const FAST_PROJECTION = '_id,_lastInform,InternetGatewayDevice.DeviceInfo.SerialNumber,Device.DeviceInfo.SerialNumber'
// Page through all GenieACS devices (up to 10k) with the fast projection and
// refresh each cache entry's online flag, logging online/offline transitions
// as device_events (with a best-effort disconnect reason when going offline).
async function pollOnlineStatus () {
if (!cfg.GENIEACS_NBI_URL) return
const startMs = Date.now()
const { httpRequest } = require('./helpers')
const allDevices = []
let skip = 0
const pageSize = 1000
try {
// Fetch pages of up to 1000 devices until a short/empty page or the 10k cap.
while (true) {
const r = await httpRequest(cfg.GENIEACS_NBI_URL, `/devices/?projection=${FAST_PROJECTION}&limit=${pageSize}&skip=${skip}`, { timeout: 30000 })
const page = Array.isArray(r.data) ? r.data : []
if (!page.length) break
allDevices.push(...page)
skip += page.length
if (page.length < pageSize || allDevices.length >= 10000) break
}
} catch (e) { log(`Poll fetch error (skip=${skip}): ${e.message} — processing ${allDevices.length} devices`) }
let count = 0
const now = Date.now()
for (const d of allDevices) {
try {
// Serial: prefer the data-model leaf, else derive from the ACS _id tail
// (assumes the id format is OUI-ProductClass-Serial — TODO confirm).
const serial = (deepGetValue(d, 'InternetGatewayDevice.DeviceInfo.SerialNumber') || deepGetValue(d, 'Device.DeviceInfo.SerialNumber') || '').toString()
|| (d._id ? decodeURIComponent(d._id.split('-').slice(2).join('-')) : '')
if (!serial) continue
const lastInform = d._lastInform ? new Date(d._lastInform).getTime() : 0
// Online = informed within the last 15 minutes.
const online = lastInform ? (now - lastInform) < 900000 : false
const existing = deviceCache.get(serial)
if (existing) {
// Compare against the latest history point to detect state transitions.
const wasOnline = existing.history?.length ? existing.history[existing.history.length - 1].online : null
if (wasOnline !== null && wasOnline !== online) {
if (!online) fetchDisconnectReason(d._id).then(reason => logDeviceEvent(serial, 'offline', reason, { lastInform: d._lastInform, deviceId: d._id }))
else logDeviceEvent(serial, 'online', null, { lastInform: d._lastInform, deviceId: d._id })
}
existing.summary.lastInform = d._lastInform || null
existing.summary._id = d._id
if (d._tags) existing.summary.tags = d._tags
const snapshot = { ts: new Date().toISOString(), online }
const history = existing.history
const last = history[history.length - 1]
// Record a point on state change or when the last one is older than 60s.
if (!last || last.online !== online || (now - new Date(last.ts).getTime()) > 60000) {
history.push(snapshot)
if (history.length > MAX_HISTORY) history.shift()
}
existing.updatedAt = new Date().toISOString()
scheduleDbWrite(serial)
} else {
// First sighting: create a skeleton entry; full details come on demand.
// NOTE(review): new entries are not scheduled for DB persistence here —
// they persist on the next poll transition or detail fetch; confirm intended.
deviceCache.set(serial, {
summary: { _id: d._id, serial, lastInform: d._lastInform || null },
previous: null, updatedAt: new Date().toISOString(),
history: [{ ts: new Date().toISOString(), online }],
})
}
count++
} catch {}
}
lastPollAt = new Date().toISOString()
pollCount++
log(`Device poll: ${count} devices in ${Date.now() - startMs}ms (poll #${pollCount})`)
}
// Fetch and summarize full device details for a serial. When the cache
// already knows the ACS _id, fetch that device directly; otherwise search
// the ACS by exact serial, then a TP-Link MAC-suffix fallback, then an _id
// substring match. Caches the summary and registers an alias when the ACS
// serial differs from the queried one. Returns the fresh summary, the stale
// cached summary (direct-fetch failure), or null when nothing is found.
async function fetchDeviceDetails (serial) {
const existing = getCacheEntry(serial)
const acsId = existing?.summary?._id
const projection = 'DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags'
if (acsId) {
try {
const r = await nbiRequest(`/devices/${encodeURIComponent(acsId)}`)
if (r.status === 200 && r.data) {
const s = summarizeDevice(r.data)
if (s.serial) cacheDevice(s)
return s
}
} catch (e) { log(`On-demand fetch error for ${serial}: ${e.message}`) }
// Direct fetch failed — fall back to whatever summary we already had.
return existing?.summary || null
}
try {
let devices = []
// 1) exact serial-number match
let q = JSON.stringify({ 'DeviceID.SerialNumber._value': serial })
let r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
// 2) "TPLG<hex>" serials: match the MAC suffix (hex pairs joined by ':')
if (!devices.length && serial.startsWith('TPLG')) {
const formatted = serial.slice(4).toUpperCase().match(/.{2}/g).join(':')
q = JSON.stringify({ 'Device.DeviceInfo.X_TP_MACAddress._value': { '$regex': formatted + '$' } })
r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
}
// 3) last resort: _id substring match
if (!devices.length) {
q = JSON.stringify({ '_id': { '$regex': serial } })
r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
}
if (devices.length) {
const s = summarizeDevice(devices[0])
if (s.serial) {
cacheDevice(s)
// Remember the queried serial as an alias of the canonical ACS serial.
if (s.serial !== serial) aliasSerial(serial, s.serial)
}
log(`Device ${serial} found via ACS search → ${devices[0]._id}`)
return s
}
} catch (e) { log(`ACS search error for ${serial}: ${e.message}`) }
log(`Device ${serial} not found in ACS`)
return null
}
// Boot the background poller: initialize the cache DB, wait 5s, run one
// initial poll, then repeat every `intervalMs` (default 5 min). Only online
// status is polled; full device details are fetched on demand.
function startPoller (intervalMs = 300000) {
  initCacheDb()
    .then(() => {
      setTimeout(async () => {
        try { await pollOnlineStatus() } catch (e) { log('Initial poll error:', e.message, e.stack) }
        pollerTimer = setInterval(async () => {
          try { await pollOnlineStatus() } catch (e) { log('Poll error:', e.message) }
        }, intervalMs)
      }, 5000)
    })
    .catch(e => log('Cache DB init failed:', e.message))
  log(`Device cache poller scheduled every ${intervalMs / 1000}s (online status only, details on-demand)`)
}
// Cancel the recurring poll timer, if one is running.
function stopPoller () {
  if (!pollerTimer) return
  clearInterval(pollerTimer)
  pollerTimer = null
}
// Handler for the per-device hosts endpoint, wired to this module's caches.
const handleHosts = createHostsHandler({ nbiRequest, json, deviceCache, cacheHosts, getCachedHosts })
// Reverse lookup: map a GenieACS device _id back to its cached serial.
// Linear scan over the cache; returns null when no entry matches.
function findSerialByDeviceId (deviceId) {
  for (const [serial, entry] of deviceCache) {
    if (entry.summary?._id === deviceId) return serial
  }
  return null
}
async function handle (req, res, method, path, url) {
if (!cfg.GENIEACS_NBI_URL) return json(res, 503, { error: 'GenieACS NBI not configured' })
try {
const parts = path.replace('/devices', '').split('/').filter(Boolean)
if (parts[0] === 'cache-stats' && method === 'GET') return json(res, 200, getCacheStats())
if (parts[0] === 'events' && method === 'GET') {
const serial = url.searchParams.get('serial')
if (!serial) return json(res, 400, { error: 'Provide serial parameter' })
return json(res, 200, await getDeviceEvents(serial, Math.min(parseInt(url.searchParams.get('limit') || '50', 10), 200)))
}
if (parts[0] === 'poll' && method === 'POST') {
pollOnlineStatus().then(() => log('Manual poll completed')).catch(e => log('Manual poll error:', e.message))
return json(res, 200, { ok: true, message: 'Poll triggered' })
}
if (parts[0] === 'summary' && method === 'GET') {
const fiveMinAgo = new Date(Date.now() - 5 * 60 * 1000).toISOString()
const result = await nbiRequest('/devices/?projection=DeviceID,_lastInform,_tags&limit=10000')
const devices = Array.isArray(result.data) ? result.data : []
const stats = { total: devices.length, online: 0, offline: 0, models: {} }
for (const d of devices) {
const model = d.DeviceID?.ProductClass?._value || 'Unknown'
const isOnline = d._lastInform && new Date(d._lastInform).toISOString() > fiveMinAgo
isOnline ? stats.online++ : stats.offline++
if (!stats.models[model]) stats.models[model] = { total: 0, online: 0 }
stats.models[model].total++
if (isOnline) stats.models[model].online++
}
return json(res, 200, stats)
}
if (parts[0] === 'lookup' && method === 'GET') {
const serial = url.searchParams.get('serial'), mac = url.searchParams.get('mac')
const forceRefresh = url.searchParams.get('refresh') === '1'
if (!serial && !mac) return json(res, 400, { error: 'Provide serial or mac parameter' })
if (serial && !forceRefresh) {
const cached = getCacheEntry(serial)
if (cached?.summary && cached.summary.wifi !== undefined) return json(res, 200, [cached.summary])
for (const [, entry] of deviceCache) {
if (entry.summary?._id && entry.summary.wifi !== undefined) {
const s = entry.summary
if (s._id?.includes(serial) || s.mac === serial) return json(res, 200, [s])
}
}
}
const projection = 'DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags'
let devices = []
if (serial) {
let q = JSON.stringify({ 'DeviceID.SerialNumber._value': serial })
let r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
if (!devices.length && serial.startsWith('TPLG')) {
q = JSON.stringify({ 'Device.Optical.Interface.1.GponAuth.GponSn._value': { '$regex': serial.slice(4).toUpperCase() + '$' } })
r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
}
if (!devices.length) {
q = JSON.stringify({ '_id': { '$regex': serial } })
r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
}
} else if (mac) {
const cleanMac = mac.replace(/[:-]/g, '').toUpperCase()
const q = JSON.stringify({ '$or': [
{ 'InternetGatewayDevice.WANDevice.1.WANEthernetInterfaceConfig.MACAddress._value': mac },
{ 'Device.Ethernet.Interface.1.MACAddress._value': mac },
{ 'Device.DeviceInfo.X_TP_MACAddress._value': { '$regex': cleanMac.slice(-6) } },
]})
const r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
devices = Array.isArray(r.data) ? r.data : []
}
const summaries = devices.map(summarizeDevice)
summaries.forEach(s => {
cacheDevice(s)
if (serial && s.serial && s.serial !== serial) deviceCache.set(serial, deviceCache.get(s.serial))
})
return json(res, 200, summaries)
}
if (parts[0] === 'cache' && method === 'GET') {
const serial = url.searchParams.get('serial')
if (serial) {
const cached = getCached(serial)
if (!cached) return json(res, 404, { error: 'Not in cache' })
return json(res, 200, { serial, summary: cached.summary, previous: cached.previous, updatedAt: cached.updatedAt, history: cached.history })
}
return json(res, 200, getCacheStats())
}
if (parts[0] === 'faults' && parts.length === 1 && method === 'GET') {
const r = await nbiRequest('/faults/')
return json(res, r.status, r.data)
}
if (!parts.length && method === 'GET') {
const limit = url.searchParams.get('limit') || '50', skip = url.searchParams.get('skip') || '0'
const query = url.searchParams.get('query') || ''
const projection = url.searchParams.get('projection') || 'DeviceID,_lastInform,_tags'
const sort = url.searchParams.get('sort') || '{"_lastInform":-1}'
let nbiPath = `/devices/?projection=${encodeURIComponent(projection)}&limit=${limit}&skip=${skip}&sort=${encodeURIComponent(sort)}`
if (query) nbiPath += `&query=${encodeURIComponent(query)}`
const result = await nbiRequest(nbiPath)
const devices = Array.isArray(result.data) ? result.data : []
if (projection === 'DeviceID,_lastInform,_tags') {
return json(res, 200, devices.map(d => ({
_id: d._id, serial: d.DeviceID?.SerialNumber?._value || '',
manufacturer: d.DeviceID?.Manufacturer?._value || '', model: d.DeviceID?.ProductClass?._value || '',
oui: d.DeviceID?.OUI?._value || '', lastInform: d._lastInform || null, tags: d._tags || [],
})))
}
return json(res, 200, devices.map(summarizeDevice))
}
const deviceId = decodeURIComponent(parts[0])
const sub = parts[1] || null
if (!sub && method === 'GET') {
const r = await nbiRequest(`/devices/?query=${encodeURIComponent(`_id = "${deviceId}"`)}&projection=DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags`)
const devs = Array.isArray(r.data) ? r.data : []
if (!devs.length) return json(res, 404, { error: 'Device not found' })
return json(res, 200, summarizeDevice(devs[0]))
}
if (sub === 'tasks' && method === 'POST') {
const body = await parseBody(req)
const connReq = url.searchParams.get('connection_request') !== null
let p = `/devices/${encodeURIComponent(deviceId)}/tasks`
if (connReq) p += '?connection_request'
const timeout = url.searchParams.get('timeout')
if (timeout) p += (connReq ? '&' : '?') + `timeout=${timeout}`
const r = await nbiRequest(p, 'POST', body)
return json(res, r.status, r.data)
}
if (sub === 'tasks' && method === 'GET') {
const r = await nbiRequest(`/tasks/?query=${encodeURIComponent(`device = "${deviceId}"`)}`)
return json(res, r.status, r.data)
}
if (sub === 'hosts' && method === 'GET') {
if (!url.searchParams.has('refresh')) {
const serial = findSerialByDeviceId(deviceId)
if (serial) {
const cached = getCachedHosts(serial)
if (cached?.data) return json(res, 200, cached.data)
}
}
return handleHosts(res, deviceId)
}
if (sub === 'faults' && method === 'GET') {
const r = await nbiRequest(`/faults/?query=${encodeURIComponent(`device = "${deviceId}"`)}`)
return json(res, r.status, r.data)
}
if (sub === 'events' && method === 'GET') {
const serial = findSerialByDeviceId(deviceId)
if (!serial) return json(res, 404, { error: 'Device not found in cache' })
return json(res, 200, await getDeviceEvents(serial, Math.min(parseInt(url.searchParams.get('limit') || '50', 10), 200)))
}
if (sub === 'tasks' && parts[2] && method === 'DELETE') {
const r = await nbiRequest(`/tasks/${parts[2]}`, 'DELETE')
return json(res, r.status, r.data)
}
if (!sub && method === 'DELETE') {
const r = await nbiRequest(`/devices/${encodeURIComponent(deviceId)}`, 'DELETE')
return json(res, r.status, r.data)
}
return json(res, 400, { error: 'Unknown device endpoint' })
} catch (e) {
log('GenieACS error:', e.message)
return json(res, 502, { error: 'GenieACS NBI error: ' + e.message })
}
}
async function handleACSConfig (req, res, method, path) {
if (!cfg.GENIEACS_NBI_URL) return json(res, 503, { error: 'GenieACS NBI not configured' })
try {
const resource = path.replace('/acs/', '').split('/').filter(Boolean)[0]
const endpoints = { provisions: '/provisions/', presets: '/presets/', 'virtual-parameters': '/virtual-parameters/', files: '/files/', faults: '/faults/' }
if (resource === 'export' && method === 'GET') {
const [provisions, presets, virtualParams, files, faults] = await Promise.all(
['/provisions/', '/presets/', '/virtual-parameters/', '/files/', '/faults/'].map(p => nbiRequest(p).then(r => r.data).catch(() => []))
)
const arr = v => Array.isArray(v) ? v : []
return json(res, 200, {
exportedAt: new Date().toISOString(), source: cfg.GENIEACS_NBI_URL,
provisions: arr(provisions), presets: arr(presets), virtualParameters: arr(virtualParams),
files: arr(files).map(f => ({ _id: f._id, metadata: f.metadata || {}, filename: f.filename, length: f.length, uploadDate: f.uploadDate })),
faultCount: arr(faults).length,
summary: { provisionCount: arr(provisions).length, presetCount: arr(presets).length, virtualParamCount: arr(virtualParams).length, fileCount: arr(files).length },
})
}
if (endpoints[resource] && method === 'GET') {
const parts = path.replace('/acs/', '').split('/').filter(Boolean)
const nbiPath = parts[1] ? endpoints[resource] + encodeURIComponent(decodeURIComponent(parts[1])) : endpoints[resource]
const r = await nbiRequest(nbiPath)
return json(res, r.status, r.data)
}
return json(res, 404, { error: 'Unknown ACS config endpoint' })
} catch (e) {
log('ACS config error:', e.message)
return json(res, 502, { error: 'GenieACS config error: ' + e.message })
}
}
// Public API: HTTP handlers plus cache/poller utilities used by server wiring.
module.exports = { handle, handleACSConfig, summarizeDevice, cacheDevice, getCached, getCacheStats, fetchDeviceDetails, startPoller, stopPoller, logDeviceEvent, getDeviceEvents }