- contracts.js: built-in install chain fallback when no Flow Template matches on_contract_signed — every accepted contract now creates a master Issue + chained Dispatch Jobs (fiber_install template) so we never lose a signed contract to a missing flow config. - acceptance.js: export createDeferredJobs + propagate assigned_group into Dispatch Job payload (was only in notes, not queryable). - dispatch.js: chain-walk helpers (unblockDependents, _isChainTerminal, setJobStatusWithChain) + terminal-node detection that activates pending Service Subscriptions (En attente → Actif, start_date=tomorrow) and emits a prorated Sales Invoice covering tomorrow → EOM. Courtesy-day billing convention: activation day is free, first period starts next day. - dispatch.js: fix Sales Invoice 417 by resolving company default income account (Ventes - T) and passing company + income_account on each item. - dispatch.js: GET /dispatch/group-jobs + POST /dispatch/claim-job for tech self-assignment from the group queue; enriches with customer_name / service_location via per-job fetches since those fetch_from fields aren't queryable in list API. - TechTasksPage.vue: redesigned mobile-first UI with progress arc, status chips, and new "Tâches du groupe" section showing claimable unassigned jobs with a "Prendre" CTA. Live updates via SSE job-claimed / job-unblocked. - NetworkPage.vue + poller-control.js: poller toggle semantics flipped — green when enabled, red/gray when paused; explicit status chips for clarity. E2E verified end-to-end: CTR-00007 → 4 chained jobs → claim → In Progress → Completed walks chain → SUB-0000100002 activated (start=2026-04-24) → SINV-2026-700012 prorata $9.32 (= 39.95 × 7/30). Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
613 lines
28 KiB
JavaScript
613 lines
28 KiB
JavaScript
'use strict'
|
|
const cfg = require('./config')
|
|
const { log, json, parseBody, nbiRequest, deepGetValue } = require('./helpers')
|
|
const { summarizeDevice } = require('./device-extractors')
|
|
const createHostsHandler = require('./device-hosts')
|
|
|
|
const deviceCache = new Map()
|
|
const hostsDbCache = new Map()
|
|
const MAX_HISTORY = 60
|
|
let pollerTimer = null, lastPollAt = null, pollCount = 0, dbPool = null, dbReady = false
|
|
const serialAliases = new Map()
|
|
|
|
// Lazily construct the shared Postgres pool (max 3 connections).
// Returns the existing pool when already built, or null when no
// CACHE_DB_URL is configured (memory-only mode).
function getPool () {
  if (dbPool) return dbPool
  if (!cfg.CACHE_DB_URL) return null
  const pg = require('pg')
  const pool = new pg.Pool({ connectionString: cfg.CACHE_DB_URL, max: 3 })
  // Swallow pool-level errors into the log so an idle-connection drop
  // cannot crash the process.
  pool.on('error', e => log('Cache DB pool error:', e.message))
  dbPool = pool
  return dbPool
}
|
|
|
|
// Create the cache tables (idempotent DDL) and warm the in-memory maps
// from whatever the DB already holds.  Sets dbReady on success; on any
// failure the module keeps running memory-only.
async function initCacheDb () {
  const pool = getPool()
  if (!pool) { log('No CACHE_DB_URL — device cache is memory-only'); return }
  try {
    // Schema is created lazily on boot; every statement is IF NOT EXISTS.
    await pool.query(`CREATE TABLE IF NOT EXISTS device_cache (serial TEXT PRIMARY KEY, summary JSONB NOT NULL DEFAULT '{}', previous JSONB, history JSONB NOT NULL DEFAULT '[]', updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
    await pool.query(`CREATE TABLE IF NOT EXISTS hosts_cache (serial TEXT PRIMARY KEY, data JSONB NOT NULL DEFAULT '{}', updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
    await pool.query(`CREATE TABLE IF NOT EXISTS device_events (id SERIAL PRIMARY KEY, serial TEXT NOT NULL, event TEXT NOT NULL, reason TEXT, details JSONB, created_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
    // NOTE(review): modem_diagnostic_cache is created here but not read or
    // written in this file — presumably used by another module; confirm.
    await pool.query(`CREATE TABLE IF NOT EXISTS modem_diagnostic_cache (serial TEXT PRIMARY KEY, diagnostic JSONB NOT NULL DEFAULT '{}', updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW())`)
    await pool.query(`CREATE INDEX IF NOT EXISTS idx_device_events_serial ON device_events (serial, created_at DESC)`)
    // Rehydrate the device summaries/history into the in-memory map…
    const { rows } = await pool.query('SELECT serial, summary, previous, history, updated_at FROM device_cache')
    for (const row of rows) {
      deviceCache.set(row.serial, {
        summary: row.summary, previous: row.previous,
        updatedAt: row.updated_at?.toISOString() || new Date().toISOString(),
        history: Array.isArray(row.history) ? row.history : [],
      })
    }
    // …and the per-serial hosts payloads.
    const hostsRows = await pool.query('SELECT serial, data, updated_at FROM hosts_cache')
    for (const row of hostsRows.rows) hostsDbCache.set(row.serial, { data: row.data, updatedAt: row.updated_at?.toISOString() })
    // Only flip dbReady once the warm load succeeded; the debounced
    // writers and event logger gate on this flag.
    dbReady = true
    log(`Device cache loaded from DB: ${rows.length} devices, ${hostsRows.rows.length} hosts entries`)
  } catch (e) { log('Failed to init cache DB:', e.message) }
}
|
|
|
|
// Factory for a write-behind queue: callers mark a key dirty via the
// returned schedule(key) and, two seconds later, a single flush
// serializes and writes every dirty key.  `serialize(key)` must return
// { sql, params } or a falsy value to skip the key.
function createDebouncedWriter (table, serialize) {
  const dirty = new Set()
  let flushTimer = null

  async function flush () {
    flushTimer = null
    // Keep dirty keys queued while the DB is not ready; a later
    // schedule() call will re-arm the timer.
    if (!dbReady || !dirty.size) return
    const pool = getPool()
    if (!pool) return
    const batch = [...dirty]
    dirty.clear()
    for (const key of batch) {
      const stmt = serialize(key)
      if (!stmt) continue
      try {
        await pool.query(stmt.sql, stmt.params)
      } catch (e) {
        log(`DB write error (${table}) for ${key}: ${e.message}`)
      }
    }
  }

  return function schedule (key) {
    dirty.add(key)
    if (!flushTimer) flushTimer = setTimeout(flush, 2000)
  }
}
|
|
|
|
// Debounced upsert of one device_cache row per dirty serial.  Skips
// serials evicted from the in-memory cache before the flush ran.
const scheduleDbWrite = createDebouncedWriter('device_cache', serial => {
  const cached = deviceCache.get(serial)
  if (!cached) return null
  const sql = `INSERT INTO device_cache (serial, summary, previous, history, updated_at) VALUES ($1, $2, $3, $4, NOW()) ON CONFLICT (serial) DO UPDATE SET summary = EXCLUDED.summary, previous = EXCLUDED.previous, history = EXCLUDED.history, updated_at = NOW()`
  const params = [serial, JSON.stringify(cached.summary), JSON.stringify(cached.previous), JSON.stringify(cached.history)]
  return { sql, params }
})
|
|
|
|
// Debounced upsert of one hosts_cache row per dirty serial.
const scheduleHostsDbWrite = createDebouncedWriter('hosts_cache', serial => {
  const cached = hostsDbCache.get(serial)
  if (!cached) return null
  const sql = `INSERT INTO hosts_cache (serial, data, updated_at) VALUES ($1, $2, NOW()) ON CONFLICT (serial) DO UPDATE SET data = EXCLUDED.data, updated_at = NOW()`
  return { sql, params: [serial, JSON.stringify(cached.data)] }
})
|
|
|
|
// Append one row to device_events (best effort — failures are only
// logged).  `details` is JSON-stringified when provided; `reason`
// defaults to NULL.  No-op until the cache DB is ready.
async function logDeviceEvent (serial, event, reason, details) {
  if (!dbReady) return
  const pool = getPool()
  if (!pool) return
  const params = [serial, event, reason || null, details ? JSON.stringify(details) : null]
  try {
    await pool.query('INSERT INTO device_events (serial, event, reason, details) VALUES ($1, $2, $3, $4)', params)
  } catch (e) {
    log(`Event log error for ${serial}: ${e.message}`)
  }
}
|
|
|
|
// Fault-message patterns mapped to human-readable disconnect reasons,
// checked in declaration order by fetchDisconnectReason.
const DISCONNECT_REASONS = [
  [/dying.?gasp/i, 'Dying Gasp (power failure)'],
  [/LOS/i, 'LOS (loss of signal)'],
  [/LOF/i, 'LOF (loss of frame)'],
]
|
|
|
|
// Best-effort lookup of the most recent GenieACS fault for a device,
// mapped to a human-readable disconnect reason.  Returns a label from
// DISCONNECT_REASONS, the raw fault text (capped at 200 chars), or null.
async function fetchDisconnectReason (deviceId) {
  if (!deviceId) return null
  try {
    const resp = await nbiRequest(`/faults/?query=${encodeURIComponent(`device = "${deviceId}"`)}`)
    const faults = Array.isArray(resp.data) ? resp.data : []
    if (!faults.length) return null
    // Newest fault first (ISO timestamps compare lexicographically).
    faults.sort((a, b) => (b.timestamp || '').localeCompare(a.timestamp || ''))
    const newest = faults[0]
    const msg = (newest.detail?.message || newest.message || newest.code || '').toString()
    const known = DISCONNECT_REASONS.find(([re]) => re.test(msg))
    if (known) return known[1]
    return msg ? msg.slice(0, 200) : null
  } catch {
    // Diagnostics only — never let a fault lookup break the poll loop.
    return null
  }
}
|
|
|
|
// Fetch the most recent persisted events for a serial, newest first.
// Returns [] when the DB is unavailable or the query fails.
async function getDeviceEvents (serial, limit = 50) {
  if (!dbReady) return []
  const pool = getPool()
  if (!pool) return []
  const sql = 'SELECT id, serial, event, reason, details, created_at FROM device_events WHERE serial = $1 ORDER BY created_at DESC LIMIT $2'
  try {
    const result = await pool.query(sql, [serial, limit])
    return result.rows
  } catch {
    return []
  }
}
|
|
|
|
// Store (or refresh) a device summary in the in-memory cache, append a
// history snapshot when something interesting changed (or at most once
// a minute), and schedule a debounced DB write.  No-op when the summary
// has no serial.
function cacheDevice (summary) {
  if (!summary?.serial) return
  const existing = deviceCache.get(summary.serial)
  const now = new Date().toISOString()
  const snapshot = {
    ts: now,
    // Online = informed within the last 15 minutes.
    online: summary.lastInform ? (Date.now() - new Date(summary.lastInform).getTime()) < 900000 : false,
    rxPower: summary.rxPower, txPower: summary.txPower,
    wifiClients: summary.wifi?.totalClients || 0, hostsCount: summary.hostsCount,
    uptime: summary.uptime, ip: summary.ip,
  }
  const history = existing?.history || []
  const last = history[history.length - 1]
  if (!last || last.online !== snapshot.online || last.wifiClients !== snapshot.wifiClients || last.rxPower !== snapshot.rxPower || last.ip !== snapshot.ip || (Date.now() - new Date(last.ts).getTime()) > 60000) {
    history.push(snapshot)
    if (history.length > MAX_HISTORY) history.shift()
  }
  if (existing) {
    // BUGFIX: capture the outgoing summary as `previous` BEFORE
    // overwriting it — the old order assigned the new summary first,
    // making `previous` identical to `summary` and losing the diff.
    existing.previous = existing.summary
    existing.summary = summary
    existing.updatedAt = now
  } else {
    deviceCache.set(summary.serial, { summary, previous: null, updatedAt: now, history })
  }
  scheduleDbWrite(summary.serial)
}
|
|
|
|
// Register `alias` as an alternate key for a real serial, and mirror
// the cache entry under the alias when one already exists.  Ignores
// empty arguments and self-aliases.
function aliasSerial (alias, realSerial) {
  if (!alias || !realSerial) return
  if (alias === realSerial) return
  serialAliases.set(alias, realSerial)
  const entry = deviceCache.get(realSerial)
  if (entry) deviceCache.set(alias, entry)
}
|
|
|
|
// Look up a cache entry by serial, falling back to the alias map.  On
// an alias hit the entry is memoized under the alias for next time.
function getCacheEntry (serial) {
  const direct = deviceCache.get(serial)
  if (direct) return direct
  const real = serialAliases.get(serial)
  if (!real) return null
  const entry = deviceCache.get(real)
  if (entry) deviceCache.set(serial, entry)
  return entry || null
}
|
|
|
|
// Cache a hosts (LAN clients) payload for a serial and schedule the
// debounced DB write.  Ignores calls missing a serial or payload.
function cacheHosts (serial, data) {
  if (!serial) return
  if (!data) return
  const record = { data, updatedAt: new Date().toISOString() }
  hostsDbCache.set(serial, record)
  scheduleHostsDbWrite(serial)
}
|
|
|
|
// Read-only accessor: cached hosts payload for a serial, or null.
function getCachedHosts (serial) { return hostsDbCache.get(serial) || null }

// Read-only accessor: cached device entry (alias-aware), or null.
function getCached (serial) { return getCacheEntry(serial) || null }
|
|
|
|
// Snapshot of cache health for the admin UI: one row per cached device
// (online = informed within the last 10 minutes) plus aggregate
// counters, with online devices sorted first.
function getCacheStats () {
  const entries = []
  for (const [serial, entry] of deviceCache) {
    const s = entry.summary
    const lastInform = s?.lastInform ? new Date(s.lastInform) : null
    const online = lastInform ? (Date.now() - lastInform.getTime()) < 600000 : false
    entries.push({
      serial,
      online,
      model: `${s?.manufacturer || ''} ${s?.model || ''}`.trim(),
      wifiClients: s?.wifi?.totalClients || 0,
      updatedAt: entry.updatedAt,
      historyPoints: entry.history?.length || 0,
    })
  }
  entries.sort((a, b) => (b.online ? 1 : 0) - (a.online ? 1 : 0))
  return {
    devices: deviceCache.size,
    hostsEntries: hostsDbCache.size,
    lastPollAt,
    pollCount,
    dbReady,
    entries,
  }
}
|
|
|
|
// Minimal NBI projection used by the bulk poll: the ACS id, last-inform
// timestamp, and both TR-098/TR-181 serial-number locations.
const FAST_PROJECTION = '_id,_lastInform,InternetGatewayDevice.DeviceInfo.SerialNumber,Device.DeviceInfo.SerialNumber'
|
|
|
|
// Bulk online-status poll: page through every ACS device with the fast
// projection, update each cache entry's online flag/history, log
// online↔offline transitions as device_events, and schedule DB writes.
// Capped at 10 000 devices per poll; online = informed within 15 min.
async function pollOnlineStatus () {
  if (!cfg.GENIEACS_NBI_URL) return
  // Pause gate — admin toggle via /admin/pollers. Skip silently-ish when
  // paused so the log doesn't spam every 5 minutes.
  if (require('./poller-control').isPaused('device')) {
    if ((pollCount % 12) === 0) log('Device poll: skipped (paused via /admin/pollers)')
    pollCount++
    return
  }
  const startMs = Date.now()
  const { httpRequest } = require('./helpers')
  const allDevices = []
  let skip = 0
  const pageSize = 1000
  try {
    // Page through /devices until a short page, the 10k cap, or an error;
    // a mid-run failure still processes whatever pages were fetched.
    while (true) {
      const r = await httpRequest(cfg.GENIEACS_NBI_URL, `/devices/?projection=${FAST_PROJECTION}&limit=${pageSize}&skip=${skip}`, { timeout: 30000 })
      const page = Array.isArray(r.data) ? r.data : []
      if (!page.length) break
      allDevices.push(...page)
      skip += page.length
      if (page.length < pageSize || allDevices.length >= 10000) break
      // Pause between pages to avoid overwhelming GenieACS
      await new Promise(resolve => setTimeout(resolve, 500))
    }
  } catch (e) { log(`Poll fetch error (skip=${skip}): ${e.message} — processing ${allDevices.length} devices`) }

  let count = 0
  const now = Date.now()
  for (const d of allDevices) {
    try {
      // Serial from either data model; fall back to parsing the ACS _id
      // (OUI-ProductClass-Serial, URI-encoded).
      const serial = (deepGetValue(d, 'InternetGatewayDevice.DeviceInfo.SerialNumber') || deepGetValue(d, 'Device.DeviceInfo.SerialNumber') || '').toString()
        || (d._id ? decodeURIComponent(d._id.split('-').slice(2).join('-')) : '')
      if (!serial) continue

      const lastInform = d._lastInform ? new Date(d._lastInform).getTime() : 0
      const online = lastInform ? (now - lastInform) < 900000 : false

      const existing = deviceCache.get(serial)
      if (existing) {
        // Transition detection against the last history point; first-ever
        // poll for a device (empty history) records no event.
        const wasOnline = existing.history?.length ? existing.history[existing.history.length - 1].online : null
        if (wasOnline !== null && wasOnline !== online) {
          // Offline events are enriched with the latest fault, fetched
          // asynchronously (fire-and-forget) so the loop isn't blocked.
          if (!online) fetchDisconnectReason(d._id).then(reason => logDeviceEvent(serial, 'offline', reason, { lastInform: d._lastInform, deviceId: d._id }))
          else logDeviceEvent(serial, 'online', null, { lastInform: d._lastInform, deviceId: d._id })
        }
        existing.summary.lastInform = d._lastInform || null
        existing.summary._id = d._id
        // NOTE(review): d._tags is read here but _tags is not part of
        // FAST_PROJECTION above — confirm the NBI still returns it.
        if (d._tags) existing.summary.tags = d._tags
        const snapshot = { ts: new Date().toISOString(), online }
        const history = existing.history
        const last = history[history.length - 1]
        // Append on state change or at most once per minute.
        if (!last || last.online !== online || (now - new Date(last.ts).getTime()) > 60000) {
          history.push(snapshot)
          if (history.length > MAX_HISTORY) history.shift()
        }
        existing.updatedAt = new Date().toISOString()
        scheduleDbWrite(serial)
      } else {
        // First sighting: minimal summary; full details are fetched
        // on demand by fetchDeviceDetails.
        deviceCache.set(serial, {
          summary: { _id: d._id, serial, lastInform: d._lastInform || null },
          previous: null, updatedAt: new Date().toISOString(),
          history: [{ ts: new Date().toISOString(), online }],
        })
      }
      count++
    } catch {}
  }
  lastPollAt = new Date().toISOString()
  pollCount++
  log(`Device poll: ${count} devices in ${Date.now() - startMs}ms (poll #${pollCount})`)
}
|
|
|
|
// On-demand full-detail fetch for one device.  Fast path: direct GET by
// cached ACS _id.  Otherwise search the ACS by serial, then by
// vendor-specific fallbacks (TP-Link MAC-derived serial, Raisecom RCMG
// tag/UserName), then by _id regex.  Successful results are cached and
// aliased; returns the summary, the stale cached summary, or null.
async function fetchDeviceDetails (serial) {
  const existing = getCacheEntry(serial)
  const acsId = existing?.summary?._id
  const projection = 'DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags'

  if (acsId) {
    try {
      const r = await nbiRequest(`/devices/${encodeURIComponent(acsId)}`)
      if (r.status === 200 && r.data) {
        const s = summarizeDevice(r.data)
        if (s.serial) cacheDevice(s)
        return s
      }
    } catch (e) { log(`On-demand fetch error for ${serial}: ${e.message}`) }
    // Direct fetch failed — fall back to the (possibly stale) cache.
    return existing?.summary || null
  }

  try {
    let devices = []
    // 1. Exact serial match.
    let q = JSON.stringify({ 'DeviceID.SerialNumber._value': serial })
    let r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
    devices = Array.isArray(r.data) ? r.data : []

    // 2. TP-Link: serial tail is MAC-derived; match the colon-formatted
    // suffix against X_TP_MACAddress.
    // NOTE(review): a serial of exactly 'TPLG' makes match() return null
    // and .join throw — caught by the outer try, but worth guarding.
    if (!devices.length && serial.startsWith('TPLG')) {
      const formatted = serial.slice(4).toUpperCase().match(/.{2}/g).join(':')
      q = JSON.stringify({ 'Device.DeviceInfo.X_TP_MACAddress._value': { '$regex': formatted + '$' } })
      r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
      devices = Array.isArray(r.data) ? r.data : []
    }

    // Raisecom RCMG → try tag + UserName in GenieACS
    if (!devices.length && serial.startsWith('RCMG')) {
      try {
        q = JSON.stringify({ '_tags': serial })
        r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
        devices = Array.isArray(r.data) ? r.data : []
        if (!devices.length) {
          q = JSON.stringify({ 'InternetGatewayDevice.X_CT-COM_UserInfo.UserName._value': serial })
          r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
          devices = Array.isArray(r.data) ? r.data : []
        }
        if (devices.length) log(`Device ${serial} found via RCMG tag/UserName`)
      } catch (e) { log(`RCMG lookup failed for ${serial}: ${e.message}`) }
    }

    // 3. Last resort: the serial often appears inside the ACS _id.
    if (!devices.length) {
      q = JSON.stringify({ '_id': { '$regex': serial } })
      r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
      devices = Array.isArray(r.data) ? r.data : []
    }

    if (devices.length) {
      const s = summarizeDevice(devices[0])
      if (s.serial) {
        cacheDevice(s)
        // Remember the requested name as an alias when the canonical
        // serial differs (e.g. TPLG/RCMG pseudo-serials).
        if (s.serial !== serial) aliasSerial(serial, s.serial)
      }
      log(`Device ${serial} found via ACS search → ${devices[0]._id}`)
      return s
    }
  } catch (e) { log(`ACS search error for ${serial}: ${e.message}`) }

  log(`Device ${serial} not found in ACS`)
  return null
}
|
|
|
|
// Boot the cache DB, then start the recurring online-status poll.  The
// first poll runs after a 5 s warm-up delay; subsequent polls run every
// `intervalMs` (default 5 min).
//
// FIX: the warm-up setTimeout handle is now kept in `pollerTimer` so a
// stopPoller() call made before the first poll fires actually cancels
// it (Node's clearInterval clears setTimeout handles too).  Previously
// the pending timeout was untracked and the poller started anyway.
function startPoller (intervalMs = 300000) {
  initCacheDb().then(() => {
    pollerTimer = setTimeout(async () => {
      try { await pollOnlineStatus() } catch (e) { log('Initial poll error:', e.message, e.stack) }
      // Promote the one-shot warm-up timer to the steady-state interval.
      pollerTimer = setInterval(async () => {
        try { await pollOnlineStatus() } catch (e) { log('Poll error:', e.message) }
      }, intervalMs)
    }, 5000)
  }).catch(e => log('Cache DB init failed:', e.message))
  log(`Device cache poller scheduled every ${intervalMs / 1000}s (online status only, details on-demand)`)
}
|
|
|
|
// Cancel the recurring poll timer (no-op when the poller isn't running).
function stopPoller () { if (pollerTimer) { clearInterval(pollerTimer); pollerTimer = null } }
|
|
|
|
// HTTP handler for /devices/:id/hosts, wired to this module's caches.
const handleHosts = createHostsHandler({ nbiRequest, json, deviceCache, cacheHosts, getCachedHosts })
|
|
|
|
// Reverse lookup: map a GenieACS device _id back to the serial it is
// cached under (linear scan of the cache).  Returns null when unknown.
function findSerialByDeviceId (deviceId) {
  for (const [serial, entry] of deviceCache) {
    if (entry.summary?._id === deviceId) return serial
  }
  return null
}
|
|
|
|
// Main HTTP router for /devices/*.  Routes, in match order:
//   GET  /devices/cache-stats          — in-memory cache overview
//   GET  /devices/events?serial=       — persisted online/offline events
//   POST /devices/poll                 — fire-and-forget manual poll
//   GET  /devices/summary              — fleet online/offline per model
//   GET  /devices/lookup?serial|mac=   — cache-first device search
//   GET  /devices/cache[?serial=]      — raw cache entry / stats
//   GET  /devices/faults               — all ACS faults
//   GET  /devices                      — paged device list
//   …/:deviceId[/tasks|hosts|faults|events] — per-device operations
// Any GenieACS failure surfaces as a 502.
async function handle (req, res, method, path, url) {
  if (!cfg.GENIEACS_NBI_URL) return json(res, 503, { error: 'GenieACS NBI not configured' })

  try {
    const parts = path.replace('/devices', '').split('/').filter(Boolean)

    if (parts[0] === 'cache-stats' && method === 'GET') return json(res, 200, getCacheStats())

    if (parts[0] === 'events' && method === 'GET') {
      const serial = url.searchParams.get('serial')
      if (!serial) return json(res, 400, { error: 'Provide serial parameter' })
      // limit is clamped to 200.
      return json(res, 200, await getDeviceEvents(serial, Math.min(parseInt(url.searchParams.get('limit') || '50', 10), 200)))
    }

    if (parts[0] === 'poll' && method === 'POST') {
      // Fire-and-forget: respond immediately, poll continues in background.
      pollOnlineStatus().then(() => log('Manual poll completed')).catch(e => log('Manual poll error:', e.message))
      return json(res, 200, { ok: true, message: 'Poll triggered' })
    }

    if (parts[0] === 'summary' && method === 'GET') {
      // Live fleet stats straight from the NBI (online = informed < 5 min).
      const fiveMinAgo = new Date(Date.now() - 5 * 60 * 1000).toISOString()
      const result = await nbiRequest('/devices/?projection=DeviceID,_lastInform,_tags&limit=10000')
      const devices = Array.isArray(result.data) ? result.data : []
      const stats = { total: devices.length, online: 0, offline: 0, models: {} }
      for (const d of devices) {
        const model = d.DeviceID?.ProductClass?._value || 'Unknown'
        const isOnline = d._lastInform && new Date(d._lastInform).toISOString() > fiveMinAgo
        isOnline ? stats.online++ : stats.offline++
        if (!stats.models[model]) stats.models[model] = { total: 0, online: 0 }
        stats.models[model].total++
        if (isOnline) stats.models[model].online++
      }
      return json(res, 200, stats)
    }

    if (parts[0] === 'lookup' && method === 'GET') {
      const serial = url.searchParams.get('serial'), mac = url.searchParams.get('mac')
      const forceRefresh = url.searchParams.get('refresh') === '1'
      if (!serial && !mac) return json(res, 400, { error: 'Provide serial or mac parameter' })

      // Cache-first: only entries with full details (wifi populated)
      // qualify; bare poll entries fall through to an ACS search.
      if (serial && !forceRefresh) {
        const cached = getCacheEntry(serial)
        if (cached?.summary && cached.summary.wifi !== undefined) return json(res, 200, [cached.summary])
        for (const [, entry] of deviceCache) {
          if (entry.summary?._id && entry.summary.wifi !== undefined) {
            const s = entry.summary
            if (s._id?.includes(serial) || s.mac === serial) return json(res, 200, [s])
          }
        }
      }

      const projection = 'DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags'
      let devices = []

      if (serial) {
        // 1. Exact serial match.
        let q = JSON.stringify({ 'DeviceID.SerialNumber._value': serial })
        let r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
        devices = Array.isArray(r.data) ? r.data : []

        // 2. TP-Link: match the GPON serial suffix.
        if (!devices.length && serial.startsWith('TPLG')) {
          q = JSON.stringify({ 'Device.Optical.Interface.1.GponAuth.GponSn._value': { '$regex': serial.slice(4).toUpperCase() + '$' } })
          r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
          devices = Array.isArray(r.data) ? r.data : []
        }
        // 3. The serial often appears inside the ACS _id.
        if (!devices.length) {
          q = JSON.stringify({ '_id': { '$regex': serial } })
          r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
          devices = Array.isArray(r.data) ? r.data : []
        }
        // Raisecom RCMG → resolve via OLT coords (slot/port/ontid) → SNMP WAN IP → GenieACS
        if (!devices.length && serial.startsWith('RCMG')) {
          try {
            // 1. Try GenieACS tag match
            q = JSON.stringify({ '_tags': serial })
            r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
            devices = Array.isArray(r.data) ? r.data : []
            if (devices.length) { log(`Lookup ${serial}: found via tag`); }

            // 2. Try GenieACS UserName field (older Raisecom firmware stores RCMG here)
            if (!devices.length) {
              q = JSON.stringify({ 'InternetGatewayDevice.X_CT-COM_UserInfo.UserName._value': serial })
              r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
              devices = Array.isArray(r.data) ? r.data : []
              if (devices.length) log(`Lookup ${serial}: found via UserName`)
            }

            // 3. Resolve current WAN IP via OLT SNMP coords → search GenieACS by IP
            if (!devices.length) {
              const oltIp = url.searchParams.get('olt_ip')
              const slot = url.searchParams.get('olt_slot')
              const port = url.searchParams.get('olt_port')
              const ontid = url.searchParams.get('olt_ontid')
              if (oltIp && slot && port && ontid) {
                const { getManageIp } = require('./olt-snmp')
                const mgmt = await getManageIp(null, { oltIp, slot: parseInt(slot), port: parseInt(port), ontId: parseInt(ontid) })
                if (mgmt?.manageIp) {
                  q = JSON.stringify({ 'InternetGatewayDevice.WANDevice.1.WANConnectionDevice.1.WANIPConnection.1.ExternalIPAddress._value': mgmt.manageIp })
                  r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
                  devices = Array.isArray(r.data) ? r.data : []
                  if (devices.length) log(`Lookup ${serial}: found via OLT SNMP ${oltIp} ${slot}/${port}/${ontid} → IP ${mgmt.manageIp}`)
                }
              }
            }
          } catch (e) { log(`RCMG lookup error for ${serial}: ${e.message}`) }
        }
      } else if (mac) {
        // MAC lookup across both data models; TP-Link stores a partial MAC.
        const cleanMac = mac.replace(/[:-]/g, '').toUpperCase()
        const q = JSON.stringify({ '$or': [
          { 'InternetGatewayDevice.WANDevice.1.WANEthernetInterfaceConfig.MACAddress._value': mac },
          { 'Device.Ethernet.Interface.1.MACAddress._value': mac },
          { 'Device.DeviceInfo.X_TP_MACAddress._value': { '$regex': cleanMac.slice(-6) } },
        ]})
        const r = await nbiRequest(`/devices/?query=${encodeURIComponent(q)}&projection=${projection}`)
        devices = Array.isArray(r.data) ? r.data : []
      }

      const summaries = devices.map(summarizeDevice)
      summaries.forEach(s => {
        cacheDevice(s)
        // NOTE(review): this mirrors the entry under the requested name
        // directly instead of going through aliasSerial() — confirm the
        // alias map should also learn this mapping.
        if (serial && s.serial && s.serial !== serial) deviceCache.set(serial, deviceCache.get(s.serial))
      })
      return json(res, 200, summaries)
    }

    if (parts[0] === 'cache' && method === 'GET') {
      const serial = url.searchParams.get('serial')
      if (serial) {
        const cached = getCached(serial)
        if (!cached) return json(res, 404, { error: 'Not in cache' })
        return json(res, 200, { serial, summary: cached.summary, previous: cached.previous, updatedAt: cached.updatedAt, history: cached.history })
      }
      return json(res, 200, getCacheStats())
    }

    if (parts[0] === 'faults' && parts.length === 1 && method === 'GET') {
      const r = await nbiRequest('/faults/')
      return json(res, r.status, r.data)
    }

    // GET /devices — paged list, pass-through query/projection/sort.
    if (!parts.length && method === 'GET') {
      const limit = url.searchParams.get('limit') || '50', skip = url.searchParams.get('skip') || '0'
      const query = url.searchParams.get('query') || ''
      const projection = url.searchParams.get('projection') || 'DeviceID,_lastInform,_tags'
      const sort = url.searchParams.get('sort') || '{"_lastInform":-1}'
      let nbiPath = `/devices/?projection=${encodeURIComponent(projection)}&limit=${limit}&skip=${skip}&sort=${encodeURIComponent(sort)}`
      if (query) nbiPath += `&query=${encodeURIComponent(query)}`
      const result = await nbiRequest(nbiPath)
      const devices = Array.isArray(result.data) ? result.data : []
      // Default projection → compact listing rows; custom → full summaries.
      if (projection === 'DeviceID,_lastInform,_tags') {
        return json(res, 200, devices.map(d => ({
          _id: d._id, serial: d.DeviceID?.SerialNumber?._value || '',
          manufacturer: d.DeviceID?.Manufacturer?._value || '', model: d.DeviceID?.ProductClass?._value || '',
          oui: d.DeviceID?.OUI?._value || '', lastInform: d._lastInform || null, tags: d._tags || [],
        })))
      }
      return json(res, 200, devices.map(summarizeDevice))
    }

    // Per-device routes: /devices/:deviceId[/:sub[/...]]
    const deviceId = decodeURIComponent(parts[0])
    const sub = parts[1] || null

    if (!sub && method === 'GET') {
      const r = await nbiRequest(`/devices/?query=${encodeURIComponent(`_id = "${deviceId}"`)}&projection=DeviceID,InternetGatewayDevice,Device,_lastInform,_lastBootstrap,_registered,_tags`)
      const devs = Array.isArray(r.data) ? r.data : []
      if (!devs.length) return json(res, 404, { error: 'Device not found' })
      return json(res, 200, summarizeDevice(devs[0]))
    }

    // Enqueue an NBI task; ?connection_request and ?timeout pass through.
    if (sub === 'tasks' && method === 'POST') {
      const body = await parseBody(req)
      const connReq = url.searchParams.get('connection_request') !== null
      let p = `/devices/${encodeURIComponent(deviceId)}/tasks`
      if (connReq) p += '?connection_request'
      const timeout = url.searchParams.get('timeout')
      if (timeout) p += (connReq ? '&' : '?') + `timeout=${timeout}`
      const r = await nbiRequest(p, 'POST', body)
      return json(res, r.status, r.data)
    }

    if (sub === 'tasks' && method === 'GET') {
      const r = await nbiRequest(`/tasks/?query=${encodeURIComponent(`device = "${deviceId}"`)}`)
      return json(res, r.status, r.data)
    }

    if (sub === 'hosts' && method === 'GET') {
      // Serve from the hosts cache unless ?refresh was requested.
      if (!url.searchParams.has('refresh')) {
        const serial = findSerialByDeviceId(deviceId)
        if (serial) {
          const cached = getCachedHosts(serial)
          if (cached?.data) return json(res, 200, cached.data)
        }
      }
      return handleHosts(res, deviceId)
    }

    if (sub === 'faults' && method === 'GET') {
      const r = await nbiRequest(`/faults/?query=${encodeURIComponent(`device = "${deviceId}"`)}`)
      return json(res, r.status, r.data)
    }

    if (sub === 'events' && method === 'GET') {
      const serial = findSerialByDeviceId(deviceId)
      if (!serial) return json(res, 404, { error: 'Device not found in cache' })
      return json(res, 200, await getDeviceEvents(serial, Math.min(parseInt(url.searchParams.get('limit') || '50', 10), 200)))
    }

    if (sub === 'tasks' && parts[2] && method === 'DELETE') {
      const r = await nbiRequest(`/tasks/${parts[2]}`, 'DELETE')
      return json(res, r.status, r.data)
    }

    if (!sub && method === 'DELETE') {
      const r = await nbiRequest(`/devices/${encodeURIComponent(deviceId)}`, 'DELETE')
      return json(res, r.status, r.data)
    }

    return json(res, 400, { error: 'Unknown device endpoint' })
  } catch (e) {
    log('GenieACS error:', e.message)
    return json(res, 502, { error: 'GenieACS NBI error: ' + e.message })
  }
}
|
|
|
|
// Read-only proxy over GenieACS configuration collections
// (provisions / presets / virtual-parameters / files / faults) plus a
// one-shot GET /acs/export bundle of all of them.
async function handleACSConfig (req, res, method, path) {
  if (!cfg.GENIEACS_NBI_URL) return json(res, 503, { error: 'GenieACS NBI not configured' })
  try {
    const segments = path.replace('/acs/', '').split('/').filter(Boolean)
    const resource = segments[0]
    const endpoints = { provisions: '/provisions/', presets: '/presets/', 'virtual-parameters': '/virtual-parameters/', files: '/files/', faults: '/faults/' }

    if (method === 'GET' && resource === 'export') {
      // Fetch all five collections in parallel; an individual failure
      // degrades to an empty list instead of failing the export.
      const collections = ['/provisions/', '/presets/', '/virtual-parameters/', '/files/', '/faults/']
      const [provisions, presets, virtualParams, files, faults] = await Promise.all(
        collections.map(p => nbiRequest(p).then(r => r.data).catch(() => []))
      )
      const arr = v => Array.isArray(v) ? v : []
      const payload = {
        exportedAt: new Date().toISOString(),
        source: cfg.GENIEACS_NBI_URL,
        provisions: arr(provisions),
        presets: arr(presets),
        virtualParameters: arr(virtualParams),
        // Only ship file metadata — never the file bodies.
        files: arr(files).map(f => ({ _id: f._id, metadata: f.metadata || {}, filename: f.filename, length: f.length, uploadDate: f.uploadDate })),
        faultCount: arr(faults).length,
        summary: { provisionCount: arr(provisions).length, presetCount: arr(presets).length, virtualParamCount: arr(virtualParams).length, fileCount: arr(files).length },
      }
      return json(res, 200, payload)
    }

    if (method === 'GET' && endpoints[resource]) {
      // An optional second segment addresses a single document by id.
      const id = segments[1]
      const nbiPath = id ? endpoints[resource] + encodeURIComponent(decodeURIComponent(id)) : endpoints[resource]
      const r = await nbiRequest(nbiPath)
      return json(res, r.status, r.data)
    }

    return json(res, 404, { error: 'Unknown ACS config endpoint' })
  } catch (e) {
    log('ACS config error:', e.message)
    return json(res, 502, { error: 'GenieACS config error: ' + e.message })
  }
}
|
|
|
|
// Public API: HTTP handlers plus the cache/poller primitives consumed
// by the rest of the server.
module.exports = { handle, handleACSConfig, summarizeDevice, cacheDevice, getCached, getCacheStats, fetchDeviceDetails, startPoller, stopPoller, logDeviceEvent, getDeviceEvents }
|