Major additions accumulated over 9 days — single commit per request. Flow editor (new): - Generic visual editor for step trees, usable by project wizard + agent flows - PROJECT_KINDS / AGENT_KINDS catalogs decouple UI from domain - Drag-and-drop reorder via vuedraggable with scope isolation per peer group - Chain-aware depends_on rewrite on reorder (sequential only — DAGs preserved) - Variable picker with per-applies_to catalog (Customer / Quotation / Service Contract / Issue / Subscription), insert + copy-clipboard modes - trigger_condition helper with domain-specific JSONLogic examples - Global FlowEditorDialog mounted once in MainLayout, Odoo inline pattern - Server: targo-hub flow-runtime.js, flow-api.js, flow-templates.js - ERPNext: Flow Template/Run doctypes, scheduler, 5 seeded system templates - depends_on chips resolve to step labels instead of opaque "s4" ids QR/OCR scanner (field app): - Camera capture → Gemini Vision via targo-hub with 8s timeout - IndexedDB offline queue retries photos when signal returns - Watcher merges late-arriving scan results into the live UI Dispatch: - Planning mode (draft → publish) with offer pool for unassigned jobs - Shared presets, recurrence selector, suggested-slots dialog - PublishScheduleModal, unassign confirmation Ops app: - ClientDetailPage composables extraction (useClientData, useDeviceStatus, useWifiDiagnostic, useModemDiagnostic) - Project wizard: shared detail sections, wizard catalog/publish composables - Address pricing composable + pricing-mock data - Settings redesign hosting flow templates Targo-hub: - Contract acceptance (JWT residential + DocuSeal commercial tracks) - Referral system - Modem-bridge diagnostic normalizer - Device extractors consolidated Migration scripts: - Invoice/quote print format setup, Jinja rendering - Additional import + fix scripts (reversals, dates, customers, payments) Docs: - Consolidated: old scattered MDs → HANDOFF, ARCHITECTURE, DATA_AND_FLOWS, FLOW_EDITOR_ARCHITECTURE, 
BILLING_AND_PAYMENTS, CPE_MANAGEMENT, APP_DESIGN_GUIDELINES - Archived legacy wizard PHP for reference - STATUS snapshots for 2026-04-18/19 Cleanup: - Removed ~40 generated PDFs/HTMLs (invoice_preview*, rendered_jinja*) - .gitignore now covers invoice preview output + nested .DS_Store Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
218 lines · 7.4 KiB · JavaScript
import { ref } from 'vue'
|
|
import { HUB_URL } from 'src/config/hub'
|
|
|
|
// Severity thresholds for client-side issue detection.
// Signal values use the modem's 0-255 scale (the UI renders them as "n/255");
// lower readings are worse and trip the checks below.
// Frozen (including nested objects) so this shared config cannot be mutated
// at runtime by any consumer of the composable.
const THRESHOLDS = Object.freeze({
  meshSignal: Object.freeze({ critical: 60, warning: 80 }),
  clientSignal: Object.freeze({ critical: 50, warning: 70 }),
  packetLoss: Object.freeze({ critical: 10, warning: 5 }), // percent
  backhaulUtil: 80, // percent utilization of a mesh backhaul link
  cpu: 80, // percent CPU on a mesh node
  preferred2gChannels: Object.freeze([1, 6, 11]), // non-overlapping 2.4GHz channels
})

// In-memory diagnostic cache keyed by modem IP (or `auto:<serial>` for the
// serial-based flow), with a 2-minute time-to-live.
const cache = new Map()
const CACHE_TTL = 120_000
|
|
|
|
// Module-level reactive state: one shared instance for the whole app, so every
// component that calls useModemDiagnostic()/useWifiDiagnostic() observes the
// same loading flag, last error, and last result (singleton composable pattern).
const loading = ref(false) // true while a diagnostic request is in flight
const error = ref(null)    // last error message (French UI string), or null
const data = ref(null)     // last processed diagnostic payload, or null
|
|
|
|
/**
 * Fetch and process a modem diagnostic by IP + admin credentials.
 *
 * Results are cached per IP for CACHE_TTL ms. On success the processed
 * diagnostic is stored in the shared `data` ref and returned; on failure the
 * shared `error` ref is set and null is returned — callers never need
 * try/catch.
 *
 * @param {string} ip - Modem IP address (required).
 * @param {string} pass - Admin password (required).
 * @param {string} [user='superadmin'] - Admin username.
 * @returns {Promise<object|null>} Processed diagnostic, or null on error.
 */
async function fetchDiagnostic(ip, pass, user = 'superadmin') {
  if (!ip || !pass) { error.value = 'IP et mot de passe requis'; return null }

  // Serve from cache when fresh. NOTE(review): the key ignores `user`, so the
  // same modem queried under a different account shares one entry — presumably
  // the diagnostic is identical per device; confirm.
  const cached = cache.get(ip)
  if (cached && (Date.now() - cached.ts) < CACHE_TTL) {
    data.value = cached.data
    return cached.data
  }

  loading.value = true
  error.value = null

  try {
    const params = new URLSearchParams({ ip, user, pass })
    const res = await fetch(`${HUB_URL}/modem/diagnostic?${params}`)

    // Guard the JSON parse: a non-JSON error body (e.g. an HTML page from a
    // proxy/gateway) would otherwise throw a SyntaxError whose message hides
    // the real HTTP status.
    let json
    try {
      json = await res.json()
    } catch {
      throw new Error(`HTTP ${res.status}`)
    }

    if (!res.ok || json.error) throw new Error(json.error || `HTTP ${res.status}`)

    const processed = processDiagnostic(json)
    cache.set(ip, { data: processed, ts: Date.now() })
    data.value = processed
    return processed
  } catch (e) {
    // Surface the failure through the shared error ref; null signals "no data".
    error.value = e.message
    return null
  } finally {
    loading.value = false
  }
}
|
|
|
|
/**
 * Auto-fetch diagnostic by serial number — credentials resolved server-side
 * from ERPNext, so no password is required from the user.
 *
 * Same caching/error contract as fetchDiagnostic(); cache entries are keyed
 * `auto:<serial>` so they never collide with IP-keyed entries.
 *
 * @param {string} serial - Device serial number (required).
 * @returns {Promise<object|null>} Processed diagnostic, or null on error.
 */
async function fetchDiagnosticAuto(serial) {
  if (!serial) { error.value = 'Numero de serie requis'; return null }

  const cacheKey = `auto:${serial}`
  const cached = cache.get(cacheKey)
  if (cached && (Date.now() - cached.ts) < CACHE_TTL) {
    data.value = cached.data
    return cached.data
  }

  loading.value = true
  error.value = null

  try {
    const res = await fetch(`${HUB_URL}/modem/diagnostic/auto?serial=${encodeURIComponent(serial)}`)

    // Guard the JSON parse: a non-JSON error body (e.g. an HTML page from a
    // proxy/gateway) would otherwise throw a SyntaxError whose message hides
    // the real HTTP status.
    let json
    try {
      json = await res.json()
    } catch {
      throw new Error(`HTTP ${res.status}`)
    }

    if (!res.ok || json.error) throw new Error(json.error || `HTTP ${res.status}`)

    const processed = processDiagnostic(json)
    cache.set(cacheKey, { data: processed, ts: Date.now() })
    data.value = processed
    return processed
  } catch (e) {
    // Surface the failure through the shared error ref; null signals "no data".
    error.value = e.message
    return null
  } finally {
    loading.value = false
  }
}
|
|
|
|
/**
 * Process unified diagnostic data from the backend.
 *
 * The backend normalizes both TP-Link and Raisecom modems into the same
 * shape; this function layers client-side issue detection on top and sorts
 * all issues by severity.
 *
 * @param {object} raw - Unified diagnostic payload from targo-hub.
 * @returns {object} The normalized payload with a severity-sorted `issues` list.
 */
function processDiagnostic(raw) {
  // Copy the backend-reported issues so local detection can append freely.
  const issues = [...(raw.issues || [])]

  // Normalize optional sections: list sections default to [], object sections
  // to null (falsy values also collapse, matching the original `||` semantics).
  const meshNodes = raw.meshNodes || []
  const wifiClients = raw.wifiClients || []
  const radios = raw.radios || []
  const wanIPs = raw.wanIPs || []
  const ethernetPorts = raw.ethernetPorts || []
  const dhcpLeases = raw.dhcpLeases || []
  const wiredEquipment = raw.wiredEquipment || []
  const online = raw.online || null
  const device = raw.device || null
  const gpon = raw.gpon || null

  // Client-side issue detection on top of the backend's findings.
  checkMeshIssues(meshNodes, issues)
  checkClientIssues(wifiClients, meshNodes, issues)
  checkRadioIssues(radios, issues)

  // Most severe first; unknown severities sink to the bottom.
  const rank = { critical: 0, warning: 1, info: 2 }
  issues.sort((a, b) => (rank[a.severity] ?? 9) - (rank[b.severity] ?? 9))

  return {
    fetchedAt: raw.fetchedAt,
    durationMs: raw.durationMs,
    modemType: raw.modemType,
    issues,
    meshNodes,
    wifiClients,
    radios,
    wanIPs,
    online,
    ethernetPorts,
    dhcpLeases,
    wiredEquipment,
    device,
    gpon,
  }
}
|
|
|
|
/**
 * Append mesh-node issues to `issues` (mutated in place).
 *
 * For active satellite nodes on a Wi-Fi backhaul: flags weak backhaul signal
 * (critical/warning tiers) and saturated backhaul links. For every node,
 * flags high CPU usage.
 *
 * @param {Array<object>} meshNodes - Normalized mesh node records.
 * @param {Array<object>} issues - Issue list to append to.
 */
function checkMeshIssues(meshNodes, issues) {
  for (const node of meshNodes) {
    const backhaul = node.backhaul
    const isWifiSatellite = !node.isController && node.active && backhaul?.type === 'Wi-Fi'

    if (isWifiSatellite) {
      const sig = backhaul.signal
      const name = node.hostname

      if (sig < THRESHOLDS.meshSignal.critical) {
        issues.push({
          severity: 'critical',
          message: `${name}: signal mesh tres faible (${sig})`,
          detail: `Le noeud "${name}" a un signal de backhaul de ${sig}/255. Lien a ${backhaul.linkRate} Mbps.`,
          action: `Rapprocher le noeud "${name}" du routeur principal ou ajouter un noeud intermediaire.`,
        })
      } else if (sig < THRESHOLDS.meshSignal.warning) {
        issues.push({
          severity: 'warning',
          message: `${name}: signal mesh faible (${sig})`,
          detail: `Backhaul a ${backhaul.linkRate} Mbps, utilisation ${backhaul.utilization}%.`,
          action: `Envisager de rapprocher "${name}" du routeur pour ameliorer la vitesse.`,
        })
      }

      if (backhaul.utilization > THRESHOLDS.backhaulUtil) {
        issues.push({
          severity: 'warning',
          message: `${name}: backhaul sature (${backhaul.utilization}%)`,
          detail: `Le lien entre "${name}" et le routeur est utilise a ${backhaul.utilization}%.`,
          action: `Reduire le nombre d'appareils sur ce noeud ou connecter "${name}" en Ethernet.`,
        })
      }
    }

    // CPU check applies to every node, controller included.
    if (node.cpu > THRESHOLDS.cpu) {
      issues.push({
        severity: 'warning',
        message: `${node.hostname}: CPU eleve (${node.cpu}%)`,
        detail: `Le processeur du noeud est a ${node.cpu}% d'utilisation.`,
        action: `Redemarrer le noeud "${node.hostname}" si le probleme persiste.`,
      })
    }
  }
}
|
|
|
|
/**
 * Append Wi-Fi client issues to `issues` (mutated in place).
 *
 * For each active client: flags weak signal (only when a reading > 0 exists)
 * and packet loss, each with critical/warning tiers.
 *
 * @param {Array<object>} wifiClients - Normalized Wi-Fi client records.
 * @param {Array<object>} meshNodes - Currently unused; kept for signature stability.
 * @param {Array<object>} issues - Issue list to append to.
 */
function checkClientIssues(wifiClients, meshNodes, issues) {
  for (const c of wifiClients) {
    if (!c.active) continue

    const label = c.hostname || c.mac
    const linkMbps = Math.round((c.linkDown || 0) / 1000)
    const hasSignalReading = c.signal > 0

    if (hasSignalReading && c.signal < THRESHOLDS.clientSignal.critical) {
      issues.push({
        severity: 'critical',
        message: `${label}: signal tres faible (${c.signal}/255)`,
        detail: `Appareil "${label}" sur ${c.band || '?'}, lien ${linkMbps} Mbps.`,
        action: `Rapprocher l'appareil du noeud mesh le plus proche ou verifier les obstacles.`,
      })
    } else if (hasSignalReading && c.signal < THRESHOLDS.clientSignal.warning) {
      issues.push({
        severity: 'warning',
        message: `${label}: signal faible (${c.signal}/255)`,
        detail: `Vitesse reduite a ${linkMbps} Mbps.`,
        action: `Verifier le placement de l'appareil par rapport au noeud "${c.meshNode || 'principal'}".`,
      })
    }

    if (c.lossPercent > THRESHOLDS.packetLoss.critical) {
      issues.push({
        severity: 'critical',
        message: `${label}: ${c.lossPercent}% perte de paquets`,
        detail: `Retransmissions detectees.`,
        action: `Interference probable. Verifier le canal WiFi, les appareils voisins, ou changer la bande.`,
      })
    } else if (c.lossPercent > THRESHOLDS.packetLoss.warning) {
      issues.push({
        severity: 'warning',
        message: `${label}: ${c.lossPercent}% perte de paquets`,
        detail: `Performance reduite.`,
        action: `Envisager de changer de canal ou rapprocher l'appareil.`,
      })
    }
  }
}
|
|
|
|
/**
 * Flag 2.4GHz radios pinned to an overlapping (non-1/6/11) channel.
 *
 * Only fires when auto-channel is off and a positive channel number is
 * reported (0/undefined means unknown, so no issue is raised).
 *
 * @param {Array<object>} radios - Normalized radio records.
 * @param {Array<object>} issues - Issue list to append to (mutated in place).
 */
function checkRadioIssues(radios, issues) {
  for (const r of radios) {
    // `!(r.channel > 0)` (not `r.channel <= 0`) preserves the original
    // behavior for undefined/NaN channels.
    if (r.band !== '2.4GHz' || r.autoChannel || !(r.channel > 0)) continue
    // Idiomatic membership test instead of indexOf(...) === -1.
    if (THRESHOLDS.preferred2gChannels.includes(r.channel)) continue

    issues.push({
      severity: 'warning',
      message: `Canal 2.4GHz non optimal (${r.channel})`,
      detail: `Le canal ${r.channel} chevauche les canaux voisins. Les canaux 1, 6 ou 11 sont recommandes.`,
      action: `Changer le canal 2.4GHz a 1, 6 ou 11, ou activer le canal automatique.`,
    })
  }
}
|
|
|
|
/**
 * Composable entry point for modem diagnostics.
 *
 * Returns the module-level singleton state and fetchers, so every component
 * using this composable shares the same loading/error/data refs.
 */
export function useModemDiagnostic() {
  return {
    fetchDiagnostic,
    fetchDiagnosticAuto,
    loading,
    error,
    data,
  }
}
|
|
|
|
/**
 * Backward-compatible alias for useModemDiagnostic().
 *
 * Delegates instead of duplicating the returned object so the two entry
 * points can never drift apart.
 *
 * @deprecated Use useModemDiagnostic() in new code.
 */
export function useWifiDiagnostic() {
  return useModemDiagnostic()
}
|