gigafibre-fsm/scripts/migration/export_genieacs.sh
louispaulb 56ad97bc71 feat: GenieACS config export + TR-069 to TR-369 migration plan
- Add /acs/export endpoint: dumps all provisions, presets, virtual
  params, files metadata in one call (insurance policy for migration)
- Add /acs/provisions, /acs/presets, /acs/virtual-parameters, /acs/files
- Shell script export_genieacs.sh for offline full backup
- TR069-TO-TR369-MIGRATION.md: phased migration plan from GenieACS
  to Oktopus with parallel run, provision mapping, CPE batching

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-02 21:03:41 -04:00

121 lines
5.3 KiB
Bash
Executable File

#!/bin/bash
# ============================================================
# GenieACS Full Config Export
# ============================================================
# Exports all provisions, presets, virtual parameters, and file
# metadata from GenieACS NBI API to local JSON files.
#
# This is the "insurance policy" — captures all ACS logic before
# migrating to Oktopus TR-369 or if GenieACS becomes unmaintained.
#
# Usage:
# ./export_genieacs.sh [NBI_URL]
# ./export_genieacs.sh http://10.5.2.115:7557
#
# Output: ./genieacs-export-YYYY-MM-DD/ directory with:
# provisions.json — all provision scripts (the business logic)
# presets.json — trigger rules (when provisions fire)
# virtual-parameters.json — computed/virtual parameters
# files.json — firmware/config file metadata
# faults.json — current active faults
# devices-summary.json — device fleet summary
# full-export.json — everything combined
# ============================================================
set -euo pipefail

# NBI endpoint: first CLI arg, or the default lab instance.
NBI="${1:-http://10.5.2.115:7557}"
# Local (not UTC) date, so the export dir matches the operator's day.
DATE=$(date +%Y-%m-%d)
DIR="genieacs-export-${DATE}"
# These never change after startup — guard against accidental reassignment.
readonly NBI DATE DIR

echo "=== GenieACS Config Export ==="
echo "NBI URL: ${NBI}"
echo "Output: ${DIR}/"
echo ""
mkdir -p "${DIR}"
#######################################
# Fetch one NBI collection, pretty-print it to ${DIR}/<path>.json.
# Falls back to an empty JSON array when the endpoint is unreachable
# or returns invalid JSON, so every expected file always exists.
# Globals:   NBI (read), DIR (read)
# Arguments: $1 step number (for the [n/7] progress prefix)
#            $2 collection path (also the output file base name)
#            $3 human-readable heading
#            $4 suffix for the count line (e.g. "provisions exported")
# Outputs:   progress lines to stdout; JSON to ${DIR}/$2.json
#######################################
export_collection() {
  local step=$1 path=$2 heading=$3 suffix=$4
  local out="${DIR}/${path}.json"
  echo "[${step}/7] Exporting ${heading}..."
  # json.tool both validates and pretty-prints; its stderr is silenced
  # because the || fallback already reports via the count line.
  curl -sf "${NBI}/${path}/" | python3 -m json.tool > "${out}" 2>/dev/null || echo "[]" > "${out}"
  local count
  count=$(python3 -c "import json; print(len(json.load(open('${out}'))))" 2>/dev/null || echo "?")
  echo " ${count} ${suffix}"
}

export_collection 1 provisions "provisions (scripts)" "provisions exported"
export_collection 2 presets "presets (trigger rules)" "presets exported"
export_collection 3 virtual-parameters "virtual parameters" "virtual parameters exported"
export_collection 4 files "files metadata" "files catalogued"
export_collection 5 faults "active faults" "active faults"
echo "[6/7] Exporting device summary..."
# Pull a projected device list (IDs, last inform, tags only) and reduce it
# to fleet statistics. A device is counted "online" if its last inform was
# within the past 5 minutes. Any failure falls back to an empty object.
curl -sf "${NBI}/devices/?projection=DeviceID,_lastInform,_tags&limit=10000" | python3 -c "
import json, sys
from datetime import datetime, timezone

devices = json.load(sys.stdin)
now = datetime.now(timezone.utc)
stats = {'total': len(devices), 'online': 0, 'offline': 0, 'models': {}, 'manufacturers': {}}
for d in devices:
    did = d.get('DeviceID', {})
    model = did.get('ProductClass', {}).get('_value', 'Unknown') if isinstance(did.get('ProductClass'), dict) else 'Unknown'
    mfr = did.get('Manufacturer', {}).get('_value', 'Unknown') if isinstance(did.get('Manufacturer'), dict) else 'Unknown'
    li = d.get('_lastInform')
    online = False
    if li:
        try:
            age = (now - datetime.fromisoformat(li.replace('Z', '+00:00'))).total_seconds()
            online = age < 300
        except Exception:
            # Unparseable timestamp: leave the device counted as offline.
            pass
    if online:
        stats['online'] += 1
    else:
        stats['offline'] += 1
    stats['models'][model] = stats['models'].get(model, 0) + 1
    stats['manufacturers'][mfr] = stats['manufacturers'].get(mfr, 0) + 1
json.dump(stats, sys.stdout, indent=2)
" > "${DIR}/devices-summary.json" 2>/dev/null || echo "{}" > "${DIR}/devices-summary.json"
echo " Done"
echo "[7/7] Creating combined export..."
# Merge the six per-collection files into one self-describing document.
# Missing or invalid files become [] so the combined export always builds.
python3 -c "
import json

data = {
    'exportedAt': '$(date -u +%Y-%m-%dT%H:%M:%SZ)',
    'source': '${NBI}',
}
for name in ['provisions', 'presets', 'virtual-parameters', 'files', 'faults', 'devices-summary']:
    try:
        with open('${DIR}/' + name + '.json') as f:
            data[name.replace('-', '_')] = json.load(f)
    except (OSError, ValueError):
        data[name.replace('-', '_')] = []
with open('${DIR}/full-export.json', 'w') as f:
    json.dump(data, f, indent=2)
" 2>/dev/null
echo " ${DIR}/full-export.json"
# Post-export checklist and the TR-069 → TR-369 concept mapping.
# Unquoted here-doc so ${NBI} expands exactly as the echoes did.
cat <<EOF

=== Export Complete ===

Next steps:
 1. Review provisions.json — these are your business logic scripts
 2. Review presets.json — these define WHEN each provision runs
 3. Map each provision to an Oktopus USP equivalent
 4. Download firmware files: curl -o firmware.bin '${NBI}/files/<file_id>'

Provision → Oktopus mapping guide:
 GenieACS declare() → USP Set message
 GenieACS ext() → Oktopus webhook/script
 GenieACS log() → Oktopus event logging
 Preset tags → Oktopus device groups
 Preset events → Oktopus MQTT subscriptions
EOF