Backend services: - targo-hub: extract deepGetValue to helpers.js, DRY disconnect reasons lookup map, compact CAPABILITIES, consolidate vision.js prompts/schemas, extract dispatch scoring weights, trim section dividers across 9 files - modem-bridge: extract getSession() helper (6 occurrences), resetIdleTimer(), consolidate DM query factory, fix duplicate username fill bug, trim headers (server.js -36%, tplink-session.js -47%, docker-compose.yml -57%) Frontend: - useWifiDiagnostic: extract THRESHOLDS const, split processDiagnostic into 6 focused helpers (processOnlineStatus, processWanIPs, processRadios, processMeshNodes, processClients, checkRadioIssues) - EquipmentDetail: merge duplicate ROLE_LABELS, remove verbose comments Documentation (17 → 13 files, -1,400 lines): - New consolidated README.md (architecture, services, dependencies, auth) - Merge ECOSYSTEM-OVERVIEW into ARCHITECTURE.md - Merge MIGRATION-PLAN + ARCHITECTURE-COMPARE + FIELD-GAP + CHANGELOG → MIGRATION.md - Merge COMPETITIVE-ANALYSIS into PLATFORM-STRATEGY.md - Update ROADMAP.md with current phase status - Delete CONTEXT.md (absorbed into README) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
158 lines
4.6 KiB
Python
158 lines
4.6 KiB
Python
"""
|
|
Phase 3 standalone: Import legacy invoice.notes as Comments on Sales Invoice.
|
|
Runs from any host that can reach BOTH:
|
|
- Legacy MariaDB (legacy-db)
|
|
- ERPNext API (erp.gigafibre.ca)
|
|
|
|
Usage:
|
|
python import_invoice_notes.py
|
|
"""
|
|
import pymysql
|
|
import requests
|
|
import json
|
|
import time
|
|
import sys
|
|
import os
|
|
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
|
|
|
|
# ── Config ──
# Legacy MariaDB connection. All values are overridable via environment
# variables; the defaults match the historical deployment.
# SECURITY NOTE(review): the fallback password is hard-coded in source —
# prefer setting LEGACY_PASS in the environment and rotating this credential.
LEGACY_HOST = os.environ.get("LEGACY_HOST", "legacy-db")
LEGACY_USER = os.environ.get("LEGACY_USER", "facturation")
LEGACY_PASS = os.environ.get("LEGACY_PASS", "VD67owoj")
LEGACY_DB = os.environ.get("LEGACY_DB", "gestionclient")

# ERPNext target. Token auth (key+secret) is preferred; when absent the
# script falls back to cookie login with ERP_USER/ERP_PASS (see below).
ERP_URL = "https://erp.gigafibre.ca"
ERP_KEY = os.environ.get("ERP_API_KEY", "")
ERP_SECRET = os.environ.get("ERP_API_SECRET", "")

# When True, report what would be imported and exit before any API writes.
DRY_RUN = False
# ── Connect to legacy DB and pull every invoice that carries a note ──
print("Connecting to legacy MariaDB...")
legacy = pymysql.connect(
    host=LEGACY_HOST, user=LEGACY_USER, password=LEGACY_PASS,
    database=LEGACY_DB, cursorclass=pymysql.cursors.DictCursor
)

try:
    with legacy.cursor() as cur:
        # TRIM(notes) != '' drops whitespace-only notes at the DB level;
        # FROM_UNIXTIME converts the legacy epoch column to a datetime.
        cur.execute("""
            SELECT id, notes, account_id, FROM_UNIXTIME(date_orig) as date_created
            FROM invoice
            WHERE notes IS NOT NULL AND notes != '' AND TRIM(notes) != ''
            ORDER BY id
        """)
        noted_invoices = cur.fetchall()
finally:
    # Close even if the query raises — the connection is not needed again.
    legacy.close()
print(f" Legacy invoices with notes: {len(noted_invoices)}")
# ── ERPNext session: token auth preferred, cookie login as fallback ──
sess = requests.Session()
if ERP_KEY and ERP_SECRET:
    # API token auth: header rides on every subsequent request.
    sess.headers['Authorization'] = f'token {ERP_KEY}:{ERP_SECRET}'
else:
    # Try cookie auth — login with credentials from the environment.
    erp_user = os.environ.get("ERP_USER", "Administrator")
    erp_pass = os.environ.get("ERP_PASS", "")
    if not erp_pass:
        print("ERROR: Set ERP_API_KEY+ERP_API_SECRET or ERP_USER+ERP_PASS env vars")
        sys.exit(1)
    r = sess.post(f"{ERP_URL}/api/method/login", data={"usr": erp_user, "pwd": erp_pass})
    if r.status_code != 200:
        print(f"Login failed: {r.status_code} {r.text[:200]}")
        sys.exit(1)
    print(f" Logged in as {erp_user}")
# ── Fetch the names of all existing Sales Invoices (paged, 10k per page) ──
# Used below to skip legacy invoices that were never migrated to ERPNext.
print(" Fetching existing Sales Invoices...")
existing_sinv = set()
offset = 0
while True:
    r = sess.get(f"{ERP_URL}/api/resource/Sales Invoice", params={
        'fields': '["name"]', 'limit_page_length': 10000, 'limit_start': offset,
    })
    data = r.json().get('data', [])
    if not data:
        # Empty page ⇒ past the last record.
        break
    existing_sinv.update(d['name'] for d in data)
    offset += len(data)
print(f" Existing SINVs: {len(existing_sinv)}")
# ── Fetch invoices that already carry a Comment (paged, 10k per page) ──
# Makes the script idempotent: re-running it skips invoices commented earlier.
print(" Fetching existing Comments on Sales Invoices...")
existing_comments = set()
offset = 0
while True:
    r = sess.get(f"{ERP_URL}/api/resource/Comment", params={
        'fields': '["reference_name"]',
        'filters': json.dumps({"reference_doctype": "Sales Invoice", "comment_type": "Comment"}),
        'limit_page_length': 10000, 'limit_start': offset,
    })
    data = r.json().get('data', [])
    if not data:
        # Empty page ⇒ past the last record.
        break
    existing_comments.update(d['reference_name'] for d in data)
    offset += len(data)
print(f" Existing Comments on invoices: {len(existing_comments)}")
# ── Build the import batch ──
# Keep only notes whose target invoice exists in ERPNext AND does not
# already have a migrated Comment (idempotency guard).
batch = []
skipped = 0
for inv in noted_invoices:
    # Legacy invoice id maps 1:1 onto the ERPNext document name.
    sinv_name = f"SINV-{inv['id']}"
    if sinv_name not in existing_sinv or sinv_name in existing_comments:
        # Never migrated, or already carries a Comment — skip either way.
        skipped += 1
        continue
    notes = inv['notes'].strip()
    if not notes:
        # Whitespace-only after strip (should be rare: the SQL already TRIMs).
        continue
    batch.append({
        'sinv': sinv_name,
        'content': notes,
        'creation': str(inv['date_created']) if inv['date_created'] else None,
    })

print(f" Notes to import: {len(batch)}, skipped: {skipped}")

if DRY_RUN:
    print(" ** DRY RUN — no changes **")
    sys.exit(0)
# ── Import via API ──
# One POST per note. Failures are counted but never abort the run
# (best-effort migration); only the first few are printed to keep logs sane.
MAX_ERROR_PRINTS = 5

t0 = time.time()
imported = 0
errors = 0
for i, note in enumerate(batch):
    # NOTE(review): note['creation'] is collected upstream but never sent —
    # legacy timestamps are lost. Confirm whether ERPNext accepts 'creation'
    # on insert before adding it here.
    payload = {
        'doctype': 'Comment',
        'comment_type': 'Comment',
        'reference_doctype': 'Sales Invoice',
        'reference_name': note['sinv'],
        'content': note['content'],
        'comment_by': 'Système legacy',
    }
    try:
        r = sess.post(f"{ERP_URL}/api/resource/Comment", json=payload)
        if r.status_code in (200, 201):
            imported += 1
        else:
            errors += 1
            if errors <= MAX_ERROR_PRINTS:
                print(f" ERR {note['sinv']}: {r.status_code} {r.text[:100]}")
    except Exception as e:
        # Broad on purpose: a single bad row or transient network fault must
        # not kill a long migration — count it and move on.
        errors += 1
        if errors <= MAX_ERROR_PRINTS:
            print(f" EXCEPTION {note['sinv']}: {e}")

    if (i + 1) % 500 == 0:
        elapsed = time.time() - t0
        rate = (i + 1) / elapsed
        print(f" Progress: {i+1}/{len(batch)} ({rate:.0f}/s) imported={imported} errors={errors}")

elapsed = time.time() - t0
print(f"\n DONE: {imported} imported, {errors} errors [{elapsed:.0f}s]")