Backend services: - targo-hub: extract deepGetValue to helpers.js, DRY disconnect reasons lookup map, compact CAPABILITIES, consolidate vision.js prompts/schemas, extract dispatch scoring weights, trim section dividers across 9 files - modem-bridge: extract getSession() helper (6 occurrences), resetIdleTimer(), consolidate DM query factory, fix duplicate username fill bug, trim headers (server.js -36%, tplink-session.js -47%, docker-compose.yml -57%) Frontend: - useWifiDiagnostic: extract THRESHOLDS const, split processDiagnostic into 6 focused helpers (processOnlineStatus, processWanIPs, processRadios, processMeshNodes, processClients, checkRadioIssues) - EquipmentDetail: merge duplicate ROLE_LABELS, remove verbose comments Documentation (17 → 13 files, -1,400 lines): - New consolidated README.md (architecture, services, dependencies, auth) - Merge ECOSYSTEM-OVERVIEW into ARCHITECTURE.md - Merge MIGRATION-PLAN + ARCHITECTURE-COMPARE + FIELD-GAP + CHANGELOG → MIGRATION.md - Merge COMPETITIVE-ANALYSIS into PLATFORM-STRATEGY.md - Update ROADMAP.md with current phase status - Delete CONTEXT.md (absorbed into README) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
79 lines
2.5 KiB
Python
79 lines
2.5 KiB
Python
#!/usr/bin/env python3
|
|
"""Update opened_by_staff on Issues from legacy ticket.open_by → staff name."""
|
|
import pymysql
|
|
import psycopg2
|
|
|
|
# Legacy MySQL (gestionclient) connection settings. The long read_timeout
# accommodates the full-table ticket scan below.
# NOTE(review): credentials are hardcoded in source — consider moving them
# to environment variables or a secrets store.
LEGACY = {"host": "legacy-db", "user": "facturation", "password": "VD67owoj",
          "database": "gestionclient", "connect_timeout": 30, "read_timeout": 600}

# Target PostgreSQL connection settings (database containing "tabIssue").
# NOTE(review): hardcoded credentials here too — same concern as above.
PG = {"host": "db", "port": 5432, "user": "postgres", "password": "123",
      "dbname": "_eb65bdc0c4b1b2d6"}
|
|
|
|
def _load_legacy_data():
    """Read staff names and ticket openers from the legacy MySQL database.

    Returns:
        tuple: ``(staff_map, openers)`` where ``staff_map`` maps a legacy
        staff id to its "First Last" display name (empty names are skipped)
        and ``openers`` is a list of dict rows with keys ``id`` and
        ``open_by`` for every ticket that records an opener.
    """
    mc = pymysql.connect(**LEGACY)
    try:
        # DictCursor so rows expose column names instead of tuple indices.
        with mc.cursor(pymysql.cursors.DictCursor) as cur:
            cur.execute("SELECT id, first_name, last_name FROM staff ORDER BY id")
            staff_map = {}
            for s in cur.fetchall():
                name = ((s["first_name"] or "") + " " + (s["last_name"] or "")).strip()
                if name:
                    staff_map[s["id"]] = name

            cur.execute("SELECT id, open_by FROM ticket WHERE open_by > 0")
            openers = cur.fetchall()
    finally:
        # Close even if a query fails; everything needed is read by now.
        mc.close()
    return staff_map, openers


def _ensure_column(pg, pgc):
    """Add ``tabIssue.opened_by_staff`` (varchar(140)) if it does not exist."""
    pgc.execute(
        "SELECT column_name FROM information_schema.columns"
        " WHERE table_name = 'tabIssue' AND column_name = 'opened_by_staff'"
    )
    if not pgc.fetchone():
        pgc.execute('ALTER TABLE "tabIssue" ADD COLUMN opened_by_staff varchar(140)')
        pg.commit()
        print("Column opened_by_staff added")
    else:
        print("Column opened_by_staff already exists")


def _apply_updates(pg, pgc, batch, chunk_size=10000):
    """Stage ``batch`` in a temp table and join-update ``tabIssue``.

    Args:
        pg: open psycopg2 connection (used for commits).
        pgc: cursor on ``pg``.
        batch: dict mapping legacy ticket id -> staff display name.
        chunk_size: rows per multi-value INSERT into the staging table.

    Returns:
        int: number of issues actually updated. Rows that already have a
        non-empty ``opened_by_staff`` are left untouched.
    """
    if not batch:
        # Nothing to stage — skip the temp-table round trip entirely.
        return 0

    pgc.execute("""
        CREATE TEMP TABLE _staff_open (
            legacy_ticket_id integer PRIMARY KEY,
            staff_name varchar(140)
        )
    """)

    # Insert in chunks; mogrify produces safely-quoted value tuples so the
    # multi-row INSERT string is injection-free.
    items = list(batch.items())
    for i in range(0, len(items), chunk_size):
        chunk = items[i:i + chunk_size]
        args = ",".join(
            pgc.mogrify("(%s,%s)", (tid, name)).decode() for tid, name in chunk
        )
        pgc.execute("INSERT INTO _staff_open VALUES " + args)
        pg.commit()

    pgc.execute("""
        UPDATE "tabIssue" i
        SET opened_by_staff = so.staff_name
        FROM _staff_open so
        WHERE i.legacy_ticket_id = so.legacy_ticket_id
          AND (i.opened_by_staff IS NULL OR i.opened_by_staff = '')
    """)
    updated = pgc.rowcount
    pg.commit()

    pgc.execute("DROP TABLE _staff_open")
    pg.commit()
    return updated


def main():
    """Backfill ``tabIssue.opened_by_staff`` from legacy ``ticket.open_by``.

    Reads the staff roster and ticket openers from the legacy MySQL DB,
    ensures the target column exists in PostgreSQL, then bulk-updates
    issues (matched via ``legacy_ticket_id``) that do not yet have an
    opener recorded. Progress is reported on stdout.
    """
    staff_map, openers = _load_legacy_data()
    print(f"{len(openers)} tickets with open_by, {len(staff_map)} staff names")

    pg = psycopg2.connect(**PG)
    try:
        pgc = pg.cursor()
        _ensure_column(pg, pgc)

        # Only tickets whose opener resolves to a known (non-empty) staff
        # name are updated; staff_map never contains empty names.
        batch = {
            o["id"]: staff_map[o["open_by"]]
            for o in openers
            if o["open_by"] in staff_map
        }
        print(f"{len(batch)} tickets to update")

        updated = _apply_updates(pg, pgc, batch)
    finally:
        # Close the connection even on failure so the session isn't leaked.
        pg.close()

    print(f"Updated: {updated} issues with opened_by_staff")
|
|
|
|
# Script entry point: run the migration only when executed directly,
# not when imported.
if __name__ == "__main__":
    main()
|