gigafibre-fsm/scripts/migration/import_ticket_msgs.py
louispaulb 101faa21f1 feat: inline editing, search, notifications + full repo cleanup
- InlineField component + useInlineEdit composable for Odoo-style dblclick editing
- Client search by name, account ID, and legacy_customer_id (or_filters)
- SMS/Email notification panel on ContactCard via n8n webhooks
- Ticket reply thread via Communication docs
- All migration scripts (51 files) now tracked
- Client portal and field tech app added to monorepo
- README rewritten with full feature list, migration summary, architecture
- CHANGELOG updated with all recent work
- ROADMAP updated with current completion status
- Removed hardcoded tokens from docs (use $ERP_SERVICE_TOKEN)
- .gitignore updated (docker/, .claude/, exports/, .quasar/)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-31 07:34:41 -04:00

197 lines
6.5 KiB
Python

#!/usr/bin/env python3
"""
Import legacy ticket_msg → ERPNext Comment on Issue.
Maps: ticket_msg.ticket_id → Issue (via legacy_ticket_id)
ticket_msg.staff_id → staff name for comment_by
Uses direct PostgreSQL INSERT for speed (784k+ messages).
Skips already-imported messages (checks by name pattern).
Run inside erpnext-backend-1:
/home/frappe/frappe-bench/env/bin/python /home/frappe/frappe-bench/import_ticket_msgs.py
"""
import pymysql
import psycopg2
import uuid
from datetime import datetime, timezone
# Legacy MySQL source (gestionclient billing/ticketing DB).
# SECURITY(review): credentials are hardcoded — prefer environment
# variables so they never land in version control.
LEGACY = {"host": "10.100.80.100", "user": "facturation", "password": "VD67owoj",
          "database": "gestionclient", "connect_timeout": 30, "read_timeout": 600}
# Target ERPNext PostgreSQL ("db" looks like a docker-compose service
# name — confirm against the deployment).  Same credential caveat.
PG = {"host": "db", "port": 5432, "user": "postgres", "password": "123",
      "dbname": "_eb65bdc0c4b1b2d6"}
# Fallback user recorded as owner/modified_by on imported comments.
ADMIN = "Administrator"
# Rows per bulk INSERT (round-trips vs. statement size trade-off).
BATCH_SIZE = 5000
def ts_to_dt(unix_ts):
    """Convert a legacy Unix timestamp to a Frappe-style datetime string.

    Returns a UTC "%Y-%m-%d %H:%M:%S.%f" string, or None for missing,
    zero, negative, or unconvertible timestamps so the caller can fall
    back to a default date.
    """
    if not unix_ts or unix_ts <= 0:
        return None
    try:
        return datetime.fromtimestamp(int(unix_ts), tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")
    except (ValueError, OverflowError, OSError):
        # BUGFIX: fromtimestamp raises OverflowError (not just ValueError/
        # OSError) for epochs outside the representable datetime range —
        # garbage legacy values must not abort a 784k-row import.
        return None
def log(msg):
    """Emit one progress line, flushed immediately so container logs stay live."""
    print(msg, flush=True)
def main():
    """Import legacy ticket_msg rows as Comment records on ERPNext Issues.

    Flow:
      1. Load legacy staff (staff_id → display name / email for comment_by).
      2. Build the legacy_ticket_id → Issue-name lookup from ERPNext Postgres.
      3. Stream ticket_msg via a server-side cursor (784k+ rows) and bulk
         insert in BATCH_SIZE chunks, skipping already-imported, orphaned
         and empty messages.  A failed batch is retried row-by-row so one
         bad row cannot sink 5000 good ones.
    """
    # Fallback timestamp for messages without a usable date_orig.
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")
    log("=== Import ticket_msg → Comment on Issue ===")

    # 1. Read legacy data
    log("Reading legacy staff...")
    mc = pymysql.connect(**LEGACY)
    pg = None
    try:
        cur = mc.cursor(pymysql.cursors.DictCursor)
        cur.execute("SELECT id, first_name, last_name, email FROM staff ORDER BY id")
        staff_map = {}
        for s in cur.fetchall():
            full_name = ((s.get("first_name") or "") + " " + (s.get("last_name") or "")).strip()
            staff_map[s["id"]] = {"name": full_name or "Staff #" + str(s["id"]),
                                  "email": s.get("email", "")}
        log(" {} staff loaded".format(len(staff_map)))

        # 2. Connect ERPNext PG
        log("Connecting to ERPNext PostgreSQL...")
        pg = psycopg2.connect(**PG)
        pgc = pg.cursor()

        # Issue lookup: legacy_ticket_id → ERPNext Issue name.
        pgc.execute('SELECT legacy_ticket_id, name FROM "tabIssue" WHERE legacy_ticket_id IS NOT NULL AND legacy_ticket_id > 0')
        issue_map = {r[0]: r[1] for r in pgc.fetchall()}
        log(" {} issues mapped".format(len(issue_map)))

        # Previously imported comments are identified by the TMSG- name prefix.
        pgc.execute("""SELECT name FROM "tabComment" WHERE name LIKE 'TMSG-%'""")
        existing = {r[0] for r in pgc.fetchall()}
        log(" {} existing TMSG comments (will skip)".format(len(existing)))

        # 3. Stream and import messages in batches.  SSDictCursor keeps the
        # result set server-side — the table is far too large for fetchall().
        log("Reading ticket_msg from legacy (streaming)...")
        cur_stream = mc.cursor(pymysql.cursors.SSDictCursor)
        cur_stream.execute("""SELECT id, ticket_id, staff_id, msg, date_orig, public, important
                              FROM ticket_msg ORDER BY ticket_id, id""")
        ok = skip_no_issue = skip_existing = skip_empty = err = 0
        batch = []
        total_read = 0
        for row in cur_stream:
            total_read += 1
            msg_name = "TMSG-{}".format(row["id"])
            issue_name = issue_map.get(row["ticket_id"])
            msg_text = row.get("msg") or ""
            if msg_name in existing:
                skip_existing += 1          # imported on a previous run
            elif not issue_name:
                skip_no_issue += 1          # ticket was never migrated
            elif not msg_text.strip():
                skip_empty += 1             # nothing worth importing
            else:
                staff = staff_map.get(row.get("staff_id"), {"name": "Système", "email": ""})
                msg_date = ts_to_dt(row.get("date_orig")) or now
                batch.append((
                    msg_name,                 # name
                    msg_date,                 # creation
                    msg_date,                 # modified
                    ADMIN,                    # modified_by
                    staff["email"] or ADMIN,  # owner
                    0,                        # docstatus
                    0,                        # idx
                    "Comment",                # comment_type
                    staff["email"],           # comment_email
                    "",                       # subject
                    staff["name"],            # comment_by
                    0,                        # published
                    1,                        # seen
                    "Issue",                  # reference_doctype
                    issue_name,               # reference_name
                    ADMIN,                    # reference_owner
                    msg_text,                 # content
                ))
                if len(batch) >= BATCH_SIZE:
                    n_ok, n_err = _flush(pg, pgc, batch)
                    ok += n_ok
                    err += n_err
                    batch = []
            # BUGFIX: the old progress check was `ok % 50000 == 0`, which
            # fired on every flush while ok == 0 and then stalled forever
            # once the row-by-row fallback nudged `ok` off an exact multiple
            # of 50000.  Keying on rows read guarantees steady output.
            if total_read % 50000 == 0:
                log(" read={} ok={} skip_issue={} skip_dup={} skip_empty={} err={}".format(
                    total_read, ok, skip_no_issue, skip_existing, skip_empty, err))

        # Final (possibly partial, possibly empty) batch.
        n_ok, n_err = _flush(pg, pgc, batch)
        ok += n_ok
        err += n_err
        cur_stream.close()
    finally:
        # Always release both connections, even on unexpected failure.
        mc.close()
        if pg is not None:
            pg.close()

    log("")
    log("=" * 60)
    log("Total read: {}".format(total_read))
    log("Imported: {}".format(ok))
    log("Skip (no issue): {}".format(skip_no_issue))
    log("Skip (existing): {}".format(skip_existing))
    log("Skip (empty): {}".format(skip_empty))
    log("Errors: {}".format(err))
    log("=" * 60)


def _flush(pg, pgc, rows):
    """Commit *rows* as one bulk INSERT; on failure retry row-by-row.

    Returns (inserted, failed).  An empty *rows* is a no-op, so the caller
    can flush the final batch unconditionally.
    """
    if not rows:
        return 0, 0
    try:
        _insert_batch(pgc, rows)
        pg.commit()
        return len(rows), 0
    except Exception:
        pg.rollback()
        # Fallback: isolate the bad row(s) without losing the good ones.
        inserted = failed = 0
        for r in rows:
            try:
                _insert_batch(pgc, [r])
                pg.commit()
                inserted += 1
            except Exception:
                pg.rollback()
                failed += 1
        return inserted, failed
def _insert_batch(pgc, rows):
"""Insert batch of Comment rows."""
args = ",".join(
pgc.mogrify("(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", r).decode()
for r in rows
)
pgc.execute("""
INSERT INTO "tabComment" (
name, creation, modified, modified_by, owner, docstatus, idx,
comment_type, comment_email, subject, comment_by, published, seen,
reference_doctype, reference_name, reference_owner, content
) VALUES """ + args + """ ON CONFLICT (name) DO NOTHING""")
if __name__ == "__main__":
main()