gigafibre-fsm/scripts/bulk_submit.py
louispaulb 101faa21f1 feat: inline editing, search, notifications + full repo cleanup
- InlineField component + useInlineEdit composable for Odoo-style dblclick editing
- Client search by name, account ID, and legacy_customer_id (or_filters)
- SMS/Email notification panel on ContactCard via n8n webhooks
- Ticket reply thread via Communication docs
- All migration scripts (51 files) now tracked
- Client portal and field tech app added to monorepo
- README rewritten with full feature list, migration summary, architecture
- CHANGELOG updated with all recent work
- ROADMAP updated with current completion status
- Removed hardcoded tokens from docs (use $ERP_SERVICE_TOKEN)
- .gitignore updated (docker/, .claude/, exports/, .quasar/)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-31 07:34:41 -04:00

188 lines
6.5 KiB
Python

#!/usr/bin/env python3
"""
Bulk submit migration data in ERPNext:
1. Enable all disabled Items (so invoices can be submitted)
2. Submit all draft Sales Invoices
3. Submit all draft Payment Entries (which already have invoice references for reconciliation)
SAFETY: No email accounts are configured in ERPNext, so no emails will be sent.
Additionally, we pass flags.mute_emails=1 on every submit call as extra safety.
Usage:
python3 bulk_submit.py # dry-run (count only)
python3 bulk_submit.py --run # execute all steps
python3 bulk_submit.py --run --step items # only enable items
python3 bulk_submit.py --run --step inv # only submit invoices
python3 bulk_submit.py --run --step pay # only submit payments
python3 bulk_submit.py --run --customer CUST-4f09e799bd # one customer only (test)
"""
import argparse
import json
import os
import sys
import time
from urllib.parse import quote

import requests
# Base URL of the target ERPNext instance.
BASE = "https://erp.gigafibre.ca"

# API token in "key:secret" form. Prefer the ERP_SERVICE_TOKEN environment
# variable so the credential is not committed to source control; the literal
# is retained only as a backward-compatible fallback.
# TODO(security): rotate this exposed token and remove the fallback.
TOKEN = os.environ.get("ERP_SERVICE_TOKEN", "b273a666c86d2d0:06120709db5e414")

HEADERS = {
    "Authorization": f"token {TOKEN}",
    "Content-Type": "application/json",
}

BATCH_SIZE = 100  # documents fetched/submitted per batch
PAUSE = 0.3  # seconds to sleep between batches (be gentle on the server)
def api_get(path, params=None):
    """GET a REST endpoint under BASE and return the decoded JSON body.

    Raises requests.HTTPError on any non-2xx response.
    """
    response = requests.get(BASE + path, headers=HEADERS, params=params, timeout=30)
    response.raise_for_status()
    return response.json()
def api_put(path, data):
    """PUT a JSON payload to a REST endpoint under BASE.

    Returns (True, parsed_json) on success; on a non-2xx response returns
    (False, first_300_chars_of_body) instead of raising, so callers can
    log the error and keep going.
    """
    response = requests.put(BASE + path, headers=HEADERS, json=data, timeout=60)
    if response.ok:
        return True, response.json()
    return False, response.text[:300]
# ── Step 1: Enable all disabled items ──────────────────────────────
def enable_items(dry_run=False):
    """Re-enable every Item currently flagged disabled.

    Disabled Items block Sales Invoice submission, so this runs before the
    invoice/payment steps. In dry-run mode only the count is reported.
    """
    print("\n═══ Step 1: Enable disabled Items ═══")
    listing = api_get("/api/resource/Item", {
        "filters": json.dumps({"disabled": 1}),
        "fields": json.dumps(["name"]),
        "limit_page_length": 0,  # 0 = no page limit, fetch every match
    })
    disabled_items = listing.get("data", [])
    print(f" Found {len(disabled_items)} disabled items")
    if dry_run:
        return

    enabled = errored = 0
    for entry in disabled_items:
        item_name = entry["name"]
        success, detail = api_put(
            f"/api/resource/Item/{quote(item_name, safe='')}",
            {"disabled": 0},
        )
        if success:
            enabled += 1
        else:
            errored += 1
            print(f" FAIL enable {item_name}: {detail}")
    print(f" Enabled: {enabled}, Failed: {errored}")
# ── Generic bulk submit ───────────────────────────────────────────
def bulk_submit(doctype, label, filter_key, dry_run=False, customer=None):
    """Submit every draft (docstatus=0) document of `doctype` in batches.

    Args:
        doctype: ERPNext DocType to process (e.g. "Sales Invoice").
        label: Human-readable step banner printed at the start.
        filter_key: Field name used to scope to one customer
            ("customer" for invoices, "party" for payments).
        dry_run: If True, only report the draft count and return.
        customer: Optional customer/party name to limit the run (testing).

    Pagination note: limit_start stays at 0 on purpose — each successful
    submit flips docstatus to 1, which removes the doc from the draft
    filter, so the "next page" is always the first page of what remains.
    Permanently failing docs are tracked in `seen_failed` so they cannot
    cause an infinite loop.
    """
    print(f"\n═══ {label} ═══")
    filters = {"docstatus": 0}
    if customer:
        filters[filter_key] = customer
    # Count
    count_data = api_get("/api/method/frappe.client.get_count", {
        "doctype": doctype,
        "filters": json.dumps(filters),
    })
    total = count_data.get("message", 0)
    print(f" Total draft: {total}")
    if dry_run or total == 0:
        return
    submitted, failed = 0, 0
    errors = []  # first ~30 failure messages, printed at the end
    seen_failed = set()  # track permanently failed names to avoid infinite loop
    stall_count = 0  # consecutive batches with zero successful submits
    while True:
        data = api_get(f"/api/resource/{quote(doctype, safe='')}", {
            "filters": json.dumps(filters),
            "fields": json.dumps(["name"]),
            "limit_page_length": BATCH_SIZE,
            "limit_start": 0,  # see docstring: drafts shrink as we submit
            "order_by": "posting_date asc",
        })
        batch = data.get("data", [])
        if not batch:
            break
        # If every item in this batch already failed, we're stuck
        new_in_batch = [b for b in batch if b["name"] not in seen_failed]
        if not new_in_batch:
            print(f"\n All remaining {len(batch)} documents have errors — stopping.")
            break
        batch_submitted = 0
        for doc in batch:
            name = doc["name"]
            if name in seen_failed:
                continue
            # Submit with mute_emails flag
            # For Sales Invoice: set_posting_time=1 to keep original posting_date
            # (otherwise ERPNext resets to today, which breaks due_date validation)
            submit_data = {"docstatus": 1, "flags": {"mute_emails": 1, "ignore_notifications": 1}}
            if doctype == "Sales Invoice":
                submit_data["set_posting_time"] = 1
            success, resp = api_put(
                f"/api/resource/{quote(doctype, safe='')}/{quote(name, safe='')}",
                submit_data
            )
            if success:
                submitted += 1
                batch_submitted += 1
            else:
                failed += 1
                seen_failed.add(name)
                # resp may be raw text or parsed JSON; normalize and truncate
                err_msg = resp[:200] if isinstance(resp, str) else str(resp)[:200]
                if len(errors) < 30:
                    errors.append(f"{name}: {err_msg}")
            # Per-document progress line, rewritten in place via \r
            done = submitted + failed
            pct = int(done / total * 100) if total else 0
            print(f" Progress: {done}/{total} ({pct}%) — ok={submitted} fail={failed} ", end="\r")
        if batch_submitted == 0:
            stall_count += 1
            if stall_count > 3:
                print(f"\n Stalled after {stall_count} batches with no progress — stopping.")
                break
        else:
            stall_count = 0
        time.sleep(PAUSE)
    print(f"\n Done: submitted={submitted}, failed={failed}")
    if errors:
        print(f" Errors (first {len(errors)}):")
        for e in errors:
            print(f" {e}")
# ── Main ───────────────────────────────────────────────────────────
if __name__ == "__main__":
    # CLI: dry-run by default; --run executes; --step narrows to one phase;
    # --customer limits scope for a safe test pass.
    cli = argparse.ArgumentParser(description="Bulk submit ERPNext migration data")
    cli.add_argument("--run", action="store_true", help="Actually execute (default is dry-run)")
    cli.add_argument("--step", choices=["items", "inv", "pay"], help="Run only one step")
    cli.add_argument("--customer", help="Limit to one customer (for testing)")
    opts = cli.parse_args()

    dry_run = not opts.run
    if dry_run:
        print("DRY RUN — pass --run to execute\n")

    selected = opts.step or "all"
    if selected in ("all", "items"):
        enable_items(dry_run)
    if selected in ("all", "inv"):
        bulk_submit("Sales Invoice", "Step 2: Submit Sales Invoices", "customer", dry_run, opts.customer)
    if selected in ("all", "pay"):
        bulk_submit("Payment Entry", "Step 3: Submit Payment Entries", "party", dry_run, opts.customer)
    print("\nDone!")