"""Tiny HTTP server that:
1. Serves static files from current dir (factory_demo.html etc).
2. Proxies /api/shopify/* to Shopify Admin API with token injected (avoids CORS + hides token).
3. Proxies /api/meta/* to Meta Graph API (placeholder for future FB Ads panel).

Run: python proxy_server.py
Listens on http://localhost:8765
"""
import http.server
import socketserver
import urllib.request
import urllib.error
import urllib.parse
import json
import os
import re
import sys
from pathlib import Path

PORT = 8765
ROOT = Path(__file__).parent  # static files and all state files live next to this script
SHOPIFY_STORE = '8ffa78-11.myshopify.com'
# SECURITY NOTE(review): Admin API token hard-coded in source. Rotate this token and
# load it from an environment variable or a local state file before committing/sharing.
SHOPIFY_TOKEN = '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'
SHOPIFY_API_VERSION = '2024-04'

# Trello credentials (load from state file)
def _load_trello():
    state_path = Path(r"C:/Users/ferna/.claude/state/trello.json")
    if state_path.exists():
        return json.loads(state_path.read_text(encoding='utf-8'))
    return {}
TRELLO = _load_trello()

class ProxyHandler(http.server.SimpleHTTPRequestHandler):
    """Static-file handler extended with the factory-demo /api/* endpoints.

    Routing lives in do_GET/do_POST/do_PUT/do_DELETE at the bottom:
      - /api/shopify/*        -> proxied to the Shopify Admin API with the token injected
      - /api/task-state       -> shared flow state (_task_state.json) with server-side log merge
      - /api/start-flow       -> saves flow input to _flow_inbox/ and spawns orchestrator_v2
      - /api/force-reset, /api/release-reset -> reset-lock management (_reset.lock)
      - /api/caja[/<id>], /api/runs, /api/cbr/*, /api/pdf/*, /api/master-pdf,
        /api/create-trello-card
    Every response (static or API) gets permissive CORS headers via end_headers().
    """

    def __init__(self, *args, **kwargs):
        # Always serve static files from the script's directory, regardless of CWD.
        super().__init__(*args, directory=str(ROOT), **kwargs)

    def end_headers(self) -> None:
        # Inject permissive CORS headers on every response (static files included).
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Content-Type, X-Shopify-Access-Token')
        super().end_headers()

    def do_OPTIONS(self) -> None:
        # CORS preflight: empty 204, headers added by end_headers().
        self.send_response(204)
        self.end_headers()

    def proxy_shopify(self, method: str = 'GET') -> None:
        """Forward /api/shopify/<path>?<query> to the Shopify Admin API.

        The access token is injected server-side (keeps it out of the browser
        and sidesteps CORS). The upstream status, Content-Type and body are
        relayed as-is; upstream HTTP errors keep their status code, and any
        other failure becomes a 500 with a JSON error body.
        """
        # /api/shopify/<path>?<query>
        rest = self.path[len('/api/shopify/'):]
        url = f"https://{SHOPIFY_STORE}/admin/api/{SHOPIFY_API_VERSION}/{rest}"
        try:
            body = None
            if method in ('POST', 'PUT', 'PATCH'):
                length = int(self.headers.get('Content-Length', 0))
                if length:
                    body = self.rfile.read(length)
            req = urllib.request.Request(url, data=body, method=method, headers={
                'X-Shopify-Access-Token': SHOPIFY_TOKEN,
                'Accept': 'application/json',
                'Content-Type': self.headers.get('Content-Type', 'application/json'),
            })
            with urllib.request.urlopen(req, timeout=30) as r:
                data = r.read()
                self.send_response(r.status)
                self.send_header('Content-Type', r.headers.get('Content-Type', 'application/json'))
                self.end_headers()
                self.wfile.write(data)
        except urllib.error.HTTPError as e:
            # Relay the upstream error status + body so the client sees Shopify's message.
            self.send_response(e.code)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            try:
                self.wfile.write(e.read())
            except Exception:
                self.wfile.write(json.dumps({'error': str(e)}).encode('utf-8'))
        except Exception as e:
            self.send_response(500)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({'error': str(e)}).encode('utf-8'))

    def _reset_lock_active(self) -> bool:
        """msg 686 Fer 2026-05-09: server-side lock that rejects writes to
        _task_state.json while active. Lets clients running stale cached code
        stop overwriting the state. TTL is 10 min from lock creation."""
        lock = ROOT / '_reset.lock'
        if not lock.exists():
            return False
        try:
            from datetime import datetime
            ts = float(lock.read_text(encoding='utf-8').strip())
            age = (datetime.now().timestamp() - ts)
            if age > 600:  # 10 min TTL
                lock.unlink()
                return False
            return True
        except Exception:
            return True  # if the lock file is corrupt, safer to keep it active

    def handle_task_state_get(self) -> None:
        """GET /api/task-state — return _task_state.json, or JSON null when absent."""
        # While the reset lock is active -> always null (new clients detect it and clean up locally)
        if self._reset_lock_active():
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'null')
            return
        path = ROOT / '_task_state.json'
        if not path.exists():
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'null')
            return
        try:
            data = path.read_bytes()
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(data)
        except Exception as e:
            self.send_error(500, str(e))

    def handle_task_state_post(self) -> None:
        """POST/PUT /api/task-state — persist the client's state, merging logs with disk."""
        # While the reset lock is active -> reject writes (old clients without a handler just ignore the error)
        if self._reset_lock_active():
            self.send_response(423)  # Locked
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'{"error":"reset lock active","reset":true}')
            return
        try:
            length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(length) if length else b'null'
            path = ROOT / '_task_state.json'
            # Server-side LOG MERGE — fixes race condition msg 4733/4734 2026-05-09:
            # the HTML's persistTaskState POSTs every 2s with ITS logs (limited).
            # orchestrator_v2 writes its real logs directly to _task_state.json.
            # A plain overwrite would lose the orchestrator logs. Fix: merge server-side.
            # Dedup by (ts, text). Sort by ts. Cap at 200 entries.
            try:
                if body and body != b'null':
                    incoming = json.loads(body.decode('utf-8'))
                    if isinstance(incoming, dict) and path.exists():
                        try:
                            disk = json.loads(path.read_bytes().decode('utf-8'))
                        except Exception:
                            disk = None
                        if isinstance(disk, dict) and disk.get('flow_id') == incoming.get('flow_id', disk.get('flow_id')):
                            disk_logs = disk.get('logs') or []
                            body_logs = incoming.get('logs') or []
                            seen = set()
                            merged = []
                            for l in disk_logs + body_logs:
                                if not isinstance(l, dict): continue
                                key = (l.get('ts', ''), l.get('text', ''))
                                if key in seen: continue
                                seen.add(key)
                                merged.append(l)
                            merged.sort(key=lambda x: x.get('ts', ''))
                            incoming['logs'] = merged[-200:]
                            # If disk already reached state='done', never downgrade it.
                            if disk.get('state') == 'done' and incoming.get('state') != 'done':
                                incoming['state'] = 'done'
                                incoming['success'] = disk.get('success')
                            # If disk.routeIdx > incoming.routeIdx -> keep the larger one.
                            d_idx = disk.get('routeIdx', 0) or 0
                            i_idx = incoming.get('routeIdx', 0) or 0
                            if d_idx > i_idx:
                                incoming['routeIdx'] = d_idx
                            # FIX 2026-05-09 (Fer msg 4852): preserve the disk's flow_id when the
                            # incoming payload omits it (the HTML poll persistTaskState leaves it out).
                            # Without this, /api/caja returned 404 "flow_id not in state" even though
                            # the flow was active.
                            if not incoming.get('flow_id') and disk.get('flow_id'):
                                incoming['flow_id'] = disk.get('flow_id')
                        body = json.dumps(incoming, ensure_ascii=False, indent=2).encode('utf-8')
            except Exception as e:
                # If the merge fails, write what came in (fallback to the previous behavior).
                print(f'[merge_warn] {e}')
            path.write_bytes(body)
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'{"ok":true}')
        except Exception as e:
            self.send_error(500, str(e))

    def handle_task_state_delete(self) -> None:
        """DELETE /api/task-state — remove the state file, unless it must be kept."""
        # FIX 2026-05-09 (Fer msg 4852+4872 "if that caja finished well it must always stay"):
        # NEVER delete the state when state==done. A completed caja is a permanent rack log.
        # Also don't delete while a flow is actively processing in _runs/ (don't break a live flow).
        # Only delete if there is NO done flow and NO recently-active processing flow.
        try:
            import time as _t
            path = ROOT / '_task_state.json'
            if path.exists():
                try:
                    state = json.loads(path.read_bytes().decode('utf-8'))
                except Exception:
                    state = {}
                # done -> permanent, NEVER delete
                if state.get('state') == 'done':
                    self.send_response(200)
                    self.send_header('Content-Type', 'application/json')
                    self.end_headers()
                    self.wfile.write(b'{"ok":true,"kept":"done_state_permanent"}')
                    return
                # processing -> check for a live flow (caja.json modified within the last 30 min)
                runs = ROOT / '_runs'
                active = False
                if runs.exists():
                    cutoff = _t.time() - 1800
                    for d in runs.iterdir():
                        if d.is_dir():
                            caja = d / 'caja.json'
                            if caja.exists() and caja.stat().st_mtime > cutoff:
                                active = True
                                break
                if not active:
                    path.unlink()
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'{"ok":true}')
        except Exception as e:
            self.send_error(500, str(e))

    def handle_force_reset(self) -> None:
        """msg 686 Fer 2026-05-09: clear the state and create a 10-min lock.
        While the lock is active: GET returns null, POST returns 423.
        Any new client will see an empty server and perform its local cleanup."""
        try:
            from datetime import datetime
            state = ROOT / '_task_state.json'
            if state.exists():
                state.unlink()
            lock = ROOT / '_reset.lock'
            lock.write_text(str(datetime.now().timestamp()), encoding='utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'{"ok":true,"reset_lock_active":true,"ttl_min":10}')
        except Exception as e:
            self.send_response(500)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({'error': str(e)}).encode('utf-8'))

    def handle_release_reset(self) -> None:
        """POST /api/release-reset — remove the reset lock (no-op if absent)."""
        try:
            lock = ROOT / '_reset.lock'
            if lock.exists():
                lock.unlink()
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'{"ok":true,"reset_lock_active":false}')
        except Exception as e:
            self.send_error(500, str(e))

    def handle_cbr_get(self) -> None:
        """GET /api/cbr/<filename> — serve _cbr/<filename>; empty JSON array when absent."""
        # /api/cbr/<filename>  -> reads _cbr/<filename>.json
        rel = self.path[len('/api/cbr/'):]
        # Sanitize: only allow alphanum + underscore + dot (blocks path traversal)
        if not all(c.isalnum() or c in '._-' for c in rel):
            self.send_error(400, 'invalid filename')
            return
        path = ROOT / '_cbr' / rel
        if not path.exists():
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(b'[]')
            return
        try:
            data = path.read_bytes()
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(data)
        except Exception as e:
            self.send_error(500, str(e))

    def handle_create_trello_card(self) -> None:
        """POST /api/create-trello-card
        Body: {product, url, country, saturated, history?, summary?}
        Creates a real card on the 'Editor de videos' board, 'Input' list.
        Returns {ok, short_url, card_id, name}.
        """
        try:
            length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(length) if length else b'{}'
            data = json.loads(body.decode('utf-8'))

            product = data.get('product', 'Producto')
            input_url = data.get('url', '')
            country = data.get('country', 'ES')
            saturated = 'SATURADO' if data.get('saturated') else 'NO_SATURADO'
            summary = data.get('summary', 'Flow factory_v4 completado automáticamente')

            from datetime import datetime
            today = datetime.now().strftime('%Y-%m-%d')
            name = f"Flow factory_v4 · {product} · {country} · {saturated} · {today}"
            desc = (
                f"**Origen:** factory_v4 pipeline (auto-generada al cierre del flow)\n"
                f"**Producto:** {product}\n"
                f"**País:** {country}\n"
                f"**Saturado:** {data.get('saturated', False)}\n"
                f"**URL input:** {input_url}\n\n"
                f"---\n\n"
                f"## Síntesis\n\n{summary}\n\n"
                f"---\n\n"
                f"Card creada automáticamente por factory_v4 al avanzar la caja por las 6 etapas (DESCARGAR → ANALIZAR → INTERPRETAR → BUSCAR → COMPARADOR → TRELLO EDITOR).\n"
            )

            # Fallback ids are the known 'Editor de videos' board/list.
            board_id = TRELLO.get('videos_board_id', '69dfeaff21afd9a2e13bd708')
            list_id = TRELLO.get('videos_list_id', '69dfeb002c3e1d314649ff84')  # Input

            params = urllib.parse.urlencode({
                'key': TRELLO.get('api_key', ''),
                'token': TRELLO.get('token', ''),
                'idList': list_id,
                'name': name,
                'desc': desc,
                'pos': 'top',
            })
            req = urllib.request.Request(
                f'https://api.trello.com/1/cards?{params}',
                method='POST',
            )
            with urllib.request.urlopen(req, timeout=30) as r:
                card = json.loads(r.read().decode('utf-8'))
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({
                'ok': True,
                'short_url': card.get('shortUrl'),
                'card_id': card.get('id'),
                'name': card.get('name'),
            }).encode('utf-8'))
        except Exception as e:
            self.send_response(500)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({'error': str(e)}).encode('utf-8'))

    def handle_start_flow(self) -> None:
        """POST /api/start-flow — persist flow input and launch the orchestrator."""
        # Save flow input to disk AND spawn orchestrator in background.
        # FIX 2026-05-09 21:30 (Fer msg 4796 "está bugueado solo salta supervisor en descargar"):
        # previously this endpoint only saved the inbox file and assumed an external watcher.
        # There was no watcher, so the caja stalled in DESCARGAR and never advanced.
        # Now: after saving the inbox file we spawn `python orchestrator_v2.py --from-inbox <f> --once`
        # in the background. The orchestrator writes _task_state.json and the HTML picks it up on poll.
        try:
            # msg 686: if the reset lock is active, release it (starting a new flow is intentional)
            lock = ROOT / '_reset.lock'
            if lock.exists():
                try: lock.unlink()
                except Exception: pass
            length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(length) if length else b'{}'
            data = json.loads(body.decode('utf-8'))
            # Inbox folder
            inbox = ROOT / '_flow_inbox'
            inbox.mkdir(exist_ok=True)
            ts = data.get('timestamp', '').replace(':', '-').replace('.', '-')
            fname = f"flow_{ts}_{data.get('product','unknown').replace(' ','_')[:30]}.json"
            inbox_file = inbox / fname
            inbox_file.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding='utf-8')
            # Spawn the orchestrator in the background (does not block this response)
            import subprocess as _sp
            spawn_err = None
            try:
                logs_dir = ROOT / '_runs'
                logs_dir.mkdir(exist_ok=True)
                log_file = logs_dir / f'_orchestrator_spawn_{ts or "now"}.log'
                # Default is ALWAYS the real run (Fer msg 4826 2026-05-09): the START button
                # kicks off the real end-to-end flow like a chat input — real download,
                # real whisper, real opus, real trello. Dry run only when data.dry_run=true.
                args = ['python', str(ROOT / 'orchestrator_v2.py'),
                        '--from-inbox', str(inbox_file), '--once']
                if data.get('dry_run', False):
                    args.insert(2, '--dry-run')
                # Suppress the console window on Windows; no-op flag elsewhere.
                CREATE_NO_WINDOW = 0x08000000 if os.name == 'nt' else 0
                with open(log_file, 'ab') as lf:
                    _sp.Popen(
                        args,
                        cwd=str(ROOT),
                        stdout=lf, stderr=_sp.STDOUT,
                        creationflags=CREATE_NO_WINDOW,
                        close_fds=True,
                    )
            except Exception as e:
                spawn_err = str(e)
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            resp = {'ok': True, 'saved': fname, 'spawned': spawn_err is None}
            if spawn_err:
                resp['spawn_error'] = spawn_err
            self.wfile.write(json.dumps(resp).encode('utf-8'))
        except Exception as e:
            self.send_response(500)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({'error': str(e)}).encode('utf-8'))

    def handle_pdf_subproceso_get(self) -> None:
        """GET /api/pdf/<station> — serve _pdfs/PDF_<STATION>.pdf inline."""
        # /api/pdf/<station>  -> _pdfs/PDF_<STATION>.pdf
        rel = self.path[len('/api/pdf/'):]
        rel = rel.split('?')[0].strip().upper()
        if not re.match(r'^[A-Z_]+$', rel):
            self.send_error(400, 'invalid station')
            return
        path = ROOT / '_pdfs' / f'PDF_{rel}.pdf'
        if not path.exists():
            self.send_error(404, f'PDF no existe: {path.name}')
            return
        try:
            data = path.read_bytes()
            self.send_response(200)
            self.send_header('Content-Type', 'application/pdf')
            self.send_header('Content-Disposition', f'inline; filename="{path.name}"')
            self.end_headers()
            self.wfile.write(data)
        except Exception as e:
            self.send_error(500, str(e))

    def handle_master_pdf_get(self) -> None:
        """GET /api/master-pdf — serve the pinned master PDF (v16) inline."""
        # /api/master-pdf -> v16 (covers Flow D "winning concept 5+1" = "format scaling", Fer msg 4743 2026-05-09).
        # v17 rewrites ONLY Flow D-body; our Factory_v4 flow is Flow D-winning-concept
        # (5 similar + 1 risky), which lives in v16.
        path = ROOT.parent / '_documentacion' / 'PDF_MAESTRO_ESCALADO_VIDEO_v16.pdf'
        if not path.exists():
            self.send_error(404, 'master PDF no encontrado')
            return
        try:
            data = path.read_bytes()
            self.send_response(200)
            self.send_header('Content-Type', 'application/pdf')
            self.send_header('Content-Disposition', f'inline; filename="{path.name}"')
            self.end_headers()
            self.wfile.write(data)
        except Exception as e:
            self.send_error(500, str(e))

    def handle_runs_list(self) -> None:
        """List every done flow from the permanent rack (Fer msg 4872+4874).

        Returns: [{flow_id, product, completed_at, completed_at_short, sections,
                   trello_short_url, attachments_count}] sorted by completed_at desc."""
        try:
            runs_dir = ROOT / '_runs'
            out = []
            if runs_dir.exists():
                for d in sorted(runs_dir.iterdir(), key=lambda x: x.stat().st_mtime, reverse=True):
                    if not d.is_dir():
                        continue
                    cj = d / 'caja.json'
                    if not cj.exists():
                        continue
                    try:
                        caja = json.loads(cj.read_text(encoding='utf-8'))
                    except Exception:
                        continue
                    sections = caja.get('sections', {})
                    # Only flows that reached trello_editor (last station) or have all 6 sections
                    is_done = ('trello_editor' in sections) or (len(sections) >= 6)
                    if not is_done:
                        continue
                    inp = caja.get('input_start', {})
                    product = inp.get('product', 'unknown')
                    trello = sections.get('trello_editor', {}) or {}
                    # caja.json mtime is used as the completion timestamp
                    completed_ts = cj.stat().st_mtime
                    import datetime as _dt
                    completed_dt = _dt.datetime.fromtimestamp(completed_ts)
                    out.append({
                        'flow_id': d.name,
                        'product': product,
                        'completed_at': completed_dt.isoformat(),
                        'completed_at_short': completed_dt.strftime('%d/%m %H:%M'),
                        'completed_ts': completed_ts,
                        'sections': list(sections.keys()),
                        'trello_short_url': trello.get('short_url'),
                        'trello_card_id': trello.get('card_id'),
                        'attachments_count': trello.get('attachments_count'),
                        'caja_size_bytes': cj.stat().st_size,
                    })
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps({'runs': out, 'count': len(out)}, ensure_ascii=False).encode('utf-8'))
        except Exception as e:
            self.send_error(500, str(e))

    def handle_caja_get(self) -> None:
        """Serve a flow's caja.json."""
        # /api/caja            -> caja of the current flow_id (read from _task_state.json)
        # /api/caja/<flow_id>  -> that flow's caja directly
        rest = self.path[len('/api/caja'):].lstrip('/').split('?')[0]
        if rest:
            flow_id = rest
            if not re.match(r'^[A-Za-z0-9_\-\.]+$', flow_id):
                self.send_error(400, 'invalid flow_id')
                return
        else:
            state_path = ROOT / '_task_state.json'
            flow_id = None
            if state_path.exists():
                try:
                    state = json.loads(state_path.read_bytes())
                    flow_id = state.get('flow_id')
                except Exception:
                    pass
            # FIX 2026-05-09 (Fer msg 4852+): when there is no active state, or it lacks a
            # flow_id, fall back to the most recent flow in _runs/ with a caja.json. Lets
            # the UI view the caja of the last finished flow, not only the active one.
            if not flow_id:
                runs_dir = ROOT / '_runs'
                if runs_dir.exists():
                    cajas = []
                    for d in runs_dir.iterdir():
                        if d.is_dir():
                            cj = d / 'caja.json'
                            if cj.exists():
                                cajas.append((cj.stat().st_mtime, d.name))
                    if cajas:
                        cajas.sort(reverse=True)
                        flow_id = cajas[0][1]
            if not flow_id:
                self.send_error(404, 'no hay caja en _runs/')
                return
        path = ROOT / '_runs' / flow_id / 'caja.json'
        if not path.exists():
            self.send_error(404, f'caja no existe: {flow_id}')
            return
        try:
            data = path.read_bytes()
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(data)
        except Exception as e:
            self.send_error(500, str(e))

    def do_GET(self) -> None:
        """Route GET: API endpoints first, then fall through to static files."""
        if self.path.startswith('/api/shopify/'):
            return self.proxy_shopify('GET')
        if self.path.startswith('/api/cbr/'):
            return self.handle_cbr_get()
        if self.path == '/api/task-state':
            return self.handle_task_state_get()
        if self.path.startswith('/api/pdf/'):
            return self.handle_pdf_subproceso_get()
        if self.path == '/api/master-pdf' or self.path.startswith('/api/master-pdf?'):
            return self.handle_master_pdf_get()
        if self.path == '/api/caja' or self.path.startswith('/api/caja/') or self.path.startswith('/api/caja?'):
            return self.handle_caja_get()
        if self.path == '/api/runs' or self.path.startswith('/api/runs?'):
            return self.handle_runs_list()
        return super().do_GET()

    def do_POST(self) -> None:
        """Route POST to the API endpoints; anything else is a 404."""
        if self.path.startswith('/api/shopify/'):
            return self.proxy_shopify('POST')
        if self.path == '/api/start-flow':
            return self.handle_start_flow()
        if self.path == '/api/create-trello-card':
            return self.handle_create_trello_card()
        if self.path == '/api/task-state':
            return self.handle_task_state_post()
        if self.path == '/api/force-reset':
            return self.handle_force_reset()
        if self.path == '/api/release-reset':
            return self.handle_release_reset()
        self.send_error(404)

    def do_PUT(self) -> None:
        """Route PUT: Shopify proxy and task-state upsert only."""
        if self.path.startswith('/api/shopify/'):
            return self.proxy_shopify('PUT')
        if self.path == '/api/task-state':
            return self.handle_task_state_post()
        self.send_error(404)

    def do_DELETE(self) -> None:
        """Route DELETE: Shopify proxy and guarded task-state deletion only."""
        if self.path.startswith('/api/shopify/'):
            return self.proxy_shopify('DELETE')
        if self.path == '/api/task-state':
            return self.handle_task_state_delete()
        self.send_error(404)


class ReusableTCPServer(socketserver.TCPServer):
    """TCPServer with SO_REUSEADDR, so the port can be rebound immediately after
    a restart (avoids "Address already in use" on quick relaunch).

    NOTE(review): this server is single-threaded — a slow upstream proxy call
    (30 s timeout) blocks every other client, including the UI's 2 s state poll.
    Consider socketserver.ThreadingTCPServer if that becomes a problem, but note
    the _task_state.json merge logic currently relies on serialized requests.
    """
    allow_reuse_address = True


if __name__ == '__main__':
    # Serve relative static paths from the script's own directory.
    os.chdir(str(ROOT))
    # Force UTF-8 output so the banner prints cleanly on Windows consoles.
    sys.stdout.reconfigure(encoding='utf-8')
    print(f"[factory-demo] Serving {ROOT} on http://localhost:{PORT}")
    print(f"[factory-demo] Shopify proxy: /api/shopify/* -> {SHOPIFY_STORE}")
    with ReusableTCPServer(("", PORT), ProxyHandler) as httpd:
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            print("\n[factory-demo] stopped")
