#!/usr/bin/env python3
"""
MOPS 重大訊息每日收集器
========================
每日自動下載證交所重大訊息 CSV，解析估價師相關公告，存入資料庫。

使用方式：
    python collector.py              # 下載今天的資料
    python collector.py --backfill   # 補抓（如果今天已抓過則跳過）
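    python collector.py --stats      # 顯示資料庫統計
    python collector.py --all        # 收集所有開放資料集（另含公司基本資料、月營收等）
    python collector.py --list       # 列出可收集的資料集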

排程設定（Windows Task Scheduler）：
    每日 18:00 執行（確保當天公告已更新完畢）
"""

import sys
sys.stdout.reconfigure(encoding='utf-8')

import csv
import sqlite3
import requests
import logging
import re
import gzip
import subprocess
import hashlib
from pathlib import Path
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any
import time

# ===== 設定 =====
BASE_DIR = Path(__file__).parent
DATA_DIR = BASE_DIR / "data"
RAW_DIR = DATA_DIR / "raw"
DB_PATH = DATA_DIR / "mops_announcements.db"
LOG_DIR = BASE_DIR / "logs"

# CSV 來源（主要：重大訊息）- 同時收集上市和上櫃
CSV_MARKETS = {
    'L': '上市',
    'O': '上櫃',
}
CSV_URL_TEMPLATE = "https://mopsfin.twse.com.tw/opendata/t187ap04_{market}.csv"
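# 例：market='L' 時，實際下載網址為 https://mopsfin.twse.com.tw/opendata/t187ap04_L.csv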

# ===== 多資料集配置 =====
# 證交所開放資料集，每個資料集可能有不同市場別（L=上市, O=上櫃, P=公開發行, R=興櫃）
DATASETS = {
    't187ap03': {
        'name': '公司基本資料',
        'markets': ['L', 'O'],
        'frequency': 'monthly',  # 不常更新
        'description': '公司基本資料（營業項目、資本額、董事長等）',
    },
    't187ap04': {
        'name': '重大訊息',
        'markets': ['L', 'O'],
        'frequency': 'daily',  # 每日覆蓋
        'description': '重大訊息公告（主要收集對象）',
    },
    't187ap05': {
        'name': '月營收',
        'markets': ['L', 'O'],
        'frequency': 'monthly',  # 每月更新
        'description': '每月營業收入資料',
    },
    't187ap11': {
        'name': '內部人持股異動',
        'markets': ['L', 'O'],
        'frequency': 'monthly',  # 每月更新
        'description': '董監事、經理人持股變動',
    },
    't187ap14': {
        'name': '季報EPS',
        'markets': ['L'],
        'frequency': 'quarterly',  # 每季更新
        'description': '每季每股盈餘資料',
    },
}

# 開放資料基礎 URL
OPENDATA_BASE_URL = "https://mopsfin.twse.com.tw/opendata"
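# 例：t187ap03 上市版會下載 https://mopsfin.twse.com.tw/opendata/t187ap03_L.csv（見 download_dataset）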

# 估價相關關鍵字
APPRAISER_KEYWORDS = [
    "不動產估價師",
    "估價師事務所",
    "專業估價者",
    "估價報告",
    "鑑價",
]
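# 判定方式：公告「說明」欄位出現上述任一關鍵字即標記 has_appraiser_info=1（見 parse_csv）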

# 不動產相關關鍵字（用於標記 is_real_estate_related）
REAL_ESTATE_KEYWORDS = [
    "不動產", "土地", "廠房", "倉庫", "建物", "房屋", "大樓", "辦公大樓",
    "租賃", "租入", "租出", "出租", "承租", "租約",
    "合建", "都更", "都市更新", "權利變換", "危老",
    "使用權資產", "地上權",
    "興建", "新建", "擴建", "改建",
    "碼頭", "港口", "停車場", "倉儲",
    "合作開發", "共同開發", "投資開發", "建案",  # 開發相關
    "工廠", "廠辦", "營業據點", "營運據點",  # 營運設施
]

# 不動產相關排除關鍵字（這些通常是金融商品，不是不動產）
# 注意：避免使用「股份」，因為會匹配到公司名稱「股份有限公司」
REAL_ESTATE_EXCLUDE_KEYWORDS = [
    "有價證券", "公司債", "金融債", "基金", "理財", "存款",
    "授信資產", "次順位", "現金增資", "專利", "訴訟", "裁罰", "背書保證",
    "資金貸與", "股票面額", "轉換公司債", "私募",
    "法人說明會", "法說會", "薪酬委員",
]
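# 判定順序（見 is_real_estate_related）：先比對排除關鍵字（主旨＋說明），任一命中即視為
# 非不動產相關；未命中才再比對上方 REAL_ESTATE_KEYWORDS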

# ===== 通知設定 =====
ENABLE_TOAST_NOTIFICATION = True  # Windows Toast 通知
ENABLE_TELEGRAM_NOTIFICATION = True  # Telegram 通知

# Telegram 設定（從環境變數讀取）
import os
TELEGRAM_BOT_TOKEN = os.environ.get('TELEGRAM_BOT_TOKEN', '')
TELEGRAM_CHAT_ID = os.environ.get('TELEGRAM_CHAT_ID', '')
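# 設定方式示意（Windows；以下權杖與 chat id 皆為假設值）：
#   setx TELEGRAM_BOT_TOKEN "123456789:AAxx..."
#   setx TELEGRAM_CHAT_ID "987654321"
#   （setx 寫入的是使用者環境變數，需重新開啟終端機或排程工作才會生效）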

def escape_html(text: str) -> str:
    """轉義 HTML 特殊字元"""
    if not text:
        return ""
    return (text
            .replace('&', '&amp;')
            .replace('<', '&lt;')
            .replace('>', '&gt;'))


def send_telegram_message(message: str, use_html: bool = True) -> bool:
    """發送 Telegram 訊息

    Args:
        message: 訊息內容（支援 HTML 或純文字）
        use_html: 是否使用 HTML 格式（預設 True，更穩定）

    Returns:
        是否發送成功
    """
    if not ENABLE_TELEGRAM_NOTIFICATION:
        return False

    if not TELEGRAM_BOT_TOKEN or not TELEGRAM_CHAT_ID:
        logging.warning("Telegram 設定不完整，請設定環境變數 TELEGRAM_BOT_TOKEN 和 TELEGRAM_CHAT_ID")
        return False

    try:
        url = f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage"
        payload = {
            'chat_id': TELEGRAM_CHAT_ID,
            'text': message,
            'parse_mode': 'HTML' if use_html else None,
            'disable_web_page_preview': True,  # 避免連結預覽
        }
        # 移除 None 值
        payload = {k: v for k, v in payload.items() if v is not None}

        response = requests.post(url, json=payload, timeout=10)
        response.raise_for_status()
        return True
    except Exception as e:
        logging.warning(f"Telegram 通知發送失敗: {e}")
        # 如果 HTML 模式失敗，嘗試純文字模式
        if use_html:
            logging.info("嘗試使用純文字模式重新發送...")
            # 移除 HTML 標籤
            plain_text = (message
                         .replace('<b>', '').replace('</b>', '')
                         .replace('<i>', '').replace('</i>', '')
                         .replace('<a href="', '').replace('">', ' ').replace('</a>', ''))
            return send_telegram_message(plain_text, use_html=False)
        return False


def send_telegram_test(message: str) -> bool:
    """發送測試用 Telegram 訊息（帶有明顯標記）

    用於開發測試，避免與正式通知混淆。

    Args:
        message: 測試訊息內容

    Returns:
        是否發送成功
    """
    test_message = f"🧪 <b>[測試訊息]</b>\n\n{message}\n\n<i>此為測試訊息，非正式通知</i>"
    return send_telegram_message(test_message, use_html=True)


def send_windows_toast(title: str, message: str, duration: str = "short"):
    """發送 Windows Toast 通知

    Args:
        title: 通知標題
        message: 通知內容
        duration: "short" (5秒) 或 "long" (25秒)
    """
    if not ENABLE_TOAST_NOTIFICATION:
        return

    try:
        # 先以 escape_html 做 XML 轉義，避免標題/內容含 & 或 < 時 LoadXml 解析失敗
        title = escape_html(title)
        message = escape_html(message)
        # 使用 PowerShell 發送 Toast 通知
        ps_script = f'''
[Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null

$template = @"
<toast duration="{duration}">
    <visual>
        <binding template="ToastGeneric">
            <text>{title}</text>
            <text>{message}</text>
        </binding>
    </visual>
</toast>
"@

$xml = New-Object Windows.Data.Xml.Dom.XmlDocument
$xml.LoadXml($template)
$toast = [Windows.UI.Notifications.ToastNotification]::new($xml)
[Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier("MOPS Collector").Show($toast)
'''
        subprocess.run(
            ["powershell", "-ExecutionPolicy", "Bypass", "-Command", ps_script],
            capture_output=True,
            timeout=10
        )
    except Exception as e:
        # 通知失敗不影響主程式
        logging.warning(f"Toast 通知發送失敗: {e}")


def get_cumulative_stats() -> dict:
    """取得累計統計（本週、本月）"""
    if not DB_PATH.exists():
        return {'week': 0, 'month': 0, 'week_amount': 0, 'month_amount': 0}

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    today = datetime.now()
    # announcements.announce_date 為民國年日期（如 1140105，見資料表註解），
    # 比較基準需一併轉為民國格式；此處假設其為 7 碼、無分隔符的民國年月日
    week_start = today - timedelta(days=today.weekday())  # 本週一
    week_start_str = f"{week_start.year - 1911}{week_start.strftime('%m%d')}"
    # 本月一日
    month_start_str = f"{today.year - 1911}{today.strftime('%m')}01"

    # 本週估價相關筆數
    cursor.execute("""
        SELECT COUNT(*) FROM announcements
        WHERE has_appraiser_info = 1
        AND announce_date >= ?
    """, (week_start_str,))
    week_count = cursor.fetchone()[0]

    # 本月估價相關筆數
    cursor.execute("""
        SELECT COUNT(*) FROM announcements
        WHERE has_appraiser_info = 1
        AND announce_date >= ?
    """, (month_start_str,))
    month_count = cursor.fetchone()[0]

    # 本週總金額
    cursor.execute("""
        SELECT COALESCE(SUM(appraisal_amount), 0) FROM appraiser_records r
        JOIN announcements a ON r.announcement_id = a.id
        WHERE a.announce_date >= ?
    """, (week_start_str,))
    week_amount = cursor.fetchone()[0]

    # 本月總金額
    cursor.execute("""
        SELECT COALESCE(SUM(appraisal_amount), 0) FROM appraiser_records r
        JOIN announcements a ON r.announcement_id = a.id
        WHERE a.announce_date >= ?
    """, (month_start_str,))
    month_amount = cursor.fetchone()[0]

    conn.close()

    return {
        'week': week_count,
        'month': month_count,
        'week_amount': week_amount,
        'month_amount': month_amount
    }


def format_amount(amount: float) -> str:
    """格式化金額顯示"""
    if not amount:
        return ""
    if amount >= 100000000:  # 億
        return f"{amount/100000000:.2f} 億"
    elif amount >= 10000:  # 萬
        return f"{amount/10000:,.0f} 萬"
    else:
        return f"{amount:,.0f}"


def categorize_transaction(subject: str) -> str:
    """分類交易類型"""
    subject_lower = subject.lower()
    if any(k in subject_lower for k in ['取得', '購置', '購買', '承租', '租賃', '承攬']):
        return '取得'
    elif any(k in subject_lower for k in ['處分', '出售', '出租']):
        return '處分'
    elif any(k in subject_lower for k in ['續租', '續約']):
        return '續約'
    else:
        return '其他'


def build_daily_summary(rows: list, total_appraiser: int) -> str:
    """建構每日摘要通知"""
    today_str = datetime.now().strftime('%Y-%m-%d')
    weekday_names = ['一', '二', '三', '四', '五', '六', '日']
    weekday = weekday_names[datetime.now().weekday()]

    # 取得累計統計
    stats = get_cumulative_stats()

    # 分類今日公告
    appraiser_rows = [r for r in rows if r['has_appraiser_info']]
    real_estate_only = [r for r in rows if r.get('is_real_estate_related') and not r['has_appraiser_info']]

    # 提取估價資訊並計算金額
    deals = []
    firms = {}  # 事務所統計

    for row in appraiser_rows:
        appraiser_info = extract_appraiser_info(row['content'])
        amount = 0
        firm = None

        if appraiser_info:
            info = appraiser_info[0]
            amount = info.get('appraisal_amount', 0) or 0
            firm = info.get('appraiser_firm', '')

            if firm:
                firms[firm] = firms.get(firm, 0) + 1

        deals.append({
            'stock_code': row['stock_code'],
            'company_name': row['company_name'],
            'subject': row['subject'],
            'amount': amount,
            'firm': firm,
            'category': categorize_transaction(row['subject']),
            'info': appraiser_info[0] if appraiser_info else None
        })

    # 依金額排序（大的在前）
    deals.sort(key=lambda x: x['amount'], reverse=True)

    # 分離大型交易（金額達 1 億元以上）
    big_deals = [d for d in deals if d['amount'] >= 100000000]
    normal_deals = [d for d in deals if d['amount'] < 100000000]

    # 今日總金額
    today_total = sum(d['amount'] for d in deals)

    # === 開始建構訊息 ===
    msg = f"📰 <b>MOPS 每日摘要</b>\n"
    msg += f"📅 {today_str} (週{weekday})\n"
    msg += f"{'─' * 25}\n\n"

    # 按市場統計
    market_counts = {}
    for row in rows:
        market = row.get('market_name', '上市')  # 預設上市（相容舊資料）
        market_counts[market] = market_counts.get(market, 0) + 1

    # === 統計總覽 ===
    msg += f"📊 <b>統計總覽</b>\n"
    market_str = ' + '.join(f"{name} {count}" for name, count in market_counts.items())
    msg += f"今日公告: {len(rows)} 筆 ({market_str})\n"
    msg += f"估價相關: {total_appraiser} 筆"
    if today_total > 0:
        msg += f" | 總金額 {format_amount(today_total)}元"
    msg += f"\n"
    msg += f"本週累計: {stats['week']} 筆"
    if stats['week_amount'] > 0:
        msg += f" | {format_amount(stats['week_amount'])}元"
    msg += f"\n"
    msg += f"本月累計: {stats['month']} 筆"
    if stats['month_amount'] > 0:
        msg += f" | {format_amount(stats['month_amount'])}元"
    msg += f"\n"

    # === 重大交易亮點（>1億）===
    if big_deals:
        msg += f"\n{'─' * 25}\n"
        msg += f"🔥 <b>重大交易亮點</b> (金額 &gt; 1億)\n"

        for deal in big_deals:
            stock = escape_html(deal['stock_code'])
            company = escape_html(deal['company_name'])
            msg += f"\n<b>[{stock}] {company}</b>\n"
            msg += f"💰 <b>{format_amount(deal['amount'])}元</b>\n"
            if deal['firm']:
                msg += f"📍 {escape_html(deal['firm'][:40])}\n"
            msg += f"📋 {deal['category']}\n"

    # === 一般估價公告 ===
    if normal_deals:
        msg += f"\n{'─' * 25}\n"
        msg += f"🏛 <b>估價師相關公告</b> ({len(normal_deals)} 筆)\n"

        for deal in normal_deals:
            stock = escape_html(deal['stock_code'])
            company = escape_html(deal['company_name'])
            msg += f"\n<b>[{stock}] {company}</b>\n"

            if deal['amount'] > 0:
                msg += f"💰 {format_amount(deal['amount'])}元\n"
            if deal['firm']:
                msg += f"📍 {escape_html(deal['firm'][:40])}\n"
            if deal['info'] and deal['info'].get('notes'):
                notes = escape_html(deal['info']['notes'][:50])
                msg += f"📝 {notes}\n"

    # === 事務所統計 ===
    if firms:
        msg += f"\n{'─' * 25}\n"
        msg += f"🏢 <b>今日事務所</b>\n"
        sorted_firms = sorted(firms.items(), key=lambda x: x[1], reverse=True)
        for firm, count in sorted_firms[:5]:  # 最多顯示5家
            firm_short = escape_html(firm[:20])
            msg += f"• {firm_short}: {count} 筆\n"

    # === 其他不動產相關 ===
    if real_estate_only:
        msg += f"\n{'─' * 25}\n"
        msg += f"🏢 <b>其他不動產</b> ({len(real_estate_only)} 筆)\n"

        for row in real_estate_only[:5]:  # 最多顯示5筆
            stock = escape_html(row['stock_code'])
            company = escape_html(row['company_name'])
            subject_short = escape_html(row['subject'][:40])
            msg += f"• [{stock}] {company}: {subject_short}\n"

        if len(real_estate_only) > 5:
            msg += f"...還有 {len(real_estate_only) - 5} 筆\n"

    # === 底部 ===
    msg += f"\n{'─' * 25}\n"
    msg += f'🔗 <a href="https://mops.twse.com.tw/">MOPS 公開資訊</a>'

    return msg


# ===== 日誌設定 =====
def setup_logging():
    """設定日誌"""
    LOG_DIR.mkdir(parents=True, exist_ok=True)

    log_file = LOG_DIR / f"collector_{datetime.now().strftime('%Y%m')}.log"

    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] %(message)s',
        handlers=[
            logging.FileHandler(log_file, encoding='utf-8'),
            logging.StreamHandler(sys.stdout)
        ]
    )
    return logging.getLogger(__name__)

logger = setup_logging()

# ===== 資料庫 =====
def init_database():
    """初始化 SQLite 資料庫"""
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # 原始公告表
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS announcements (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            publish_date TEXT NOT NULL,           -- 出表日期（民國）
            announce_date TEXT NOT NULL,          -- 發言日期（民國）
            announce_time TEXT,                   -- 發言時間
            stock_code TEXT NOT NULL,             -- 公司代號
            company_name TEXT NOT NULL,           -- 公司名稱
            subject TEXT NOT NULL,                -- 主旨
            regulation TEXT,                      -- 符合條款
            event_date TEXT,                      -- 事實發生日
            content TEXT,                         -- 說明（完整內容）
            has_appraiser_info INTEGER DEFAULT 0, -- 是否包含估價師資訊
            is_real_estate_related INTEGER DEFAULT 0, -- 是否與不動產相關（都更、合建、廠房等）
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            UNIQUE(stock_code, announce_date, announce_time, subject)
        )
    """)

    # 嘗試新增 is_real_estate_related 欄位（如果不存在）
    try:
        cursor.execute("ALTER TABLE announcements ADD COLUMN is_real_estate_related INTEGER DEFAULT 0")
    except sqlite3.OperationalError:
        pass  # 欄位已存在

    # 估價師資訊表（從公告中解析）
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS appraiser_records (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            announcement_id INTEGER REFERENCES announcements(id),
            appraiser_firm TEXT,                  -- 估價師事務所
            appraiser_names TEXT,                 -- 估價師姓名（逗號分隔）
            appraiser_licenses TEXT,              -- 開業證書字號（逗號分隔）
            appraisal_amount BIGINT,              -- 估價金額
            notes TEXT,                           -- 備註（外幣金額、分配比例等）
            raw_text TEXT,                        -- 原始文字（用於驗證）
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """)

    # 嘗試新增 notes 欄位（如果不存在）
    try:
        cursor.execute("ALTER TABLE appraiser_records ADD COLUMN notes TEXT")
    except sqlite3.OperationalError:
        pass  # 欄位已存在

    # 每日收集記錄
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS collection_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            collect_date TEXT UNIQUE NOT NULL,    -- 收集日期（西元）
            csv_date TEXT,                        -- CSV 中的日期（民國）
            total_count INTEGER,                  -- 總公告數
            appraiser_count INTEGER,              -- 估價相關公告數
            file_size INTEGER,                    -- 檔案大小（bytes）
            status TEXT,                          -- success / failed
            error_message TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """)

    # 索引
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_announcements_date ON announcements(announce_date)")
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_announcements_stock ON announcements(stock_code)")
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_announcements_appraiser ON announcements(has_appraiser_info)")
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_announcements_real_estate ON announcements(is_real_estate_related)")

    conn.commit()
    return conn

# ===== 下載 =====
# 重試間隔（指數退避）：3分鐘、30分鐘、3小時
RETRY_DELAYS = [180, 1800, 10800]  # 秒
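# 第 N 次失敗後等待 RETRY_DELAYS[N-1] 秒；若自訂重試次數超過清單長度，沿用最後一項延遲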

def download_csv(market: str = 'L', save_raw: bool = True, max_retries: int = 3) -> Optional[str]:
    """下載指定市場的 CSV 並回傳內容，支援自動重試（指數退避）

    重試策略（預設 max_retries=3）：
        - 第 1 次失敗：等待 3 分鐘後重試
        - 第 2 次失敗：等待 30 分鐘後重試
        - 第 3 次失敗：放棄並回傳 None
        （RETRY_DELAYS 的第三段 3 小時，只有在 max_retries > 3 時才會用到）

    Args:
        market: 市場別（L=上市, O=上櫃）
        save_raw: 是否儲存原始檔案
        max_retries: 最大重試次數（預設 3 次）

    Returns:
        CSV 內容字串，失敗時回傳 None
    """
    url = CSV_URL_TEMPLATE.format(market=market)
    market_name = CSV_MARKETS.get(market, market)
    logger.info(f"正在下載 {market_name} ({market}): {url}")

    for attempt in range(1, max_retries + 1):
        try:
            response = requests.get(url, timeout=60)
            response.raise_for_status()

            content = response.content

            # 儲存原始檔案（gzip 壓縮）
            if save_raw:
                RAW_DIR.mkdir(parents=True, exist_ok=True)
                today = datetime.now().strftime('%Y%m%d')
                raw_file = RAW_DIR / f"t187ap04_{market}_{today}.csv.gz"

                with gzip.open(raw_file, 'wb') as f:
                    f.write(content)

                logger.info(f"原始檔案已儲存: {raw_file} ({len(content):,} bytes)")

            return content.decode('utf-8-sig')

        except requests.RequestException as e:
            if attempt < max_retries:
                delay = RETRY_DELAYS[attempt - 1] if attempt - 1 < len(RETRY_DELAYS) else RETRY_DELAYS[-1]
                delay_min = delay // 60
                logger.warning(f"下載失敗 (第 {attempt}/{max_retries} 次): {e}")
                logger.info(f"等待 {delay_min} 分鐘後重試...")
                time.sleep(delay)
            else:
                logger.error(f"下載失敗 (已重試 {max_retries} 次，放棄): {e}")

    return None


def download_all_markets(save_raw: bool = True, max_retries: int = 3) -> Dict[str, Optional[str]]:
    """下載所有市場（上市+上櫃）的 CSV

    Args:
        save_raw: 是否儲存原始檔案
        max_retries: 最大重試次數

    Returns:
        {market: csv_content} 字典，失敗的市場值為 None
    """
    results = {}
    for market in CSV_MARKETS:
        results[market] = download_csv(market, save_raw, max_retries)
    return results


def get_latest_file_hash(dataset_dir: Path, dataset_code: str, market: str) -> Optional[str]:
    """取得該資料集最新檔案的 MD5 hash

    Args:
        dataset_dir: 資料集目錄
        dataset_code: 資料集代碼
        market: 市場別

    Returns:
        MD5 hash 字串，無檔案時回傳 None
    """
    pattern = f"{dataset_code}_{market}_*.csv.gz"
    files = sorted(dataset_dir.glob(pattern), reverse=True)

    if not files:
        return None

    # 讀取最新檔案並計算 hash
    with gzip.open(files[0], 'rb') as f:
        content = f.read()
        return hashlib.md5(content).hexdigest()


def download_dataset(dataset_code: str, market: str, max_retries: int = 3, skip_unchanged: bool = True) -> tuple[str, int]:
    """下載指定資料集並儲存（支援重複檢查）

    Args:
        dataset_code: 資料集代碼（如 t187ap03）
        market: 市場別（L=上市, O=上櫃）
        max_retries: 最大重試次數
        skip_unchanged: 是否跳過內容未變更的檔案

    Returns:
        (狀態, 檔案大小 bytes)
        狀態: 'saved' | 'unchanged' | 'failed'
    """
    filename = f"{dataset_code}_{market}.csv"
    url = f"{OPENDATA_BASE_URL}/{filename}"
    logger.info(f"正在下載: {filename}")

    for attempt in range(1, max_retries + 1):
        try:
            response = requests.get(url, timeout=60)
            response.raise_for_status()

            content = response.content

            # 儲存到對應資料夾（按資料集分類）
            dataset_dir = RAW_DIR / dataset_code
            dataset_dir.mkdir(parents=True, exist_ok=True)

            # 檢查內容是否有變更
            if skip_unchanged:
                new_hash = hashlib.md5(content).hexdigest()
                old_hash = get_latest_file_hash(dataset_dir, dataset_code, market)

                if old_hash and new_hash == old_hash:
                    logger.info(f"內容未變更，跳過: {filename}")
                    return 'unchanged', len(content)

            today = datetime.now().strftime('%Y%m%d')
            raw_file = dataset_dir / f"{dataset_code}_{market}_{today}.csv.gz"

            with gzip.open(raw_file, 'wb') as f:
                f.write(content)

            logger.info(f"已儲存: {raw_file.name} ({len(content):,} bytes)")
            return 'saved', len(content)

        except requests.RequestException as e:
            if attempt < max_retries:
                delay = RETRY_DELAYS[attempt - 1] if attempt - 1 < len(RETRY_DELAYS) else RETRY_DELAYS[-1]
                delay_min = delay // 60
                logger.warning(f"下載失敗 (第 {attempt}/{max_retries} 次): {e}")
                logger.info(f"等待 {delay_min} 分鐘後重試...")
                time.sleep(delay)
            else:
                logger.error(f"下載失敗 ({filename}): {e}")

    return 'failed', 0


def collect_all_datasets() -> dict:
    """收集所有資料集（支援重複檢查）

    Returns:
        收集結果摘要 {dataset_code: {'saved': int, 'unchanged': int, 'failed': int, 'size': int}}
    """
    logger.info("=" * 50)
    logger.info("開始收集所有資料集")
    logger.info("=" * 50)

    results = {}

    for dataset_code, config in DATASETS.items():
        dataset_name = config['name']
        markets = config['markets']

        logger.info(f"\n--- {dataset_name} ({dataset_code}) ---")

        saved_count = 0
        unchanged_count = 0
        failed_count = 0
        total_size = 0

        for market in markets:
            status, size = download_dataset(dataset_code, market)

            if status == 'saved':
                saved_count += 1
                total_size += size
            elif status == 'unchanged':
                unchanged_count += 1
            else:  # failed
                failed_count += 1

        results[dataset_code] = {
            'name': dataset_name,
            'saved': saved_count,
            'unchanged': unchanged_count,
            'failed': failed_count,
            'size': total_size,
        }

        # 狀態標示
        if failed_count > 0:
            icon = '✗'
        elif saved_count > 0:
            icon = '✓'
        else:
            icon = '○'  # 全部未變更

        status_parts = []
        if saved_count > 0:
            status_parts.append(f"{saved_count} 新增")
        if unchanged_count > 0:
            status_parts.append(f"{unchanged_count} 未變更")
        if failed_count > 0:
            status_parts.append(f"{failed_count} 失敗")
        status_str = ', '.join(status_parts)

        logger.info(f"{icon} {dataset_name}: {status_str}, {total_size:,} bytes")

    # 統計總結
    total_saved = sum(r['saved'] for r in results.values())
    total_unchanged = sum(r['unchanged'] for r in results.values())
    total_failed = sum(r['failed'] for r in results.values())
    total_size = sum(r['size'] for r in results.values())

    logger.info("\n" + "=" * 50)
    logger.info(f"收集完成: {total_saved} 新增, {total_unchanged} 未變更, {total_failed} 失敗")
    logger.info(f"新增檔案大小: {total_size:,} bytes ({total_size/1024/1024:.2f} MB)")
    logger.info("=" * 50)

    return results


# ===== 解析 =====
def is_real_estate_related(content: str, subject: str) -> bool:
    """判斷公告是否與不動產相關

    Args:
        content: 公告說明內容
        subject: 公告主旨

    Returns:
        是否與不動產相關
    """
    full_text = f"{subject} {content}"

    # 先檢查是否有排除關鍵字（金融商品等）
    if any(kw in full_text for kw in REAL_ESTATE_EXCLUDE_KEYWORDS):
        return False

    # 檢查是否有不動產相關關鍵字
    return any(kw in full_text for kw in REAL_ESTATE_KEYWORDS)


def parse_csv(csv_content: str) -> List[Dict[str, Any]]:
    """解析 CSV 內容"""
    rows = []
    reader = csv.DictReader(csv_content.splitlines())
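    # 程式讀取的 CSV 欄位：出表日期、發言日期、發言時間、公司代號、公司名稱、主旨、符合條款、事實發生日、說明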

    for row in reader:
        # 檢查是否包含估價師資訊
        content = row.get('說明', '')
        subject = row.get('主旨', '')
        has_appraiser = any(kw in content for kw in APPRAISER_KEYWORDS)

        # 檢查是否與不動產相關（都更、合建、廠房等）
        real_estate_related = is_real_estate_related(content, subject)

        rows.append({
            'publish_date': row.get('出表日期', ''),
            'announce_date': row.get('發言日期', ''),
            'announce_time': row.get('發言時間', ''),
            'stock_code': row.get('公司代號', ''),
            'company_name': row.get('公司名稱', ''),
            'subject': subject,
            'regulation': row.get('符合條款', ''),
            'event_date': row.get('事實發生日', ''),
            'content': content,
            'has_appraiser_info': 1 if has_appraiser else 0,
            'is_real_estate_related': 1 if real_estate_related else 0,
        })

    return rows

def extract_appraiser_info(content: str) -> List[Dict[str, Any]]:
    """從公告內容中提取估價師資訊"""
    results = []

    # 排除詞（這些不是有效的估價師資訊）
    EXCLUDE_PATTERNS = [
        r'^不適用',      # 以「不適用」開頭
        r'^無$',         # 只有「無」
        r'^否$',         # 只有「否」
        r'^N/?A$',       # N/A 或 NA
        r'^-$',          # 只有「-」
        r'^。$',         # 只有句號
        r'^\s*$',        # 空白
    ]

    def is_invalid(value: str) -> bool:
        """檢查是否為無效值"""
        if not value:
            return True
        value = value.strip()
        # 檢查是否匹配任何排除模式
        for pattern in EXCLUDE_PATTERNS:
            if re.match(pattern, value, re.IGNORECASE):
                return True
        # 檢查內容中是否包含「不適用」（可能在冒號後面）
        if re.search(r'[:：]\s*不適用', value):
            return True
        # 長度不足 2 字視為無效（2～3 字的中文姓名不會被誤判）
        if len(value) < 2:
            return True
        return False

    def clean_value(value: str) -> Optional[str]:
        """清理提取的值，過濾掉無效內容"""
        if not value:
            return None
        value = value.strip()

        # 移除開頭的數字編號（如 "8." "9." 等）
        value = re.sub(r'^\d+\.\s*', '', value)

        # 移除開頭的欄位名稱前綴（如「估價事務所：」「專業估價者事務所：」）
        value = re.sub(r'^(?:估價事務所|專業估價者事務所|專業估價師事務所)[:：]\s*', '', value)

        # 截斷到下一個欄位名稱之前（常見的欄位名稱）
        stop_patterns = [
            r'\d+\.\s*(?:不動產估價師|專業估價師|估價金額|估價報告|取得|處分|交易)',
            r'(?:姓名|字號|金額|目的|日期|價格|條件)[:：]',
        ]
        for pattern in stop_patterns:
            match = re.search(pattern, value)
            if match:
                value = value[:match.start()].strip()

        # 移除尾部的標點符號
        value = re.sub(r'[。，、；：]+$', '', value)

        # 檢查是否為無效值
        if is_invalid(value):
            return None

        return value

    # 模式1: 專業估價者事務所或公司名稱（更精確的匹配）
    pattern1 = r'(?:專業估價者事務所|專業估價師事務所|不動產估價師事務所)[^:：]*[:：]\s*([^,，\n]+?)(?=\s*(?:\d+\.|估價金額|$))'

    # 模式2: 估價師姓名（更精確的匹配）
    pattern2 = r'(?:專業估價師姓名|不動產估價師姓名)[^:：]*[:：]\s*([^,，\n]+?)(?=\s*(?:\d+\.|開業證書|$))'

    # 模式3: 開業證書字號
    pattern3 = r'(?:專業估價師開業證書字號|不動產估價師開業證書字號)[^:：]*[:：]\s*([^,，\n]+?)(?=\s*(?:\d+\.|估價報告|$))'

    # 模式4: 估價金額（支援新台幣前綴）
    pattern4 = r'估價金額[^0-9]*?(?:新台幣|新臺幣|NT\$?)?\s*([0-9,]+)\s*元'
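    # 可匹配片段示意（非真實公告內容）：
    #   pattern1：「專業估價者事務所或公司名稱：○○不動產估價師事務所」
    #   pattern2：「不動產估價師姓名：王○○」
    #   pattern4：「估價金額：新台幣 123,456,789 元」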

    firms_raw = re.findall(pattern1, content, re.MULTILINE)
    names_raw = re.findall(pattern2, content, re.MULTILINE)
    licenses_raw = re.findall(pattern3, content, re.MULTILINE)
    amounts = re.findall(pattern4, content)

    # 清理並過濾無效值
    firms = [f for f in [clean_value(f) for f in firms_raw] if f]
    names = [n for n in [clean_value(n) for n in names_raw] if n]
    licenses = [l for l in [clean_value(l) for l in licenses_raw] if l]

    # === 提取備註資訊 ===
    notes = []

    # 1. 外幣金額（日幣、美金等）
    foreign_amounts = re.findall(
        r'(?:日幣|日圓|JPY|美金|美元|USD)\s*[0-9,]+(?:\s*\([^)]+\))?',
        content
    )
    if foreign_amounts:
        notes.append('外幣: ' + '; '.join(foreign_amounts[:2]))

    # 2. 分配比例（都更案）
    ratio_patterns = [
        r'地主分配比例[：:]\s*([0-9.%~～\-]+)',
        r'建方分配比例[：:]\s*([0-9.%~～\-]+)',
        r'共同負擔比例[約]?\s*([0-9.%~～\-]+)',
    ]
    for pattern in ratio_patterns:
        match = re.search(pattern, content)
        if match:
            ratio_type = '地主比例' if '地主' in pattern else ('建方比例' if '建方' in pattern else '共同負擔')
            notes.append(f'{ratio_type}: {match.group(1)}')

    # 3. 總成本/投入金額（更精確的匹配）
    cost_patterns = [
        r'預計投入總成本[^0-9]*?(?:約)?(?:新台幣|新臺幣)?\s*([0-9,.]+)\s*(億|萬)',
        r'投資金額[^0-9]*?(?:約)?(?:新台幣|新臺幣)?\s*([0-9,.]+)\s*(億|萬)',
    ]
    for pattern in cost_patterns:
        cost_match = re.search(pattern, content)
        if cost_match:
            notes.append(f'總成本: {cost_match.group(1)}{cost_match.group(2)}')
            break

    # 4. 租金（月租）
    rent_match = re.search(r'(?:月租金?|租金)[^0-9]*([0-9,]+)\s*元', content)
    if rent_match and not amounts:  # 如果沒抓到一般金額，才用租金
        notes.append(f'月租金: {rent_match.group(1)} 元')

    # 5. 估價結果敘述（都更案、權變案等）- 只在沒有其他備註時才加
    if not notes:
        eval_match = re.search(r'估價結果[：:]\s*([^。\n]{10,60})', content)
        if eval_match and '不適用' not in eval_match.group(1):
            notes.append(f'估價結果: {eval_match.group(1)[:35]}')

    # 只有真正有估價師資訊時才回傳（至少要有事務所名稱或估價師姓名）
    if firms or names:
        results.append({
            'appraiser_firm': '; '.join(firms) if firms else None,
            'appraiser_names': '; '.join(names) if names else None,
            'appraiser_licenses': '; '.join(licenses) if licenses else None,
            'appraisal_amount': int(amounts[0].replace(',', '')) if amounts else None,
            'notes': '; '.join(notes) if notes else None,  # 新增備註欄位
            'raw_text': content[:2000],  # 保留前 2000 字元供驗證
        })

    return results

# ===== 儲存 =====
def save_to_database(conn: sqlite3.Connection, rows: List[Dict[str, Any]]) -> tuple[int, int]:
    """儲存到資料庫，回傳 (新增數, 估價相關數)"""
    cursor = conn.cursor()
    new_count = 0
    appraiser_count = 0

    for row in rows:
        try:
            cursor.execute("""
                INSERT OR IGNORE INTO announcements
                (publish_date, announce_date, announce_time, stock_code, company_name,
                 subject, regulation, event_date, content, has_appraiser_info, is_real_estate_related)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                row['publish_date'],
                row['announce_date'],
                row['announce_time'],
                row['stock_code'],
                row['company_name'],
                row['subject'],
                row['regulation'],
                row['event_date'],
                row['content'],
                row['has_appraiser_info'],
                row.get('is_real_estate_related', 0),
            ))

            if cursor.rowcount > 0:
                new_count += 1

                # 如果有估價師資訊，解析並儲存
                if row['has_appraiser_info']:
                    appraiser_count += 1
                    announcement_id = cursor.lastrowid

                    appraiser_records = extract_appraiser_info(row['content'])
                    for record in appraiser_records:
                        cursor.execute("""
                            INSERT INTO appraiser_records
                            (announcement_id, appraiser_firm, appraiser_names,
                             appraiser_licenses, appraisal_amount, notes, raw_text)
                            VALUES (?, ?, ?, ?, ?, ?, ?)
                        """, (
                            announcement_id,
                            record['appraiser_firm'],
                            record['appraiser_names'],
                            record['appraiser_licenses'],
                            record['appraisal_amount'],
                            record.get('notes'),  # 新增備註欄位
                            record['raw_text'],
                        ))

        except sqlite3.Error as e:
            logger.warning(f"儲存失敗 ({row['stock_code']}): {e}")

    conn.commit()
    return new_count, appraiser_count

def log_collection(conn: sqlite3.Connection, csv_date: Optional[str], total: int,
                   appraiser: int, file_size: int, status: str, error: Optional[str] = None):
    """記錄收集結果"""
    cursor = conn.cursor()
    today = datetime.now().strftime('%Y-%m-%d')

    cursor.execute("""
        INSERT OR REPLACE INTO collection_log
        (collect_date, csv_date, total_count, appraiser_count, file_size, status, error_message)
        VALUES (?, ?, ?, ?, ?, ?, ?)
    """, (today, csv_date, total, appraiser, file_size, status, error))

    conn.commit()

# ===== 主程式 =====
def collect_today():
    """執行今日收集（同時收集上市和上櫃）"""
    logger.info("=" * 50)
    logger.info("MOPS 重大訊息收集器啟動")
    logger.info(f"收集市場: {', '.join(f'{v}({k})' for k, v in CSV_MARKETS.items())}")
    logger.info("=" * 50)

    conn = init_database()

    # 下載所有市場的 CSV
    market_results = download_all_markets(save_raw=True)

    # 檢查是否有任何市場成功下載
    successful_markets = {k: v for k, v in market_results.items() if v}
    if not successful_markets:
        log_collection(conn, None, 0, 0, 0, 'failed', '所有市場下載失敗')
        send_windows_toast(
            "MOPS 收集器 - 失敗",
            "CSV 下載失敗，請檢查網路連線"
        )
        send_telegram_message("❌ <b>MOPS 收集器失敗</b>\n\n所有市場 CSV 下載失敗，請檢查網路連線")
        return False

    # 合併解析所有市場的資料
    all_rows = []
    total_size = 0
    for market, csv_content in successful_markets.items():
        market_name = CSV_MARKETS.get(market, market)
        rows = parse_csv(csv_content)
        # 為每筆記錄加上市場別標記
        for row in rows:
            row['market'] = market
            row['market_name'] = market_name
        all_rows.extend(rows)
        total_size += len(csv_content.encode('utf-8'))
        logger.info(f"解析完成 [{market_name}]: {len(rows)} 筆公告")

    # 報告失敗的市場
    failed_markets = [k for k, v in market_results.items() if not v]
    if failed_markets:
        failed_names = [CSV_MARKETS.get(m, m) for m in failed_markets]
        logger.warning(f"部分市場下載失敗: {', '.join(failed_names)}")

    logger.info(f"合併後總計: {len(all_rows)} 筆公告")

    if not all_rows:
        log_collection(conn, None, 0, 0, total_size, 'failed', 'CSV 無資料')
        return False

    # 取得 CSV 日期
    csv_date = all_rows[0]['announce_date'] if all_rows else None

    # 儲存
    new_count, appraiser_count = save_to_database(conn, all_rows)

    # 統計
    total_appraiser = sum(1 for r in all_rows if r['has_appraiser_info'])

    # 按市場統計
    market_stats = {}
    for market in CSV_MARKETS:
        market_rows = [r for r in all_rows if r.get('market') == market]
        market_appraiser = sum(1 for r in market_rows if r['has_appraiser_info'])
        market_stats[market] = {'total': len(market_rows), 'appraiser': market_appraiser}

    logger.info(f"儲存完成: 新增 {new_count} 筆，估價相關 {appraiser_count} 筆")
    logger.info(f"今日估價相關公告總數: {total_appraiser} 筆")
    for market, stats in market_stats.items():
        market_name = CSV_MARKETS.get(market, market)
        logger.info(f"  - {market_name}: {stats['total']} 筆 (估價: {stats['appraiser']})")

    # 記錄
    log_collection(conn, csv_date, len(all_rows), total_appraiser, total_size, 'success')

    # 顯示估價相關公告摘要
    if total_appraiser > 0:
        logger.info("-" * 50)
        logger.info("今日估價相關公告:")
        for row in all_rows:
            if row['has_appraiser_info']:
                market_tag = row.get('market_name', '')
                logger.info(f"  [{row['stock_code']}] {row['company_name']} ({market_tag}): {row['subject'][:50]}")

    conn.close()
    logger.info("收集完成")

    # 發送成功通知
    market_summary = ', '.join(f"{CSV_MARKETS[m]}:{market_stats[m]['total']}" for m in CSV_MARKETS if m in market_stats)
    send_windows_toast(
        "MOPS 收集器 - 完成",
        f"已收集 {len(all_rows)} 筆公告 ({market_summary})，其中 {total_appraiser} 筆與估價相關"
    )

    # Telegram 每日摘要通知
    tg_message = build_daily_summary(all_rows, total_appraiser)
    send_telegram_message(tg_message)

    return True

def show_stats():
    """顯示統計資訊"""
    if not DB_PATH.exists():
        print("資料庫尚未建立")
        return

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # 總計
    cursor.execute("SELECT COUNT(*) FROM announcements")
    total = cursor.fetchone()[0]

    cursor.execute("SELECT COUNT(*) FROM announcements WHERE has_appraiser_info = 1")
    appraiser_total = cursor.fetchone()[0]

    cursor.execute("SELECT COUNT(DISTINCT announce_date) FROM announcements")
    days = cursor.fetchone()[0]

    cursor.execute("SELECT MIN(announce_date), MAX(announce_date) FROM announcements")
    date_range = cursor.fetchone()

    print("\n" + "=" * 50)
    print("MOPS 重大訊息資料庫統計")
    print("=" * 50)
    print(f"總公告數: {total:,} 筆")
    print(f"估價相關: {appraiser_total:,} 筆")
    print(f"收集天數: {days} 天")
    print(f"日期範圍: {date_range[0]} ~ {date_range[1]}")

    # 最近收集記錄
    cursor.execute("""
        SELECT collect_date, csv_date, total_count, appraiser_count, status
        FROM collection_log ORDER BY collect_date DESC LIMIT 5
    """)
    logs = cursor.fetchall()

    if logs:
        print("\n最近收集記錄:")
        for log in logs:
            print(f"  {log[0]}: {log[2]:,} 筆 (估價: {log[3]}) [{log[4]}]")

    conn.close()

# ===== CLI =====
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description='MOPS 重大訊息每日收集器')
    parser.add_argument('--stats', action='store_true', help='顯示統計資訊')
    parser.add_argument('--backfill', action='store_true', help='補抓模式（若今日已成功收集則跳過）')
    parser.add_argument('--all', action='store_true', help='收集所有資料集（公司基本資料、月營收、內部人持股等）')
    parser.add_argument('--list', action='store_true', help='列出所有可用的資料集')

    args = parser.parse_args()

    if args.list:
        print("\n可用的資料集:")
        print("=" * 60)
        for code, config in DATASETS.items():
            markets = ', '.join(config['markets'])
            print(f"  {code}: {config['name']}")
            print(f"           市場: {markets} | 更新頻率: {config['frequency']}")
            print(f"           {config['description']}")
            print()
    elif args.stats:
        show_stats()
    elif args.all:
        # 先收集重大訊息（會進資料庫）
        success = collect_today()
        # 再收集其他資料集（只儲存 CSV）
        collect_all_datasets()
        sys.exit(0 if success else 1)
    elif args.backfill:
        # 補抓模式：collection_log 中已有今日成功記錄則跳過，否則照常收集
        conn = init_database()
        done = conn.execute(
            "SELECT 1 FROM collection_log WHERE collect_date = ? AND status = 'success'",
            (datetime.now().strftime('%Y-%m-%d'),)
        ).fetchone()
        conn.close()
        if done:
            logger.info("今日已成功收集，跳過（--backfill）")
            sys.exit(0)
        sys.exit(0 if collect_today() else 1)
    else:
        success = collect_today()
        sys.exit(0 if success else 1)
