"""
建立 viewer 用的合併資料檔
將所有公司資料合併成單一 JSON，加速頁面載入
"""
import sys
sys.stdout.reconfigure(encoding='utf-8')

import json
from pathlib import Path

def build_viewer_data():
    data_dir = Path(__file__).parent / "data"
    index_file = data_dir / "index.json"

    if not index_file.exists():
        print("錯誤：index.json 不存在")
        return

    with open(index_file, 'r', encoding='utf-8') as f:
        index = json.load(f)

    print(f"正在合併 {len(index)} 間公司資料...")

    all_companies = []
    for code, info in index.items():
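        # Each company's latest snapshot is expected at data/<code>_latest.json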
        latest_file = data_dir / f"{code}_latest.json"
        if latest_file.exists():
            try:
                with open(latest_file, 'r', encoding='utf-8') as f:
                    company_data = json.load(f)

                all_companies.append({
                    "code": code,
                    "name": info.get("name", ""),
                    "industry": info.get("industry", ""),
                    "lastFetched": info.get("last_fetched", ""),
                    "data": company_data.get("data", {})
                })
            except Exception as e:
                print(f"  跳過 {code}: {e}")

    # Sort by company code for deterministic output
    all_companies.sort(key=lambda x: x["code"])

    # Write the merged file
    output_file = data_dir / "all_companies.json"
    with open(output_file, 'w', encoding='utf-8') as f:
        json.dump(all_companies, f, ensure_ascii=False)

    # Report the output file size
    size_kb = output_file.stat().st_size / 1024
    size_mb = size_kb / 1024

    print(f"\n完成！")
    print(f"公司數量: {len(all_companies)} 間")
    print(f"檔案大小: {size_kb:.0f} KB ({size_mb:.2f} MB)")
    print(f"輸出: {output_file}")

if __name__ == '__main__':
    build_viewer_data()
