"""
爬取所有科學園區廠商名錄
"""
import sys
from pathlib import Path

# Add the project root (which contains the crawlers package) to sys.path
sys.path.insert(0, str(Path(__file__).parent.parent))

from crawlers.stsp_crawler import crawl_stsp
from crawlers.sipa_crawler import crawl_sipa
from crawlers.ctsp_crawler import crawl_ctsp

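# Suppress urllib3's InsecureRequestWarning, raised when HTTPS requests
# skip certificate verification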
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def crawl_all(fetch_details: bool = False):
    """Crawl all three science parks and return a per-park result summary."""
    results = {}

    print("=" * 60)
    print("Starting Science Park Crawler")
    print("=" * 60)

    # Each entry: (result key, display name, crawler entry point).
    parks = [
        ("STSP", "Southern Taiwan Science Park", crawl_stsp),
        ("SIPA", "Hsinchu Science Park", crawl_sipa),
        ("CTSP", "Central Taiwan Science Park", crawl_ctsp),
    ]

    for i, (code, name, crawl) in enumerate(parks, start=1):
        print(f"\n[{i}/{len(parks)}] {name}")
        print("-" * 40)
        try:
            df = crawl(fetch_details=fetch_details)
            results[code] = {"success": True, "count": len(df)}
        except Exception as e:
            # One park failing should not abort the rest of the run.
            print(f"{code} Error: {e}")
            results[code] = {"success": False, "error": str(e)}

    # Summary
    print("\n" + "=" * 60)
    print("Crawl Summary")
    print("=" * 60)

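    # Tally totals across parks; failures are reported but excluded from the count.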
    total = 0
    for park, result in results.items():
        if result["success"]:
            print(f"  {park}: {result['count']} companies")
            total += result["count"]
        else:
            print(f"  {park}: FAILED - {result.get('error', 'Unknown error')}")

    print(f"\n  Total: {total} companies")
    print("=" * 60)

    return results


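# CLI entry point; pass --details to also fetch detailed info (slower).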
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Crawl all science park company directories")
    parser.add_argument("--details", action="store_true", help="Fetch detailed info (slower)")

    args = parser.parse_args()
    crawl_all(fetch_details=args.details)
