import json
import os
import sys
import time
import requests
import paramiko
from pymongo import MongoClient
from lxml import html

# ===========================================
# ✅ MongoDB Connection
# ===========================================
print("🔌 Connecting to MongoDB...")
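# NOTE: the password is percent-encoded (%40 == "@"), as pymongo requires for reserved URI characters.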
MONGO_URI = "mongodb://DeepakBansal:OOPc%401234&*()@195.35.23.181:27017/"
client = MongoClient(MONGO_URI)
db = client["Tradeanalysis"]

collection_test = db["collection_test"]
collection_cal = db["Calculative_Fields"]
print("✅ MongoDB connected successfully.\n")

# ===========================================
# ✅ Remote Server (SFTP Upload)
# ===========================================
SERVER_IP = "194.163.33.221"
PORT = 65002
USERNAME = "u613550457"
PASSWORD = "OOPc@123"

REMOTE_BASE = "/home/u613550457/domains/stocknewshub.com/public_html/quarterly"

# ===========================================
# ✅ Input / Output Paths
# ===========================================
jsonFilePath = "/var/www/html/stocksupdates/quarterly/128.json"
processedIndexFile = "/var/www/html/stocksupdates/quarterly/processed_index.txt"
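# Local staging folder for generated JSON (relative to the working directory) before SFTP upload.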
output_folder = "json/"
os.makedirs(output_folder, exist_ok=True)

# ===========================================
# ✅ SFTP HELPERS
# ===========================================
def sftp_makedirs(sftp, remote_path):
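    """Create each missing directory along remote_path, like `mkdir -p` (SFTP has no recursive mkdir)."""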
    dirs = remote_path.strip("/").split("/")
    current = ""
    for d in dirs:
        current += "/" + d
        try:
            sftp.stat(current)
        except FileNotFoundError:
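            # paramiko's stat() raises IOError(ENOENT) for a missing path, which
            # Python 3 surfaces as FileNotFoundError.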
            sftp.mkdir(current)

def upload_to_multiple_folders(local_file, subfolders):
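    """Upload local_file into every listed subfolder under REMOTE_BASE over a single SFTP session."""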
    transport = None
    sftp = None
    try:
        transport = paramiko.Transport((SERVER_IP, PORT))
        transport.connect(username=USERNAME, password=PASSWORD)
        sftp = paramiko.SFTPClient.from_transport(transport)

        filename = os.path.basename(local_file)

        for folder in subfolders:
            full_path = f"{REMOTE_BASE}/{folder}".replace("//", "/")
            sftp_makedirs(sftp, full_path)
            remote_file = f"{full_path}/{filename}"
            sftp.put(local_file, remote_file)
            print(f"📤 Uploaded: {remote_file}")

    except Exception as e:
        print(f"❌ SFTP upload failed: {e}")
    finally:
        if sftp:
            sftp.close()
        if transport:
            transport.close()

# ===========================================
# ✅ Load JSON file
# ===========================================
if not os.path.exists(jsonFilePath):
    print(f"❌ ERROR: Input file {jsonFilePath} not found.")
    sys.exit(1)

with open(jsonFilePath, "r", encoding="utf-8") as f:
    jsonArray = json.load(f)

jsoncount = len(jsonArray)
print(f"📦 Total records loaded: {jsoncount}")

if os.path.exists(processedIndexFile):
    with open(processedIndexFile, "r") as f:
        content = f.read().strip()
    processedIndex = int(content) if content.isdigit() else 0
else:
    processedIndex = 0

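# A stale or out-of-range pointer (e.g. after the input file shrank) restarts the scan from 0.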
if processedIndex >= jsoncount or processedIndex < 0:
    processedIndex = 0

print(f"🔖 Starting from index: {processedIndex}\n")

# ===========================================
# ✅ Runtime Config
# ===========================================
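# BSE quarter code, passed as the qtr= query parameter; assumed to match the input file name (128.json).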
j = 128
base_url = "https://www.bseindia.com/corporates/Results.aspx"

TIMEOUT_SECONDS = 180  # 🔥 3 minutes
startTime = time.time()

# resume pointer (THIS is what we save)
processedIndexToSave = processedIndex

# ===========================================
# ✅ HTTP Session
# ===========================================
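# One shared Session keeps cookies and reuses connections across all requests.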
session = requests.Session()
session.headers.update({
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/128.0.0.0",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Accept-Language": "en-US,en;q=0.9",
    "Referer": "https://www.bseindia.com/",
    "Connection": "keep-alive",
    "Upgrade-Insecure-Requests": "1",
})

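# Warm-up visit primes session cookies; BSE may reject cookieless requests, but failure here is non-fatal.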
try:
    print("🌐 Performing warm-up request...")
    session.get(base_url, timeout=20)
    print("✅ Warm-up done.\n")
except Exception as e:
    print(f"⚠️ Warm-up failed: {e}\n")

# ===========================================
# ✅ MAIN LOOP
# ===========================================
index = processedIndex
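# Advance index on skip/error; on success the record is deleted from jsonArray,
# so the same index then points at the next record.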
while index < len(jsonArray):

    if time.time() - startTime >= TIMEOUT_SECONDS:
        print(f"⏳ Timeout reached ({TIMEOUT_SECONDS}s). Saving progress...")
        break

    document = jsonArray[index]
    sc_code = document.get("bse_code")
    print(f"\n🔹 [{index+1}] Processing BSE Code: {sc_code}")

    if not sc_code:
        index += 1
        processedIndexToSave = index
        continue

    # --- Image data ---
    try:
        image_data = collection_test.find_one(
            {"bse_code": sc_code},
            {"_id": 0, "image_url": 1, "image_id": 1}
        )
        imageurl = image_data.get("image_url") if image_data else ""
        imgid = image_data.get("image_id") if image_data else ""
    except Exception as e:
        print(f"❌ Mongo error (image): {e}")
        index += 1
        processedIndexToSave = index
        continue

    isin_code = document.get("isin_number")
    company_name = document.get("screener_name")

    # --- Market Cap ---
    try:
        marketCap_doc = collection_cal.find_one(
            {"bse_code": sc_code},
            {"Market Cap": 1}
        )
        raw_cap = marketCap_doc.get("Market Cap", "0") if marketCap_doc else "0"
        marketCaps = float(str(raw_cap).replace(",", ""))
    except Exception:
        marketCaps = 0

    url = f"{base_url}?Code={sc_code}&Company=in&qtr={j}"
    print(f"🌐 Fetching: {url}")

    try:
        response = session.get(url, timeout=40)
        html_content = response.text
    except Exception as e:
        print(f"❌ HTTP error: {e}")
        index += 1
        processedIndexToSave = index
        continue

    if "ContentPlaceHolder1_tbl_typeID" not in html_content:
        print("⚠️ Results table not found.")
        index += 1
        processedIndexToSave = index
        continue

    tree = html.fromstring(html_content)
    table = tree.xpath("//table[@id='ContentPlaceHolder1_tbl_typeID']")
    if not table:
        print("⚠️ Results table not parseable.")
        index += 1
        processedIndexToSave = index
        continue

    heading = tree.xpath("//span[@id='ContentPlaceHolder1_lblresulttype']/text()")
    tableHeading = heading[0] if heading else ""

    rows = table[0].xpath(".//tr")
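    # Tables with five or fewer rows are assumed to carry only headers/notes, no line items.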
    if len(rows) <= 5:
        print("⚠️ Insufficient rows.")
        index += 1
        processedIndexToSave = index
        continue

    output_doc = {
        "bsecode": sc_code,
        "isin_number": isin_code,
        "company_name": company_name,
        "image_id": imgid,
        "image_url": imageurl,
        "Market Cap": marketCaps,
        "qtr_code": j,
        "BSE/NSE": "BSE"
    }

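    # Result rows appear to alternate label/value <td> cells; pair them two at a time.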
    for row in rows:
        cols = [c.text_content().strip() for c in row.xpath(".//td")]
        for i in range(0, len(cols), 2):
            if i + 1 < len(cols) and cols[i]:
                output_doc[cols[i]] = cols[i + 1]

    output_doc["Data Type"] = "Consolidated" if "Consolidated" in tableHeading else "Standalone"

    output_path = os.path.join(output_folder, f"{sc_code}.json")

    try:
        with open(output_path, "w", encoding="utf-8") as f:
            json.dump(output_doc, f, indent=4, ensure_ascii=False)

        print(f"✅ JSON saved: {output_path}")

        upload_to_multiple_folders(
            output_path,
            ["push/json", "video/json", "video/shorts/json", "socialMedia/json","view/json"]
        )

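        # Drop the processed record and rewrite the source file so an
        # interrupted run never reprocesses it.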
        del jsonArray[index]
        with open(jsonFilePath, "w", encoding="utf-8") as f:
            json.dump(jsonArray, f, indent=4, ensure_ascii=False)

        processedIndexToSave = index  # stay at same index after delete

    except Exception as e:
        print(f"❌ File error: {e}")
        index += 1
        processedIndexToSave = index

# ===========================================
# ✅ SAVE PROGRESS
# ===========================================
with open(processedIndexFile, "w") as f:
    f.write(str(processedIndexToSave))

print(f"\n🏁 Stopped safely. Resume from index: {processedIndexToSave}")
