"""Fetch NSE integrated quarterly filing results (quarter ended 30-Jun-2025),
parse each company's XBRL financials into per-company JSON files, and upload
them to the stocknewshub.com server over SFTP."""
import asyncio
import json
import os
import re
from datetime import datetime

import paramiko
import requests
from lxml import etree
from playwright.async_api import async_playwright

# Config
QUARTERLY_API = "https://www.nseindia.com/api/integrated-filing-results?index=equities&period_ended=30-Jun-2025&type=Integrated%20Filing-%20Financials&size=5000"
QUARTER_CODE = 126
LOCAL_JSON_PATH = "/var/www/html/stocksupdates/quarterly/126.json"
SERVER_IP = "194.163.33.221"
USERNAME = "u613550457"
PASSWORD = "OOPc@123"
PORT = 65002
REMOTE_BASE = "/home/u613550457/domains/stocknewshub.com/public_html/quarterly"
REMOTE_JSON_UPLOAD_PATH = "/home/u613550457/domains/stocknewshub.com/public_html/quarterly/view/126.json"
SKIPPED_JSON_PATH = "/var/www/html/stocksupdates/quarterly/skipped_126.json"
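
# NOTE: credentials are hardcoded above for simplicity. A safer pattern (a
# sketch, assuming env vars named SFTP_USERNAME / SFTP_PASSWORD are exported):
#   USERNAME = os.environ.get("SFTP_USERNAME", USERNAME)
#   PASSWORD = os.environ.get("SFTP_PASSWORD", PASSWORD)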

def get_first(result):
    """Return the stripped text of the first matched node, or None if absent/empty."""
    return result[0].text.strip() if result and result[0].text else None

def sanitize(name):
    """Strip 'Limited'/'Ltd' suffixes and stray punctuation from a company name."""
    return re.sub(r"(limited|ltd|\.|,)", "", name or "", flags=re.I).strip()

def parse_nse_date(raw_date):
    for fmt in ("%d-%b-%Y", "%Y-%m-%d"):
        try:
            return datetime.strptime(raw_date, fmt).strftime("%d-%b-%y")
        except ValueError:
            continue
    raise ValueError(f"Unrecognized date format: {raw_date}")
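
# e.g. parse_nse_date("30-Jun-2025") and parse_nse_date("2025-06-30")
# both normalise to "30-Jun-25".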

def sftp_makedirs(sftp, remote_path):
    """Create remote_path on the SFTP server, making each missing component in turn."""
    current = ""
    for d in remote_path.strip("/").split("/"):
        current += "/" + d
        try:
            sftp.stat(current)
        except FileNotFoundError:
            sftp.mkdir(current)
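
# Usage sketch (hypothetical path): sftp_makedirs(sftp, "/home/user/a/b/c")
# stat()s each component and mkdir()s any level that is missing.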

def upload_to_multiple_folders(local_file, subfolders):
    """Upload local_file into each subfolder under REMOTE_BASE, creating remote dirs as needed."""
    transport = None
    try:
        transport = paramiko.Transport((SERVER_IP, PORT))
        transport.connect(username=USERNAME, password=PASSWORD)
        sftp = paramiko.SFTPClient.from_transport(transport)

        filename = os.path.basename(local_file)

        for folder in subfolders:
            # Join remote paths with "/" explicitly; os.path.join would use the
            # local OS separator, which breaks SFTP paths on Windows hosts.
            full_path = f"{REMOTE_BASE}/{folder}"
            sftp_makedirs(sftp, full_path)
            remote_file = f"{full_path}/{filename}"
            sftp.put(local_file, remote_file)
            print(f"📤 Uploaded to {remote_file}")

        sftp.close()
    except Exception as e:
        print(f"❌ Upload to multiple folders failed: {e}")
    finally:
        # Close the transport whether or not the uploads succeeded.
        if transport is not None:
            transport.close()

def extract_data(xml_bytes):
    """Parse a SEBI 'in-capmkt' XBRL filing and pull out the key financial figures."""
    root = etree.fromstring(xml_bytes)
    ns = {'in-capmkt': "http://www.sebi.gov.in/xbrl/2025-01-31/in-capmkt"}
    xp = lambda path: root.xpath(path, namespaces=ns)
    def gstr(path): return get_first(xp(path)) or ""
    def gnum(path):
        # Strip thousands separators and scale to Rs. million, 2 decimal places.
        val = get_first(xp(path))
        return str(round(float(val.replace(',', '')) / 1_000_000, 2)) if val else "0"
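
    # Worked example of gnum's scaling: a raw "12,345,678" (rupees) becomes
    # "12.35", i.e. Rs. 12.35 million, matching the "Amount (Rs. million)"
    # description attached to each record below.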

    data = {
        'bsecode': gstr("//in-capmkt:ScripCode[@contextRef='OneD']"),
        'isin_number' : gstr("//in-capmkt:ISIN[@contextRef='OneD']"),
        'company_name': gstr("//in-capmkt:NameOfTheCompany[@contextRef='OneD']"),
        'Net Sales': gnum("//in-capmkt:RevenueFromOperations[@contextRef='OneD']"),
        'Other Income': gnum("//in-capmkt:OtherIncome[@contextRef='OneD']"),
        'Total Income': gnum("//in-capmkt:Income[@contextRef='OneD']"),
        'Expenditure': gnum("//in-capmkt:Expenses[@contextRef='OneD']"),
        'Cost of Materials Consumed': gnum("//in-capmkt:CostOfMaterialsConsumed[@contextRef='OneD']"),
        'Finance Costs': gnum("//in-capmkt:FinanceCosts[@contextRef='OneD']"),
        'Changes in inventories of finished goods, work-in-progress and stock-in-trade': gnum("//in-capmkt:ChangesInInventoriesOfFinishedGoodsWorkInProgressAndStockInTrade[@contextRef='OneD']"),
        'Purchases of stock-in-trade': gnum("//in-capmkt:PurchasesOfStockInTrade[@contextRef='OneD']"),
        'Employee benefit expense': gnum("//in-capmkt:EmployeeBenefitExpense[@contextRef='OneD']"),
        'Depreciation and amortisation expense': gnum("//in-capmkt:DepreciationDepletionAndAmortisationExpense[@contextRef='OneD']"),
        'Profit after Interest but before Exceptional Items': gnum("//in-capmkt:ProfitBeforeExceptionalItemsAndTax[@contextRef='OneD']"),
        'Exceptional Item': gnum("//in-capmkt:ExceptionalItemsBeforeTax[@contextRef='OneD']"),
        'Profit (+)/ Loss (-) from Ordinary Activities before Tax': gnum("//in-capmkt:ProfitBeforeTax[@contextRef='OneD']"),
        'Tax': gnum("//in-capmkt:TaxExpense[@contextRef='OneD']"),
        'Net Profit (+)/ Loss (-) from Ordinary Activities after Tax': gnum("//in-capmkt:ProfitLossForPeriodFromContinuingOperations[@contextRef='OneD']"),
        'Net Profit': gnum("//in-capmkt:ProfitLossForPeriod[@contextRef='OneD']"),
        'Current tax': gnum("//in-capmkt:CurrentTax[@contextRef='OneD']"),
        'Deferred tax': gnum("//in-capmkt:DeferredTax[@contextRef='OneD']"),
        'Other Comprehensive Income Net of Taxes': gnum("//in-capmkt:OtherComprehensiveIncomeNetOfTaxes[@contextRef='OneD']"),
        'Total Comprehensive Income for the Period': gnum("//in-capmkt:ComprehensiveIncomeForThePeriod[@contextRef='OneD']"),
        'Share of profit(loss) of associates and joint ventures': gnum("//in-capmkt:ShareOfProfitLossOfAssociatesAndJointVenturesAccountedForUsingEquityMethod[@contextRef='OneD']"),
        'Basic EPS for continuing operation': gstr("//in-capmkt:BasicEarningsLossPerShareFromContinuingOperations[@contextRef='OneD']"),
        'Diluted EPS for continuing operation': gstr("//in-capmkt:DilutedEarningsLossPerShareFromContinuingOperations[@contextRef='OneD']"),
        'Basic for discontinued & continuing operation': gstr("//in-capmkt:BasicEarningsLossPerShareFromContinuingAndDiscontinuedOperations[@contextRef='OneD']"),
        'Diluted for discontinued & continuing operation': gstr("//in-capmkt:DilutedEarningsLossPerShareFromContinuingAndDiscontinuedOperations[@contextRef='OneD']"),
        'Data Type': gstr("//in-capmkt:NatureOfReportStandaloneConsolidated[@contextRef='OneD']")
    }

    # "Other expenses" is filed under enumerated contexts (OneOperatingExpenses01D,
    # 02D, ...); scan for the generic entry and fall back to the top-level tag.
    for i in range(1, 50):
        ctx = f"OneOperatingExpenses{i:02d}D"
        desc = gstr(f"//in-capmkt:DescriptionOfOtherExpenses[@contextRef='{ctx}']")
        if desc.lower() in ["other expenses", "other expense", "others"]:
            data["Other Expenses"] = gnum(f"//in-capmkt:OtherExpenses[@contextRef='{ctx}']")
            break
    else:
        data["Other Expenses"] = gnum("//in-capmkt:OtherExpenses[@contextRef='OneD']")
    return data
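
# Quick offline test for extract_data (a sketch; "sample.xml" is a hypothetical
# locally saved XBRL filing, not part of this pipeline):
#   with open("sample.xml", "rb") as fh:
#       print(json.dumps(extract_data(fh.read()), indent=2))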

async def fetch_nse_quarterly():
    print("📥 Fetching 126.json from server URL...")
    try:
        res = requests.get("https://stocknewshub.com/quarterly/view/126.json", timeout=30)
        res.raise_for_status()
        isin_raw = res.json()
        print(f"✅ Loaded {len(isin_raw)} ISINs from URL.")
    except Exception as e:
        print(f"❌ Failed to fetch 126.json: {e}")
        return

    isin_map = {x['symbol']: x for x in isin_raw if 'symbol' in x}

    async with async_playwright() as p:
        browser = await p.firefox.launch(headless=True)
        context = await browser.new_context(user_agent="Mozilla/5.0")
        page = await context.new_page()

        print("🌐 Visiting NSE homepage to solve Akamai...")
        await page.goto("https://www.nseindia.com", timeout=60000)
        await page.wait_for_timeout(3000)

        print("📦 Fetching Quarterly Results API via browser (async)...")

        page2 = await context.new_page()
        await page2.goto("https://www.nseindia.com/companies-listing/corporate-integrated-filing", timeout=60000)
        await page2.wait_for_timeout(5000)

        # Extract cookies and user agent
        cookies = await context.cookies()
        cookie_dict = {cookie['name']: cookie['value'] for cookie in cookies}
        user_agent = await page2.evaluate("() => navigator.userAgent")

        session = requests.Session()
        headers = {
            "User-Agent": user_agent,
            "Referer": "https://www.nseindia.com/",
            "Accept": "application/json",
            "Accept-Language": "en-US,en;q=0.9",
            "X-Requested-With": "XMLHttpRequest"
        }
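        # Replaying the browser-minted cookies plus a matching User-Agent lets a
        # plain requests call pass NSE's Akamai bot checks; this is best-effort,
        # since NSE can rotate its defences at any time.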

        try:
            response = session.get(QUARTERLY_API, headers=headers, cookies=cookie_dict, timeout=60)
            if response.status_code == 200:
                json_data = response.json()
                print(f"✅ Fetched {len(json_data['data'])} results from API.")
            else:
                print(f"❌ API Error: {response.status_code}")
                return
        except Exception as e:
            print(f"❌ Failed to fetch Quarterly API: {str(e)}")
            return

        valid = [
            x for x in json_data["data"]
            if isinstance(x, dict) and x.get("symbol") in isin_map and x.get("bank") != 'B'
        ]

        print(f"✅ Valid symbol entries to process: {len(valid)}")
        skipped_isins = []

        for item in valid:
            xml_url = item["xbrl"].split("<br>")[0]  # the field may hold several URLs separated by <br>; take the first
            print(f"➡️ Processing: {item['symbol']} | {xml_url}")
            try:
                response = await page.goto(xml_url, timeout=90000)
                if not response or not response.ok:
                    print("❌ Failed to load XML")
                    continue
                # Keep the raw bytes so lxml can honour the file's own encoding declaration.
                xml_bytes = await response.body()
            except Exception as e:
                print(f"❌ Error fetching XML: {e}")
                continue

            data = extract_data(xml_bytes)
            meta = isin_map[item['symbol']]

            data.update({
                # "Type": item["audited"],
                "Date Begin": '01-Apr-25',
                "symbol": item["symbol"],
                "Date End": parse_nse_date(item["qe_Date"]),
                "Description": "Amount (Rs. million)",
                "qtr_code": QUARTER_CODE,
                "image_id": meta.get("image_id"),
                "image_url": meta.get("image_url", ""),
                "BSE/NSE": "NSE"
            })

            required_keys = ["Net Sales", "Net Profit", "Basic EPS for continuing operation", "Diluted EPS for continuing operation", "Basic for discontinued & continuing operation", "Diluted for discontinued & continuing operation"]
            if all(data.get(k) in ["", "0", "0.0", 0, None] for k in required_keys):
                print(f"⚠️ Skipping {item['symbol']} as all key financial values are empty or zero.")
                skipped_isins.append(meta)
                continue

            # Skip filings with placeholder BSE codes: output files are keyed by BSE code
            if data.get("bsecode") in ["000000", "123456"]:
                print(f"⚠️ Skipping {item['symbol']}: placeholder BSE code.")
                continue

            bse = data.get("bsecode")
            cname = sanitize(data.get("company_name", item['symbol']))  # currently unused
            filename = f"/var/www/html/stocksupdates/quarterly/json/{bse}.json"
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, "w") as f:
                json.dump(data, f, indent=4)

            upload_to_multiple_folders(
                filename,
                ["post/json", "push/json", "video/json", "view/json", "video/shorts/json" , "socialMedia/json"]
            )
            # for platform in ["twitter", "facebook", "insta"]:
            #     upload_to_multiple_folders(filename, [f"socialMedia/json/{platform}"])

            print(f"✅ Saved & uploaded: {bse} - {data['company_name']}")
        
        processed_isins = [i['symbol'] for i in valid]
        new_isin_list = [i for i in isin_raw if i.get('symbol') not in processed_isins]
        with open(LOCAL_JSON_PATH, "w") as f:
            json.dump(new_isin_list, f, indent=4)
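        # The rewrite above removes every symbol matched in this run's API data,
        # so subsequent runs only poll companies whose results are still pending.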
        
        if skipped_isins:
            existing_skipped = []
            if os.path.exists(SKIPPED_JSON_PATH):
                with open(SKIPPED_JSON_PATH, "r") as f:
                    try:
                        existing_skipped = json.load(f)
                    except json.JSONDecodeError:
                        existing_skipped = []
            existing_symbols = {entry['symbol'] for entry in existing_skipped if 'symbol' in entry}
            new_entries = [entry for entry in skipped_isins if entry['symbol'] not in existing_symbols]
            combined_skipped = existing_skipped + new_entries

            with open(SKIPPED_JSON_PATH, "w") as f:
                json.dump(combined_skipped, f, indent=4)

        transport = None
        try:
            transport = paramiko.Transport((SERVER_IP, PORT))
            transport.connect(username=USERNAME, password=PASSWORD)
            sftp = paramiko.SFTPClient.from_transport(transport)
            sftp.put(LOCAL_JSON_PATH, REMOTE_JSON_UPLOAD_PATH)
            sftp.close()
            print("🧹 Updated 126.json uploaded to server.")
        except Exception as e:
            print(f"❌ Failed to upload updated 126.json: {e}")
        finally:
            if transport is not None:
                transport.close()

        print("🏁 Done!")

if __name__ == "__main__":
    asyncio.run(fetch_nse_quarterly())
