import os, json, asyncio, requests, paramiko
from datetime import datetime
from lxml import etree
from playwright.async_api import async_playwright

# Config
# NSE corporate-filings endpoint for quarterly results (returns JSON).
QUARTERLY_API = "https://www.nseindia.com/api/corporates-financial-results?index=equities&period=Quarterly"
QUARTER_CODE = 124  # quarter identifier stamped into every output record
LOCAL_JSON_PATH = "124.json"  # local copy of the remaining-ISIN worklist
# SECURITY NOTE(review): hard-coded SFTP credentials committed to source —
# move these to environment variables or a secrets manager and rotate the
# password.
SERVER_IP = "194.163.33.221"
USERNAME = "u613550457"
PASSWORD = "OOPc@123"
PORT = 65002  # SFTP port
REMOTE_BASE = "/home/u613550457/public_html/Tradeanalysis/quarterly"  # upload root
REMOTE_JSON_UPLOAD_PATH = "/home/u613550457/public_html/Tradeanalysis/quarterly/view/124_test.json"

def get_first(result):
    """Return the stripped text of the first node in *result*, or None.

    None is returned when *result* is empty/None or the node's text is
    empty/None.
    """
    if not result:
        return None
    text = result[0].text
    return text.strip() if text else None
def sanitize(name):
    """Strip corporate suffixes ("limited"/"ltd", case-insensitive) and
    dot/comma punctuation from a company name, then trim whitespace.

    Accepts None (treated as empty string). Returns the cleaned name.
    """
    # Fix: proper multi-line body instead of a semicolon-crammed one-liner
    # with a mid-statement import (PEP 8). Regex and behavior are unchanged.
    import re
    return re.sub(r"(limited|ltd|\.|,)", "", name or "", flags=re.I).strip()

def parse_nse_date(raw_date):
    """Normalize an NSE date string to "DD-Mon-YY".

    Accepts either "DD-Mon-YYYY" or "YYYY-MM-DD".  Raises ValueError when
    the input matches neither format.
    """
    known_formats = ("%d-%b-%Y", "%Y-%m-%d")
    for fmt in known_formats:
        try:
            parsed = datetime.strptime(raw_date, fmt)
        except ValueError:
            continue
        return parsed.strftime("%d-%b-%y")
    raise ValueError(f"Unrecognized date format: {raw_date}")

def sftp_makedirs(sftp, remote_path):
    """Create *remote_path* and any missing parents on the SFTP server
    (mkdir -p style).  Existing directories are left untouched.
    """
    partial = ""
    for segment in remote_path.strip("/").split("/"):
        partial = f"{partial}/{segment}"
        try:
            sftp.stat(partial)
        except FileNotFoundError:
            sftp.mkdir(partial)

def upload_to_multiple_folders(local_file, subfolders):
    """Upload *local_file* into each ``REMOTE_BASE/<subfolder>`` over SFTP,
    creating missing remote directories first.

    Best-effort: any failure is printed and swallowed (matching the
    original behavior), but the SFTP session and transport are now always
    closed — the original leaked them when an exception fired mid-upload.
    """
    # Remote paths are always POSIX regardless of the local OS; os.path.join
    # would produce backslashes on Windows.
    import posixpath

    transport = None
    sftp = None
    try:
        transport = paramiko.Transport((SERVER_IP, PORT))
        transport.connect(username=USERNAME, password=PASSWORD)
        sftp = paramiko.SFTPClient.from_transport(transport)

        filename = os.path.basename(local_file)

        for folder in subfolders:
            full_path = posixpath.join(REMOTE_BASE, folder)
            sftp_makedirs(sftp, full_path)
            remote_file = posixpath.join(full_path, filename)
            sftp.put(local_file, remote_file)
            print(f"📤 Uploaded to {remote_file}")
    except Exception as e:
        print(f"❌ Upload to multiple folders failed: {e}")
    finally:
        # Fix: release resources on every path, not just on success.
        if sftp is not None:
            sftp.close()
        if transport is not None:
            transport.close()

def extract_data(xml_bytes):
    """Parse a BSE-taxonomy XBRL quarterly-results document into a flat dict.

    Reads only facts in the 'OneD' context (the current reporting period).
    Monetary values are converted from rupees to millions (divided by 1e6,
    rounded to 2 decimals) and returned as strings; missing numeric facts
    become "0" and missing text facts become "".
    """
    root = etree.fromstring(xml_bytes)
    ns = {'in-bse-fin': "http://www.bseindia.com/xbrl/fin/2020-03-31/in-bse-fin"}
    xp = lambda path: root.xpath(path, namespaces=ns)
    # gstr: first matching node's text (or ""); gnum: numeric text scaled to millions.
    def gstr(path): return get_first(xp(path)) or ""
    def gnum(path):
        val = get_first(xp(path))
        # Strip thousands separators before converting; rupees -> millions.
        return str(round(float(val.replace(',', '')) / 1_000_000, 2)) if val else "0"

    # Mapping of output field name -> XBRL concept in the 'OneD' context.
    data = {
        'bsecode': gstr("//in-bse-fin:ScripCode[@contextRef='OneD']"),
        'company_name': gstr("//in-bse-fin:NameOfTheCompany[@contextRef='OneD']"),
        'Net Sales': gnum("//in-bse-fin:RevenueFromOperations[@contextRef='OneD']"),
        'Other Income': gnum("//in-bse-fin:OtherIncome[@contextRef='OneD']"),
        'Total Income': gnum("//in-bse-fin:Income[@contextRef='OneD']"),
        'Expenditure': gnum("//in-bse-fin:Expenses[@contextRef='OneD']"),
        'Cost of Materials Consumed': gnum("//in-bse-fin:CostOfMaterialsConsumed[@contextRef='OneD']"),
        'Finance Costs': gnum("//in-bse-fin:FinanceCosts[@contextRef='OneD']"),
        'Changes in inventories of finished goods, work-in-progress and stock-in-trade': gnum("//in-bse-fin:ChangesInInventoriesOfFinishedGoodsWorkInProgressAndStockInTrade[@contextRef='OneD']"),
        'Purchases of stock-in-trade': gnum("//in-bse-fin:PurchasesOfStockInTrade[@contextRef='OneD']"),
        'Employee benefit expense': gnum("//in-bse-fin:EmployeeBenefitExpense[@contextRef='OneD']"),
        'Depreciation and amortisation expense': gnum("//in-bse-fin:DepreciationDepletionAndAmortisationExpense[@contextRef='OneD']"),
        'Profit after Interest but before Exceptional Items': gnum("//in-bse-fin:ProfitBeforeExceptionalItemsAndTax[@contextRef='OneD']"),
        'Exceptional Item': gnum("//in-bse-fin:ExceptionalItemsBeforeTax[@contextRef='OneD']"),
        'Profit (+)/ Loss (-) from Ordinary Activities before Tax': gnum("//in-bse-fin:ProfitBeforeTax[@contextRef='OneD']"),
        'Tax': gnum("//in-bse-fin:TaxExpense[@contextRef='OneD']"),
        'Net Profit (+)/ Loss (-) from Ordinary Activities after Tax': gnum("//in-bse-fin:ProfitLossForPeriodFromContinuingOperations[@contextRef='OneD']"),
        'Net Profit': gnum("//in-bse-fin:ProfitLossForPeriod[@contextRef='OneD']"),
        'Current tax': gnum("//in-bse-fin:CurrentTax[@contextRef='OneD']"),
        'Deferred tax': gnum("//in-bse-fin:DeferredTax[@contextRef='OneD']"),
        'Other Comprehensive Income Net of Taxes': gnum("//in-bse-fin:OtherComprehensiveIncomeNetOfTaxes[@contextRef='OneD']"),
        'Total Comprehensive Income for the Period': gnum("//in-bse-fin:ComprehensiveIncomeForThePeriod[@contextRef='OneD']"),
        'Share of profit(loss) of associates and joint ventures': gnum("//in-bse-fin:ShareOfProfitLossOfAssociatesAndJointVenturesAccountedForUsingEquityMethod[@contextRef='OneD']"),
        # EPS figures are kept as raw strings (per-share values, not scaled).
        'Basic EPS for continuing operation': gstr("//in-bse-fin:BasicEarningsLossPerShareFromContinuingOperations[@contextRef='OneD']"),
        'Diluted EPS for continuing operation': gstr("//in-bse-fin:DilutedEarningsLossPerShareFromContinuingOperations[@contextRef='OneD']"),
        'Basic for discontinued & continuing operation': gstr("//in-bse-fin:BasicEarningsLossPerShareFromContinuingAndDiscontinuedOperations[@contextRef='OneD']"),
        'Diluted for discontinued & continuing operation': gstr("//in-bse-fin:DilutedEarningsLossPerShareFromContinuingAndDiscontinuedOperations[@contextRef='OneD']"),
        'Data Type': gstr("//in-bse-fin:NatureOfReportStandaloneConsolidated[@contextRef='OneD']")
    }

    # Scan the itemised operating-expense contexts for an "other expenses"
    # line item; fall back to the aggregate OneD fact if none is labelled so.
    for i in range(1, 50):
        # NOTE(review): f"...0{i}D" yields "...010D".."...049D" for i >= 10,
        # not "...10D".."...49D" — confirm whether f"{i:02d}" was intended.
        ctx = f"OneOperatingExpenses0{i}D"
        desc = gstr(f"//in-bse-fin:DescriptionOfOtherExpenses[@contextRef='{ctx}']")
        if desc.lower() in ["other expenses", "other expense", "others"]:
            data["Other Expenses"] = gnum(f"//in-bse-fin:OtherExpenses[@contextRef='{ctx}']")
            break
    else:
        # for/else: executes only when the loop found no matching context.
        data["Other Expenses"] = gnum("//in-bse-fin:OtherExpenses[@contextRef='OneD']")
    return data

async def fetch_nse_quarterly():
    """End-to-end pipeline for one run:

    1. Fetch the remaining-ISIN worklist (124.json) from the web server.
    2. Open a Playwright Firefox session against nseindia.com to obtain
       Akamai bot-protection cookies, then pull the quarterly-results API.
    3. For each filing matching a worklist ISIN, download its XBRL document,
       extract financials, write one JSON file per company locally, and
       upload it to several remote folders via SFTP.
    4. Rewrite the worklist without the processed ISINs and upload it back.
    """
    print("📥 Fetching 124.json from server URL...")
    try:
        res = requests.get("https://stocknewshub.com/quarterly/view/124_test.json")
        isin_raw = res.json()
        print(f"✅ Loaded {len(isin_raw)} ISINs from URL.")
    except Exception as e:
        # Without the worklist nothing can be processed; abort quietly.
        print(f"❌ Failed to fetch 124.json: {e}")
        return

    # Index worklist entries by ISIN for O(1) lookup against API results.
    isin_map = {x['isin_number']: x for x in isin_raw if 'isin_number' in x}

    async with async_playwright() as p:
        browser = await p.firefox.launch(headless=True)
        context = await browser.new_context(user_agent="Mozilla/5.0")
        page = await context.new_page()

        # Visiting the homepage first lets the browser acquire the Akamai
        # cookies required for the API request below to succeed.
        print("🌐 Visiting NSE homepage to solve Akamai...")
        await page.goto("https://www.nseindia.com", timeout=60000)
        await page.wait_for_timeout(3000)

        print("📦 Fetching Quarterly Results API...")
        response = await context.request.get(QUARTERLY_API, headers={"Accept": "application/json"})
        json_data = await response.json()
        print(f"✅ Fetched {len(json_data)} results from API.")

        # Keep only filings whose ISIN is on the worklist.  NOTE(review):
        # "bank" != 'B' presumably excludes bank-format filings — confirm
        # against the API's field semantics.
        valid = [x for x in json_data if x.get("isin") in isin_map and x.get("bank") != 'B']
        print(f"✅ Valid ISIN entries to process: {len(valid)}")

        for item in valid:
            # The "xbrl" field may contain multiple <br>-separated URLs;
            # only the first is used.
            xml_url = item["xbrl"].split("<br>")[0]
            print(f"➡️ Processing: {item['symbol']} | {xml_url}")
            try:
                response = await page.goto(xml_url, timeout=90000)
                if not response or not response.ok:
                    print("❌ Failed to load XML")
                    continue
                xml_text = await response.text()
                xml_bytes = xml_text.encode("utf-8")
            except Exception as e:
                # Skip this filing; remaining items are still attempted.
                print(f"❌ Error fetching XML: {e}")
                continue

            data = extract_data(xml_bytes)
            meta = isin_map[item['isin']]

            # Merge API/worklist metadata into the extracted financials.
            data.update({
                "isin_number": item["isin"],
                "Type": item["audited"],
                "Date Begin": parse_nse_date(item["fromDate"]),
                "Date End": parse_nse_date(item["toDate"]),
                "Description": "Amount (Rs. million)",
                "qtr_code": QUARTER_CODE,
                "image_id": meta.get("image_id"),
                "image_url": meta.get("image_url", ""),
                "BSE/NSE": "NSE"
            })

            bse = data.get("bsecode")
            # NOTE(review): cname is computed but never used below.
            cname = sanitize(data.get("company_name", item['symbol']))
            # NOTE(review): hard-coded local output directory — the run
            # fails here if it does not exist on this host.
            filename = f"/var/www/html/stocksupdates/quarterly/json/{bse}.json"
            with open(filename, "w") as f:
                json.dump(data, f, indent=4)

            # Fan the per-company JSON out to every consumer folder.
            upload_to_multiple_folders(
                filename,
                ["post/json", "push/json", "video/json", "view/json", "video/shorts/json"]
            )
            for platform in ["twitter", "facebook", "insta"]:
                upload_to_multiple_folders(filename, [f"socialMedia/json/{platform}"])

            print(f"✅ Saved & uploaded: {bse} - {data['company_name']}")

        # Drop processed ISINs from the worklist and persist it locally.
        # NOTE(review): i['isin_number'] is accessed unguarded here — an
        # entry lacking that key (filtered out of isin_map above) would
        # raise KeyError; confirm the feed always includes it.
        processed_isins = [i['isin'] for i in valid]
        new_isin_list = [i for i in isin_raw if i['isin_number'] not in processed_isins]
        with open(LOCAL_JSON_PATH, "w") as f:
            json.dump(new_isin_list, f, indent=4)

        # Push the trimmed worklist back so the next run skips these ISINs.
        try:
            transport = paramiko.Transport((SERVER_IP, PORT))
            transport.connect(username=USERNAME, password=PASSWORD)
            sftp = paramiko.SFTPClient.from_transport(transport)
            sftp.put(LOCAL_JSON_PATH, REMOTE_JSON_UPLOAD_PATH)
            sftp.close()
            transport.close()
            print(f"🧹 Updated 124.json uploaded to server.")
        except Exception as e:
            print(f"❌ Failed to upload updated 124.json: {e}")

        print("🏁 Done!")

# Script entry point: run the full fetch/parse/upload pipeline once.
if __name__ == "__main__":
    asyncio.run(fetch_nse_quarterly())
