"""Download and compile default/bankruptcy event data for model labels. Output goes to Labels/."""
|
|
import os
import csv
import json
from datetime import datetime

import pandas as pd
|
|
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DATA_DIR = os.path.join(BASE_DIR, "Labels")
LOG_FILE = os.path.join(BASE_DIR, "logs", "07_defaults.log")
os.makedirs(DATA_DIR, exist_ok=True)
os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)  # logs/ must exist before the log file is opened
|
|
log = open(LOG_FILE, 'w')
log.write(f"Default data compilation started: {datetime.now()}\n\n")
|
|
# --- [1/3] Curated energy sector default / bankruptcy events ---
print("[1/3] Saving curated energy sector default events...")
|
defaults = [
    ("Enron Corp", "ENE", "2001-12-02", "Chapter 11", "Accounting fraud, largest bankruptcy at the time"),
    ("Dynegy Inc", "DYN", "2011-11-07", "Chapter 11", "Energy trader, couldn't service debt"),
    ("Patriot Coal", "PCX", "2012-07-09", "Chapter 11", "Coal producer, declining demand"),
    ("Energy Future Holdings", "TXU", "2014-04-29", "Chapter 11", "Leveraged buyout gone wrong, $49B debt"),
    ("Samson Resources", "N/A", "2015-09-16", "Chapter 11", "Oil price crash 2014-15"),
    ("Magnum Hunter Resources", "MHR", "2015-12-15", "Chapter 11", "Oil price crash, liquidity crisis"),
    ("Penn Virginia Corp", "PVA", "2016-05-12", "Chapter 11", "Oil price crash, overleveraged"),
    ("Breitburn Energy Partners", "BBEP", "2016-05-15", "Chapter 11", "Oil price crash"),
    ("Linn Energy", "LINE", "2016-05-11", "Chapter 11", "Oil price crash, $6B debt"),
    ("Halcon Resources", "HK", "2016-07-27", "Chapter 11", "Oil price crash, high debt"),
    ("Rex Energy", "REXX", "2018-10-05", "Chapter 11", "Gas price pressure, debt load"),
    ("Weatherford International", "WFT", "2019-07-01", "Chapter 11", "Oilfield services downturn, $7.6B debt"),
    ("McDermott International", "MDR", "2020-01-21", "Chapter 11", "Cost overruns, heavy debt from CB&I merger"),
    ("Whiting Petroleum", "WLL", "2020-04-01", "Chapter 11", "COVID oil crash"),
    ("Diamond Offshore", "DO", "2020-04-26", "Chapter 11", "COVID oil crash, offshore drilling decline"),
    ("J.C. Penney (non-energy comparison)", "JCP", "2020-05-15", "Chapter 11", "COVID + retail decline"),
    ("Chesapeake Energy", "CHK", "2020-06-28", "Chapter 11", "COVID + legacy debt from gas overexpansion"),
    ("Chaparral Energy", "CHAP", "2020-08-16", "Chapter 11", "COVID oil crash"),
    ("Oasis Petroleum", "OAS", "2020-09-30", "Chapter 11", "COVID oil crash"),
    ("Denbury Resources", "DEN", "2020-07-29", "Chapter 11", "COVID oil crash, CO2 EOR focus"),
    ("Covia Holdings", "CVIA", "2020-06-29", "Chapter 11", "Frac sand demand collapse"),
    ("Superior Energy Services", "SPN", "2020-12-07", "Chapter 11", "Oilfield services downturn"),
    ("Seadrill Ltd", "SDRL", "2021-02-07", "Chapter 11", "Second filing, offshore drilling overcapacity"),
    ("Evergrande (non-energy comparison)", "3333.HK", "2021-12-09", "Default", "Real estate, cross-sector contagion example"),
]
|
|
defaults_file = os.path.join(DATA_DIR, "energy_defaults_curated.csv")
with open(defaults_file, 'w', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(["company", "ticker", "event_date", "event_type", "details"])
    writer.writerows(defaults)
|
|
| print(f" OK {len(defaults)} default events saved to {defaults_file}") |
| log.write(f"Curated defaults: {len(defaults)} events\n") |
|
|
# --- [2/3] Distress labels from price drawdowns ---
print("\n[2/3] Generating distress labels from stock price drawdowns...")
|
|
COMPANY_LIST = os.path.join(BASE_DIR, "data", "company_list.csv")
PRICES_FILE = os.path.join(BASE_DIR, "Module_1", "market_data", "all_prices.parquet")
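# Heuristic used below: a ticker is flagged "severe_distress" whenever its close falls more
# than 50% below its rolling 126-trading-day (~6-month) peak; flagged days separated by gaps
# of more than 30 calendar days are treated as separate distress episodes.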
|
|
if os.path.exists(PRICES_FILE):
    try:
        prices = pd.read_parquet(PRICES_FILE)
        companies = pd.read_csv(COMPANY_LIST)
        tickers = companies['ticker'].tolist()
|
|
        distress_events = []
        for t in tickers:
            try:
                # Price columns may be (ticker, field) or (field, ticker), depending on how
                # the parquet was written; handle both layouts.
                if isinstance(prices.columns, pd.MultiIndex) and t in prices.columns.get_level_values(0):
                    close = prices[t]['Close'].dropna()
                elif ('Close', t) in prices.columns:
                    close = prices[('Close', t)].dropna()
                else:
                    continue
|
|
                # Drawdown relative to the trailing 126-trading-day (~6-month) peak.
                rolling_max = close.rolling(126, min_periods=1).max()
                drawdown = (close - rolling_max) / rolling_max
|
|
                crisis_periods = drawdown[drawdown < -0.50]
                if not crisis_periods.empty:
                    # Split flagged days into episodes: start a new group whenever the gap
                    # between consecutive flagged dates exceeds 30 days.
                    groups = (crisis_periods.index.to_series().diff() > pd.Timedelta(days=30)).cumsum()
                    for _, group in crisis_periods.groupby(groups):
                        distress_events.append({
                            'ticker': t,
                            'distress_start': group.index[0].strftime('%Y-%m-%d'),
                            'distress_end': group.index[-1].strftime('%Y-%m-%d'),
                            'max_drawdown': f"{group.min():.2%}",
                            'label': 'severe_distress'
                        })
            except Exception:
                continue
|
|
        if distress_events:
            dd_file = os.path.join(DATA_DIR, "distress_from_drawdowns.csv")
            pd.DataFrame(distress_events).to_csv(dd_file, index=False)
            print(f" OK {len(distress_events)} distress events from drawdowns saved")
            log.write(f"Drawdown distress events: {len(distress_events)}\n")
        else:
            print(" No severe drawdown events found")
    except Exception as e:
        print(f" Error processing prices: {e}")
        log.write(f"Drawdown analysis FAILED: {e}\n")
else:
    print(" Prices file not found. Run script 01 first, then re-run this script.")
    log.write("Prices file not found, skipping drawdown analysis\n")
|
|
# --- [3/3] LoPucki BRD reference ---
print("\n[3/3] Saving LoPucki BRD reference info...")
|
|
lopucki_info = {
    "database": "Florida-UCLA-LoPucki Bankruptcy Research Database (BRD)",
    "url": "https://lopucki.law.ufl.edu",
    "coverage": "1,000+ large public company bankruptcies, Oct 1979 – Dec 2022",
    "access": "Free abbreviated version (26 fields) available online",
    "full_data": "Full dataset available for academic licensing; email the maintainers",
    "note": "No longer actively maintained as of 2022, but historical data is comprehensive",
    "how_to_use": [
        "1. Visit https://lopucki.law.ufl.edu",
        "2. Use the WebBRD search interface to find cases",
        "3. Download the Cases Spreadsheet for all data",
        "4. Cross-reference company names with our ticker list",
        "5. Use filing dates as default event timestamps for labels"
    ]
}

with open(os.path.join(DATA_DIR, "lopucki_brd_reference.json"), 'w') as f:
    json.dump(lopucki_info, f, indent=2)
print(" OK LoPucki BRD reference saved.")
|
|
| log.write(f"\nFinished: {datetime.now()}\n") |
| log.close() |
| print(f"\nLog saved to {LOG_FILE}") |
| print(f"Data saved to: {DATA_DIR}") |
|
|