# Source: Personal_Code / DRW / DRW-Crypto / inplemental.py
# Uploaded to the Hugging Face Hub by ChanceuxMJ via huggingface_hub
# (revision c687548, verified).
# -*- coding: utf-8 -*-
# @Time : 2025/7/4 19:42
# @Author : Lukax
# @Email : Lukarxiang@gmail.com
# @File : inplemental.py
# -*- presented: PyCharm -*-
import torch
import numpy as np
import pandas as pd
from Settings import Config
from torch.utils.data import DataLoader, TensorDataset
def getDataLoader(X, Y, hparams, device, shuffle = True):
    """Wrap feature (and optional target) arrays in a seeded DataLoader.

    Args:
        X: array-like feature matrix, converted to a float32 tensor.
        Y: optional targets; a pandas object (``.values`` is used) or an
           array.  ``None`` yields a features-only dataset (inference).
        hparams: dict providing ``'batch_size'`` and ``'seed'``.
        device: torch device the tensors are placed on.
        shuffle: whether batches are drawn in shuffled order.

    Returns:
        torch.utils.data.DataLoader over the tensor dataset, with a
        generator seeded from ``hparams['seed']`` for reproducibility.
    """
    features = torch.tensor(X, dtype=torch.float32, device=device)
    if Y is None:
        dataset = TensorDataset(features)
    else:
        raw = Y.values if hasattr(Y, 'values') else Y
        # Targets get a trailing dim so they are (N, 1), matching model output.
        targets = torch.tensor(raw, dtype=torch.float32, device=device)
        dataset = TensorDataset(features, targets.unsqueeze(1))
    rng = torch.Generator()
    rng.manual_seed(hparams['seed'])
    return DataLoader(dataset, batch_size=hparams['batch_size'],
                      shuffle=shuffle, generator=rng)
class Config:
    """Paths, feature lists, and CV settings for the DRW crypto pipeline.

    NOTE(review): this class shadows the ``Config`` imported from
    ``Settings`` at the top of the file; everything below uses this
    local definition.
    """
    TRAIN_PATH = "/AI4M/users/mjzhang/workspace/DRW/new_data/train.parquet"
    TEST_PATH = "/AI4M/users/mjzhang/workspace/DRW/new_data/test.parquet"
    SUBMISSION_PATH = "/AI4M/users/mjzhang/workspace/DRW/new_data/sample_submission.csv"
    # Original features plus additional market features
    FEATURES = [
        "X175", "X198", "X179", "X173", "X169", "X181", "X94",
        "X197", "X137", "X133", "X163", "X196", "sell_qty",
        "bid_qty", "ask_qty", "buy_qty", "volume"]
    EX_FEATURES = [
        'X598', 'X385', 'X603', 'X674', 'X415', 'X345', 'X174',
        'X302', 'X178', 'X168', 'X612', 'X421', 'X333', 'X586', 'X292'
    ]
    # Bug fix: load_data() reads Config.MLP_FEATURES, which this class never
    # defined (only EX_FEATURES), so the lookup raised AttributeError.
    # Alias the extra-feature list under the name load_data() expects.
    MLP_FEATURES = EX_FEATURES
    TARGET = "label"
    N_FOLDS = 3
    RANDOM_STATE = 42
def add_featrues1(df):
    """Derive market-microstructure features from the raw quantity columns.

    Adds interaction, imbalance, liquidity, toxicity, efficiency and
    directional columns built from ``bid_qty``, ``ask_qty``, ``buy_qty``,
    ``sell_qty`` and ``volume``.  New columns are written onto *df* in
    place; the returned frame additionally has infinities and NaNs
    replaced by 0.
    """
    eps = 1e-10  # guard against division by zero
    bid, ask = df['bid_qty'], df['ask_qty']
    buy, sell = df['buy_qty'], df['sell_qty']
    vol = df['volume']

    # Pairwise interactions between quote depth and traded quantities.
    df['bid_ask_interaction'] = bid * ask
    df['bid_buy_interaction'] = bid * buy
    df['bid_sell_interaction'] = bid * sell
    df['ask_buy_interaction'] = ask * buy
    df['ask_sell_interaction'] = ask * sell
    df['volume_weighted_sell'] = sell * vol
    df['buy_sell_ratio'] = buy / (sell + eps)
    df['selling_pressure'] = sell / (vol + eps)
    df['log_volume'] = np.log1p(vol)
    df['effective_spread_proxy'] = np.abs(buy - sell) / (vol + eps)
    df['bid_ask_imbalance'] = (bid - ask) / (bid + ask + eps)
    df['order_flow_imbalance'] = (buy - sell) / (buy + sell + eps)
    df['liquidity_ratio'] = (bid + ask) / (vol + eps)

    # === Microstructure features ===
    # Price pressure indicators.
    net_flow = buy - sell
    df['net_order_flow'] = net_flow
    df['normalized_net_flow'] = net_flow / (vol + eps)
    df['buying_pressure'] = buy / (vol + eps)
    df['volume_weighted_buy'] = buy * vol

    # Liquidity depth measures.
    depth = bid + ask
    df['total_depth'] = depth
    df['depth_imbalance'] = (bid - ask) / (depth + eps)
    df['relative_spread'] = np.abs(bid - ask) / (depth + eps)
    df['log_depth'] = np.log1p(depth)

    # Order-flow toxicity proxies.
    df['kyle_lambda'] = np.abs(net_flow) / (vol + eps)
    df['flow_toxicity'] = np.abs(df['order_flow_imbalance']) * vol
    df['aggressive_flow_ratio'] = (buy + sell) / (depth + eps)

    # Market activity indicators.
    df['volume_depth_ratio'] = vol / (depth + eps)
    df['activity_intensity'] = (buy + sell) / (vol + eps)
    df['log_buy_qty'] = np.log1p(buy)
    df['log_sell_qty'] = np.log1p(sell)
    df['log_bid_qty'] = np.log1p(bid)
    df['log_ask_qty'] = np.log1p(ask)

    # Microstructure volatility proxies.
    df['realized_spread_proxy'] = 2 * np.abs(net_flow) / (vol + eps)
    df['price_impact_proxy'] = net_flow / (depth + eps)
    df['quote_volatility_proxy'] = np.abs(df['depth_imbalance'])

    # Complex interaction terms.
    df['flow_depth_interaction'] = net_flow * depth
    df['imbalance_volume_interaction'] = df['order_flow_imbalance'] * vol
    df['depth_volume_interaction'] = depth * vol
    df['buy_sell_spread'] = np.abs(buy - sell)
    df['bid_ask_spread'] = np.abs(bid - ask)

    # Information asymmetry measures.
    df['trade_informativeness'] = net_flow / (bid + ask + eps)
    df['execution_shortfall_proxy'] = df['buy_sell_spread'] / (vol + eps)
    df['adverse_selection_proxy'] = net_flow / (depth + eps) * vol

    # Market efficiency indicators.
    df['fill_probability'] = vol / (buy + sell + eps)
    df['execution_rate'] = (buy + sell) / (depth + eps)
    df['market_efficiency'] = vol / (df['bid_ask_spread'] + eps)

    # Non-linear transformations.
    df['sqrt_volume'] = np.sqrt(vol)
    df['sqrt_depth'] = np.sqrt(depth)
    df['volume_squared'] = vol ** 2
    df['imbalance_squared'] = df['order_flow_imbalance'] ** 2

    # Relative measures.
    df['bid_ratio'] = bid / (depth + eps)
    df['ask_ratio'] = ask / (depth + eps)
    df['buy_ratio'] = buy / (buy + sell + eps)
    df['sell_ratio'] = sell / (buy + sell + eps)

    # Market stress indicators.
    df['liquidity_consumption'] = (buy + sell) / (depth + eps)
    df['market_stress'] = vol / (depth + eps) * np.abs(df['order_flow_imbalance'])
    df['depth_depletion'] = vol / (bid + ask + eps)

    # Directional indicators.
    df['net_buying_ratio'] = net_flow / (vol + eps)
    df['directional_volume'] = net_flow * np.log1p(vol)
    df['signed_volume'] = np.sign(net_flow) * vol

    # Replace infinities and NaNs before handing back to the model.
    return df.replace([np.inf, -np.inf], 0).fillna(0)
def load_data():
    """Load train/test parquet files and the sample submission CSV.

    Selects the union of Config.FEATURES and the extra feature list,
    drops rows with a missing label, runs add_featrues1 on both frames,
    and updates Config.FEATURES to the full post-engineering column list.

    Returns:
        (train, test, submission) — train/test as re-indexed DataFrames
        with engineered features, submission as read from CSV.

    Raises:
        ValueError: if labels still contain NaN after the dropna.
    """
    # Bug fix: the local Config class defines EX_FEATURES, not MLP_FEATURES,
    # so the original `Config.MLP_FEATURES` access raised AttributeError.
    # Prefer MLP_FEATURES when present, otherwise fall back to EX_FEATURES.
    extra = getattr(Config, 'MLP_FEATURES', Config.EX_FEATURES)
    features = list(set(Config.FEATURES + extra))
    train = pd.read_parquet(Config.TRAIN_PATH, columns = features + [Config.TARGET])
    train = train.dropna(subset=[Config.TARGET]).reset_index(drop=True)
    # Explicit raise instead of `assert`, which is stripped under -O.
    if train[Config.TARGET].isna().any():
        raise ValueError("label still has NaN")
    test = pd.read_parquet(Config.TEST_PATH, columns = features)
    submission = pd.read_csv(Config.SUBMISSION_PATH)
    print(f'Origin: train {train.shape}, test {test.shape}, submission {submission.shape}')
    train, test = add_featrues1(train), add_featrues1(test)
    # Record the full engineered feature set for downstream model code.
    Config.FEATURES = test.columns.tolist()
    return train.reset_index(drop = True), test.reset_index(drop = True), submission