# first_stage/rna_dataset.py
# (Hugging Face Hub upload metadata: user Yimingbear, revision f9073ae, verified)
import os
import random
import math
import torch
import json
from tqdm import tqdm
import numpy as np
from torch.utils.data import Dataset, DataLoader
import torch.nn.functional as F
from torch_geometric.data import Data
import pickle
from itertools import product
from nucleotides.load_nucleotide import get_standard_nucleotide
from scipy.spatial.transform import Rotation
import copy
from torch_geometric.data import Batch
# NOTE(review): these module-level counters/accumulators are never read or
# updated anywhere in this file — presumably leftovers from earlier debugging
# of the N1/N9 bead selection (see `fix_N_bug`). Confirm before removing.
N1 = 0
N9 = 0
N_dis_list = []
def continuous_cropping(data, rna_max_len):
    """Crop a random contiguous window of `rna_max_len` residues from `data`.

    A start index is drawn uniformly at random; every per-residue attribute
    (features, coordinates, embeddings, masks, residue ids) is sliced to the
    window, edges with any endpoint outside the window are dropped, and the
    surviving edge indices are shifted into the window's coordinate frame.

    Args:
        data: graph-like object with per-residue attributes (`x`, optional
            `pos`/`struc_emb`/`protenix_emb`/`bead_pos`, `pos_mask`, `res_id`,
            `residue_list`) plus `edge_index`/`edge_attr`.
        rna_max_len: number of residues to keep.

    Returns:
        The same `data` object, cropped in place.
    """
    rna_len = data.x.size(0)
    # Nothing to do when the RNA already fits inside the window.
    if rna_len <= rna_max_len:
        return data
    start_idx = random.randint(0, rna_len - rna_max_len)
    end_idx = start_idx + rna_max_len
    data.x = data.x[start_idx:end_idx]
    if hasattr(data, 'pos'):
        data.pos = data.pos[start_idx:end_idx]
    # add struc_emb [0403 by TIANRUI]
    # struc_emb only exists on the all-atom (`new_aa`) representation, so
    # guard the access like the other optional attributes (an unguarded
    # access crashes on bead-level data, which is built without struc_emb).
    if hasattr(data, 'struc_emb'):
        data.struc_emb = data.struc_emb[start_idx:end_idx]
    # add protenix_emb [1406 by YIMING]
    if hasattr(data, 'protenix_emb'):
        data.protenix_emb = data.protenix_emb[start_idx:end_idx]
    if hasattr(data, 'bead_pos'):
        data.bead_pos = data.bead_pos[start_idx:end_idx]
    # Keep only edges with BOTH endpoints inside the window, then re-index
    # them relative to the window start.
    edge_mask = (data.edge_index >= start_idx) & (data.edge_index < end_idx)
    edge_mask = edge_mask.all(dim=0)
    data.edge_index = data.edge_index[:, edge_mask] - start_idx
    data.edge_attr = data.edge_attr[edge_mask]
    data.pos_mask = data.pos_mask[start_idx:end_idx]
    data.res_id = data.res_id[start_idx:end_idx]
    data.residue_list = data.residue_list[start_idx:end_idx]
    return data
def spatial_cropping(data, rna_max_len):
    """Crop the `rna_max_len` residues spatially closest to a random center.

    One residue is chosen uniformly at random; the `rna_max_len` residues
    whose reference atom lies closest (Euclidean distance) to it are kept,
    in their original sequence order. Per-residue attributes are re-indexed
    and edges touching removed residues are dropped.

    Args:
        data: graph-like object with per-residue attributes and
            `edge_index`/`edge_attr` (see `continuous_cropping`).
        rna_max_len: number of residues to keep.

    Returns:
        The same `data` object, cropped in place.
    """
    rna_len = data.x.size(0)
    ## pick one reference 3D coordinate per residue; the layout of `pos`
    ## differs between representations (flat [N, 9] P/C4'/N beads,
    ## [N, 3, 3] beads, or all-atom [N, 24, 3]).
    if hasattr(data, 'pos') and data.pos.shape[1] == 9 and data.pos.dim() == 2:
        c_pos = data.pos[:, 3:6]  # C4' bead of the flat layout, shape = [num_res, 3]
    elif hasattr(data, 'pos') and data.pos.dim() == 3 and data.pos.shape[1] == 3:
        c_pos = data.pos[:, 0]
    elif hasattr(data, 'pos') and data.pos.dim() == 3:
        c_pos = data.pos[:, 6]  # c4' is the 7th in pos
    elif hasattr(data, 'bead_pos'):
        c_pos = data.bead_pos[:, 3:6]  # shape = [num_res, 3]
    rand_center = c_pos[random.randint(0, rna_len - 1)].view(1, 3)
    dist = torch.linalg.vector_norm(c_pos - rand_center, dim=1)  # shape = [num_res]
    topk = torch.topk(dist, rna_max_len, largest=False).indices
    topk = topk.sort().values  # keep residues in sequence order
    ## update the node features
    data.x = data.x[topk]
    if hasattr(data, 'pos'):
        data.pos = data.pos[topk]
    # add struc_emb [0403 by TIANRUI]
    # struc_emb only exists on the all-atom (`new_aa`) representation; guard
    # it like the other optional attributes (an unguarded access crashes on
    # bead-level data, which is built without struc_emb).
    if hasattr(data, 'struc_emb'):
        data.struc_emb = data.struc_emb[topk]
    # add protenix_emb [1406 by YIMING]
    if hasattr(data, 'protenix_emb'):
        data.protenix_emb = data.protenix_emb[topk]
    if hasattr(data, 'bead_pos'):
        data.bead_pos = data.bead_pos[topk]
    data.pos_mask = data.pos_mask[topk]
    data.res_id = data.res_id[topk]
    data.residue_list = [data.residue_list[i] for i in topk.tolist()]
    ## create a mapping from the original edge index to the new edge index
    ## (-1 marks residues that were cropped away)
    edge_mapping = torch.ones(rna_len, dtype=torch.long) * -1
    edge_mapping[topk] = torch.arange(rna_max_len, dtype=torch.long)
    ## update the edge index: drop edges touching removed residues
    new_edge_index = edge_mapping[data.edge_index.long()]
    edge_mask = (new_edge_index >= 0).all(dim=0)  # shape = [num_edges]
    data.edge_index = new_edge_index[:, edge_mask]
    data.edge_attr = data.edge_attr[edge_mask]
    return data
def cropping(data, rna_max_len, spatial_crop_ratio=0.5):
    """Randomly apply one of the two cropping strategies to `data`.

    With probability `spatial_crop_ratio` a spatial (nearest-neighbour) crop
    is used; otherwise a continuous (sequence-window) crop.
    """
    use_spatial = random.random() < spatial_crop_ratio
    crop_fn = spatial_cropping if use_spatial else continuous_cropping
    return crop_fn(data, rna_max_len)
def random_rotate_pos(data, seed=None):
    """Apply one shared random 3D rotation to every point in ``data.pos``.

    ``data.pos`` is expected to have shape [N, R, 3] (N nodes, R points per
    node); all N*R points are rotated by the same randomly drawn rotation.
    Returns the mutated ``data``.
    """
    if seed is None:
        rng = np.random.default_rng()
    else:
        rng = np.random.default_rng(seed)
    # One rotation matrix, shared by all points.
    rotation = Rotation.random(random_state=rng)
    flat = data.pos.view(-1, 3)                      # [N*R, 3]
    rotated = rotation.apply(flat)                   # numpy array [N*R, 3]
    rotated = torch.from_numpy(rotated).to(data.pos.dtype)
    data.pos = rotated.view_as(data.pos)             # back to [N, R, 3]
    return data
def random_translation_pos(data, translation_scale=0.1):
    """Translate every coordinate in ``data.pos`` by one shared random offset.

    The offset is drawn as ``translation_scale * N(0, I_3)`` and broadcast
    over the leading [N, R] dimensions of ``data.pos``. Returns the mutated
    ``data``.
    """
    offset = translation_scale * torch.randn(3, dtype=data.pos.dtype)
    data.pos = data.pos + offset.view(1, 1, 3)
    return data
def aug(data):
    """Data augmentation: a random rotation followed by a random translation.

    Both transforms are always applied (earlier probabilistic variants were
    removed).
    """
    return random_translation_pos(random_rotate_pos(data))
class RNADataset(Dataset):
    """Dataset of RNA structure graphs loaded from a preprocessed `.npz` file.

    Each item is a fully-connected per-residue graph
    (`torch_geometric.data.Data`): node features are one-hot residue types
    (A/G/U/C) and edge features come from a per-RNA secondary-structure
    matrix. Coordinates are stored either per-atom (`new_aa=True`,
    pos shape [num_res, 24, 3]), as 3 beads per residue (`new_res=True`,
    pos shape [num_res, 3, 3]), or as a flat 9-dim [P, C4', N1/N9] vector
    per residue (legacy layout).

    In 'train' mode the dataset is indexed by sequence and `__getitem__`
    returns an augmented `(data, data_copy)` pair (contrastive-style); in
    'valid'/'test' mode it is indexed by individual conformation and returns
    a single `data`.
    """

    def __init__(self, processed_path, rna_max_len=50, spatial_crop_ratio=0.5, disable_ss=False, mode='train', new_aa=True, new_res=False, fix_N_bug=False, struc_emb_path = '/home/hui007/rna/DiffRNA/data/RNAData20250323/struc_emb.pt', use_protenix_emb=True, protenix_emb_path='/home/hui007/protenix_0612/Protenix/protenix_embedding'):
        """Load the preprocessed archive and build the cluster/sequence index.

        Args:
            processed_path: path to the preprocessed `.npz` archive; must
                contain pickled `clstr` and `data_list` entries plus one
                `ss_<id>` array per secondary structure.
            rna_max_len: maximum residues kept per sample (longer RNAs are
                cropped).
            spatial_crop_ratio: probability of spatial vs. continuous crop.
            disable_ss: stored flag (not used inside this class).
            mode: one of 'train', 'valid', 'test'.
            new_aa: use the all-atom [24, 3] coordinate representation.
            new_res: use the 3-bead [3, 3] representation (only if not new_aa).
            fix_N_bug: restrict the N bead to N9 for purines / N1 for
                pyrimidines instead of accepting any N1/N9 atom.
            struc_emb_path: path to per-residue-type structural embeddings.
            use_protenix_emb: attach per-RNA Protenix embeddings if available.
            protenix_emb_path: directory of `<full_id>.pt` Protenix embeddings.

        Raises:
            FileNotFoundError: if `processed_path` does not exist.
        """
        super(RNADataset, self).__init__()
        self.new_aa = new_aa
        self.new_res = new_res
        self.rna_max_len = rna_max_len
        self.residue_types = {'A':0, 'G':1, 'U':2, 'C':3}
        self.fix_N_bug = fix_N_bug
        self.use_protenix_emb = use_protenix_emb
        self.protenix_emb_path = protenix_emb_path
        if not os.path.exists(processed_path):
            raise FileNotFoundError(f'processed_path = `{processed_path}` not found! please preprocess it first!')
        ## load data
        assert processed_path.endswith('.npz')
        np_data = np.load(processed_path)
        # cluster_mapping: nested list — clusters of sequences, each sequence
        # being a list of conformation indices into data_list.
        self.cluster_mapping = pickle.loads(np_data['clstr'])
        # print(len(self.cluster_mapping), len(self.cluster_mapping[0]), len(self.cluster_mapping[0][0]), self.cluster_mapping[0][0])
        self.data_list = pickle.loads(np_data['data_list'])
        # Secondary-structure matrices, keyed by the integer id embedded in
        # the npz key (keys look like 'ss_<id>').
        self.ss_list = {}
        for ss_id in np_data.keys():
            if ss_id.startswith('ss'):
                new_id = int(ss_id.split('_')[1])
                self.ss_list[new_id] = np_data[ss_id]
        self.rna_class_idx = {'[unclassed]': 0, 'solo': 1, 'protein-RNA': 2, 'DNA-RNA': 3}
        # NOTE(review): rnaclass is never populated in this file, so every
        # sample currently maps to '[unclassed]' — confirm whether it is
        # meant to be filled externally.
        self.rnaclass = dict()
        # add struc_emb [0403 by TIANRUI]
        # Per-residue-type structural embeddings, indexed by residue letter.
        self.struc_emb = torch.load(struc_emb_path)
        self.mode = mode
        self.disable_ss = disable_ss
        # Global coordinate std used to normalize positions in train mode.
        self.pos_std = 20.3689
        self.spatial_crop_ratio = spatial_crop_ratio
        # Maps residue letter -> (..., atom-name -> atom-index) table used to
        # scatter atoms into the fixed 24-slot all-atom layout.
        self.aa_mapping = get_standard_nucleotide(True, False)
        self.total_conf_num = len(self.data_list)
        # Sanity check: every conformation belongs to exactly one sequence.
        assert self.total_conf_num == sum([len(idx_list) for cluster in self.cluster_mapping for idx_list in cluster])
        if self.mode == 'train':
            # Flatten clusters into a per-sequence list of conformation ids;
            # training indexes by sequence and samples a conformation.
            self.num_seq = sum([len(cluster) for cluster in self.cluster_mapping])
            self.seq_list = [inner for outer in self.cluster_mapping for inner in outer]

    def __len__(self):
        """Number of sequences (train) or conformations (valid/test)."""
        if self.mode == 'train':
            return self.num_seq
        elif self.mode in {'valid', 'test'}:
            return self.total_conf_num
        else:
            raise ValueError(f'Unsupported mode: {self.mode}')

    def len(self):
        # Convenience alias so frameworks that call `.len()` (PyG-style
        # datasets) also work.
        return self.__len__()

    def get_idx_data(self, idx: int):
        """Build the graph `Data` object for conformation `idx` in data_list.

        Returns a `Data` with one-hot node features `x`, fully-connected
        `edge_index`/`edge_attr` from the secondary structure, coordinates
        `pos`, a `pos_mask` marking MISSING positions (True = missing), and
        metadata (`res_id`, `residue_list`, `rna_seq_id`, `class`).
        """
        data = self.data_list[idx]
        full_id = data['full_id']
        ## construct fully connected graph based on secondary structure
        num_res = len(data['data'])
        ss = self.ss_list[data['ss_id']]
        ss = torch.from_numpy(ss).to(torch.float)
        assert ss.shape[0] == num_res
        # All (i, j) pairs, including self-loops: a dense N x N edge grid.
        indices = np.mgrid[0:num_res, 0:num_res].reshape(2, -1)
        full_edge_index = torch.from_numpy(indices) # [2, N_edge]
        full_edge_attr = ss.reshape(-1, ss.size(-1)) # [N_edge, dim]
        rna_data = data['data']
        x = []
        residue_list = []
        pos = []
        pos_mask = []
        # add struc_emb [0403 by TIANRUI]
        struc_emb_list = []
        for _, res, atom_coords in rna_data:
            x.append(self.residue_types[res])
            residue_list.append(res)
            atom_list = atom_coords['atom_list']
            coord_list = atom_coords['coord_list']
            if self.new_aa:
                # All-atom layout: scatter observed atoms into 24 fixed slots;
                # unfilled slots stay zero and are flagged missing in the mask.
                atom_mapping = self.aa_mapping[res][-1]
                aa_pos = torch.zeros((24, 3), dtype=torch.float)
                aa_mask = torch.ones((24,), dtype=torch.bool)
                for i, atom in enumerate(atom_list):
                    # NOTE: this `idx` shadows the method argument; harmless
                    # because the sample was already fetched above.
                    idx = atom_mapping.get(atom, None)
                    if idx is None:
                        continue
                    idx = idx[0]
                    aa_pos[idx] = torch.from_numpy(coord_list[i])
                    aa_mask[idx] = False
                pos.append(aa_pos)
                pos_mask.append(aa_mask)
                # add struc_emb [0403 by TIANRUI]
                struc_emb = self.struc_emb[res]
                struc_emb_list.append(struc_emb)
            else:
                if self.new_res:
                    # 3-bead layout: P, C4', N1 slots (one 3D point each).
                    atom_mapping = {'P': 0, 'C4\'': 1, 'N1': 2}
                    bead_pos = torch.zeros((3, 3), dtype=torch.float)
                    bead_pos_mask = torch.ones((3,), dtype=torch.bool)
                    for i, atom in enumerate(atom_list):
                        idx = atom_mapping.get(atom, None)
                        if idx is None:
                            continue
                        bead_pos[idx] = torch.from_numpy(coord_list[i])
                        bead_pos_mask[idx] = False
                    pos.append(bead_pos)
                    pos_mask.append(bead_pos_mask)
                else:
                    # Legacy flat layout: concatenated [P, C4', N] -> 9 floats;
                    # the whole residue is masked if any bead is missing.
                    bead_idx = np.asarray([-1, -1, -1])
                    for i, atom in enumerate(atom_list):
                        if atom == 'P':
                            bead_idx[0] = i
                        elif atom == 'C4\'':
                            bead_idx[1] = i
                        else:
                            if self.fix_N_bug:
                                # Purines (A/G) use N9, pyrimidines (C/U) use N1.
                                if atom == 'N9' and (res == 'A' or res == 'G'):
                                    bead_idx[2] = i
                                elif atom == 'N1' and (res == 'C' or res == 'U'):
                                    bead_idx[2] = i
                            else:
                                if atom == 'N1' or atom == 'N9':
                                    bead_idx[2] = i
                    if (bead_idx == -1).any():
                        pos.append(torch.zeros((9,), dtype=torch.float))
                        pos_mask.append(True)
                    else:
                        bead_pos = coord_list[bead_idx].reshape(-1) # shape = [9]
                        pos.append(torch.from_numpy(bead_pos))
                        pos_mask.append(False)
        x = torch.LongTensor(x)
        x = F.one_hot(x, num_classes=4).to(torch.float)
        # rnaclass is keyed by the PDB-style prefix of full_id (before '-').
        rna_class = self.rnaclass.get(full_id.split('-')[0], '[unclassed]')
        rna_class = self.rna_class_idx[rna_class]
        if self.new_aa:
            pos = torch.stack(pos, dim=0) # shape = [num_res, res_len, 3]
            pos_mask = torch.stack(pos_mask, dim=0) # shape = [num_res, res_len]
            data = Data(x=x, pos=pos, pos_mask=pos_mask, rna_seq_id=full_id, edge_index=full_edge_index, edge_attr=full_edge_attr, res_id=torch.arange(num_res), residue_list=residue_list)
            data['class'] = rna_class
            # add struc_emb [0403 by TIANRUI]
            struc_emb = torch.stack(struc_emb_list, dim=0)
            data['struc_emb'] = struc_emb
            # add protenix embedding [1206 by YIMING]
            if self.use_protenix_emb:
                protenix_emb = os.path.join(self.protenix_emb_path, f'{full_id}.pt')
                if os.path.exists(protenix_emb):
                    data['protenix_emb'] = torch.load(protenix_emb, map_location='cpu')
                else:
                    # 384-dim zero placeholder when no embedding file exists —
                    # presumably matches the Protenix embedding width; confirm.
                    data['protenix_emb'] = torch.zeros(x.shape[0], 384, device='cpu')
        else:
            if self.new_res:
                pos_mask = torch.stack(pos_mask, dim=0)
            else:
                pos_mask = torch.BoolTensor(pos_mask)
            pos = torch.stack(pos, dim=0)
            # NOTE(review): the bead representations get no struc_emb /
            # protenix_emb attributes, so downstream code must not assume
            # they exist.
            data = Data(x=x, pos=pos, pos_mask=pos_mask, rna_seq_id=full_id, edge_index=full_edge_index, edge_attr=full_edge_attr, res_id=torch.arange(num_res), residue_list=residue_list)
            data['class'] = rna_class
        return data

    def __getitem__(self, idx):
        """Return a centered sample (test) or an augmented pair (train/valid).

        Train mode samples a random conformation of sequence `idx`; the pair
        `(data, data_copy)` is independently cropped and augmented. Test mode
        returns a single centered (but not std-normalized) sample.
        """
        if self.mode == 'train':
            assert 0 <= idx < self.num_seq
            # Sample one conformation uniformly from this sequence's cluster.
            data_id = random.choices(self.seq_list[idx])[0]
            data = self.get_idx_data(data_id)
        elif self.mode in {'valid', 'test'}:
            data = self.get_idx_data(idx)
        else:
            raise ValueError(f'Unsupported mode: {self.mode}')
        if self.mode == 'test':
            # Test mode: only center the coordinates (no cropping, no
            # augmentation, no division by pos_std — presumably intentional
            # so metrics are computed in angstroms; confirm).
            if self.new_aa:
                ## center the data
                pos_unmask = ~data.pos_mask # shape = [num_res, res_len]
                # Masked slots are zero, so summing all rows equals summing
                # only the observed atoms; divide by the observed count.
                centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp(pos_unmask.sum(), min=1) # shape = [1, 3]
                data.pos = (data.pos - centroid.unsqueeze(0)) * pos_unmask.unsqueeze(-1)
            else:
                if self.new_res:
                    ## center the data
                    pos_unmask = ~data.pos_mask # shape = [num_res, 3]
                    centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp(pos_unmask.sum(), min=1) # shape = [1, 3]
                    data.pos = (data.pos - centroid.view(1, 1, 3)) * pos_unmask.view(-1, 3, 1)
                else:
                    ## center the data
                    pos_unmask = ~data.pos_mask # shape = [num_res]
                    # Each residue contributes 3 beads, hence the * 3.
                    centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp((pos_unmask.sum() * 3), min=1) # shape = [1, 3]
                    data.pos = (data.pos - centroid.repeat(1, 3)) * pos_unmask.view(-1, 1)
            return data
        # Train/valid: build two independently cropped+augmented views.
        data_copy = copy.deepcopy(data)
        rna_len = data.x.size(0)
        if rna_len > self.rna_max_len:
            # Each view is cropped independently (different random windows).
            data = cropping(data, self.rna_max_len, self.spatial_crop_ratio)
            data_copy = cropping(data_copy, self.rna_max_len, self.spatial_crop_ratio)
        if self.new_aa:
            ## center the data and normalize by the global coordinate std
            pos_unmask = ~data.pos_mask # shape = [num_res, res_len]
            centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp(pos_unmask.sum(), min=1) # shape = [1, 3]
            data.pos = (data.pos - centroid.unsqueeze(0)) * pos_unmask.unsqueeze(-1) / self.pos_std
            pop_unmask_copy = ~data_copy.pos_mask
            centroid_copy = data_copy.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp(pop_unmask_copy.sum(), min=1)
            data_copy.pos = (data_copy.pos - centroid_copy.unsqueeze(0)) * pop_unmask_copy.unsqueeze(-1) / self.pos_std
        else:
            # NOTE(review): in the bead paths only `data` is centered and
            # normalized — `data_copy` keeps raw coordinates; confirm this
            # asymmetry is intentional.
            if self.new_res:
                ## center the data
                pos_unmask = ~data.pos_mask # shape = [num_res, 3]
                centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp(pos_unmask.sum(), min=1) # shape = [1, 3]
                data.pos = (data.pos - centroid.view(1, 1, 3)) * pos_unmask.view(-1, 3, 1) / self.pos_std
            else:
                ## center the data
                pos_unmask = ~data.pos_mask # shape = [num_res]
                centroid = data.pos.reshape(-1, 3).sum(dim=0, keepdim=True) / torch.clamp((pos_unmask.sum() * 3), min=1) # shape = [1, 3]
                data.pos = (data.pos - centroid.repeat(1, 3)) * pos_unmask.view(-1, 1) / self.pos_std
        assert not torch.isnan(data.pos).any()
        data = aug(data)
        data_copy = aug(data_copy)
        return data, data_copy
class RNACollater:
    """Collate a list of (g1, g2) graph pairs into two PyG batches.

    Each batch gets normalized dtypes, `res_id` renamed to `seq_pos`,
    coordinates renamed to `gt_pos`, and masked positions zeroed out.
    """

    @staticmethod
    def _prepare(batch):
        # Normalize dtypes expected by the model.
        batch['edge_attr'] = batch['edge_attr'].to(torch.float)
        batch['edge_index'] = batch['edge_index'].to(torch.long)
        batch['class'] = torch.LongTensor(batch['class'])
        # Rename per-residue sequence indices.
        batch['seq_pos'] = batch.pop('res_id').to(torch.long)
        # Whichever coordinate field exists becomes the ground truth.
        if hasattr(batch, 'bead_pos'):
            batch['gt_pos'] = batch.pop('bead_pos')
        elif hasattr(batch, 'pos'):
            batch['gt_pos'] = batch.pop('pos')
        # Zero out coordinates at masked (missing) positions.
        unmask = ~batch['pos_mask'].unsqueeze(-1)
        batch['gt_pos'] = batch['gt_pos'] * unmask
        return batch

    def __call__(self, batch):
        # batch: List of tuples (g1, g2)
        g1_list, g2_list = zip(*batch)
        g1_batch = self._prepare(Batch.from_data_list(g1_list))
        g2_batch = self._prepare(Batch.from_data_list(g2_list))
        return g1_batch, g2_batch
class RNACollater_v2:
    """Collater that additionally injects Gaussian noise into coordinates.

    Same field normalization as `RNACollater`, plus a `noise` tensor and the
    perturbed coordinates `pos = gt_pos + 0.1 * noise` on each batch.
    """

    def add_noise(self, batch):
        """Attach `noise` and noisy coordinates `pos` to `batch`."""
        noise = torch.randn(batch.gt_pos.shape)  # shape = [\sum N_i, 9]
        batch['noise'] = noise
        batch['pos'] = batch.gt_pos + 0.1 * noise  # shape = [\sum N_i, 9]
        return batch

    def _prepare(self, batch):
        # Normalize dtypes and rename fields (mirrors RNACollater).
        batch['edge_attr'] = batch['edge_attr'].to(torch.float)
        batch['edge_index'] = batch['edge_index'].to(torch.long)
        batch['class'] = torch.LongTensor(batch['class'])
        batch['seq_pos'] = batch.pop('res_id').to(torch.long)
        if hasattr(batch, 'bead_pos'):
            batch['gt_pos'] = batch.pop('bead_pos')
        elif hasattr(batch, 'pos'):
            batch['gt_pos'] = batch.pop('pos')
        batch = self.add_noise(batch)
        # Zero out ground truth, noisy positions, and noise at masked slots.
        unmask = ~batch['pos_mask'].unsqueeze(-1)
        batch['gt_pos'] = batch['gt_pos'] * unmask
        batch['pos'] = batch['pos'] * unmask
        batch['noise'] = batch['noise'] * unmask
        return batch

    def __call__(self, batch):
        # batch: List of tuples (g1, g2)
        g1_list, g2_list = zip(*batch)
        g1_batch = self._prepare(Batch.from_data_list(g1_list))
        g2_batch = self._prepare(Batch.from_data_list(g2_list))
        return g1_batch, g2_batch
if __name__ == '__main__':
    # Smoke test: direct indexing, then one collated mini-batch.
    dataset = RNADataset('/home/hui007/rna/rna_repr/zhiyuan/train_data_final.npz')
    for sample_idx in range(10):
        print(dataset[sample_idx])
    loader = DataLoader(
        dataset,
        batch_size=4,
        shuffle=False,
        num_workers=0,
        drop_last=True,
        collate_fn=RNACollater(),
    )
    for g1, g2 in loader:
        print(g1)
        print(g2)
        break