ProximileAdmin committed on
Commit
a26b74a
·
verified ·
1 Parent(s): 6ab39d3

Upload 2 files

Browse files
create_diffusion_dataset.py ADDED
@@ -0,0 +1,436 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import numpy as np
3
+ import networkx as nx
4
+ from typing import List, Dict, Tuple, Set
5
+ import json
6
+ import random
7
+ from collections import defaultdict, Counter
8
+
9
class ProteinNetworkConversationDataset:
    """Build a conversational (chat-style) training dataset for protein
    network prediction from a BioGRID tab3 export.

    Pipeline: parse the BioGRID TSV -> extract small neighborhood
    subnetworks around moderately connected proteins -> render each
    subnetwork into four kinds of system/user/assistant conversations
    (full-network prediction, new-protein integration, partial-network
    completion, property-constrained generation) -> save as JSON.
    """

    def __init__(self, filename: str):
        """
        Args:
            filename: Path to a BioGRID tab3 (tab-separated) export.
        """
        self.filename = filename
        self.df = None  # raw BioGRID table, populated by load_and_parse_biogrid()
        self.graph = nx.Graph()  # reserved for graph analyses (currently unused)
        self.protein_to_id = {}  # reserved id maps (currently unused)
        self.id_to_protein = {}
        # protein symbol -> list of interaction dicts touching that protein
        self.interactions_by_protein = defaultdict(list)

    def load_and_parse_biogrid(self):
        """Load the BioGRID TSV and extract (protein_a, protein_b, type) records.

        Also populates ``self.interactions_by_protein`` for fast
        neighborhood lookups.

        Returns:
            Tuple of (interactions, proteins): a list of interaction dicts
            and a sorted list of unique protein symbols.
        """
        print("Loading BioGRID data...")
        self.df = pd.read_csv(
            self.filename,
            sep='\t',
            comment='#',
            low_memory=False,
            dtype=str
        )

        # Column positions in the tab3 format — official symbols plus the
        # experimental-system column. Adjust if the export layout differs.
        protein_a_col = 7  # e.g. MAP2K4, MYPN
        protein_b_col = 8  # e.g. FLNC
        interaction_type_col = 11  # e.g. "Two-hybrid", "physical"

        interactions = []
        protein_set = set()
        invalid = {'-', 'nan', ''}  # BioGRID placeholders / missing symbols

        # Iterate over plain column sequences instead of DataFrame.iterrows():
        # identical values, but far faster on a multi-million-row file, and no
        # blanket try/except is needed to paper over per-row access errors.
        for raw_a, raw_b, raw_type in zip(
            self.df.iloc[:, protein_a_col],
            self.df.iloc[:, protein_b_col],
            self.df.iloc[:, interaction_type_col],
        ):
            # str() also normalizes missing cells (NaN) to the string 'nan',
            # which the `invalid` filter then rejects.
            protein_a = str(raw_a).strip()
            protein_b = str(raw_b).strip()
            interaction_type = str(raw_type).strip()

            if protein_a in invalid or protein_b in invalid:
                continue

            protein_set.add(protein_a)
            protein_set.add(protein_b)

            interaction = {
                'protein_a': protein_a,
                'protein_b': protein_b,
                'interaction_type': interaction_type
            }
            interactions.append(interaction)

            # Index every interaction under both participants.
            self.interactions_by_protein[protein_a].append(interaction)
            self.interactions_by_protein[protein_b].append(interaction)

        print(f"Extracted {len(interactions)} valid interactions")
        print(f"Found {len(protein_set)} unique proteins")

        return interactions, sorted(protein_set)

    def build_network_neighborhoods(self, interactions, proteins, min_connections=3, max_connections=15):
        """Build subnetworks around moderately connected proteins.

        Very low-degree proteins yield trivial networks and hubs yield
        unwieldy ones, so only proteins with degree in
        [min_connections, max_connections] seed a neighborhood.

        Args:
            interactions: Full interaction list from load_and_parse_biogrid().
            proteins: Unused here; kept for interface compatibility.
            min_connections: Minimum degree for a seed protein.
            max_connections: Maximum degree for a seed protein.

        Returns:
            List of neighborhood dicts (see extract_neighborhood()).
        """
        # Count how many interactions each protein participates in.
        protein_degrees = Counter()
        for interaction in interactions:
            protein_degrees[interaction['protein_a']] += 1
            protein_degrees[interaction['protein_b']] += 1

        candidate_proteins = [
            protein for protein, degree in protein_degrees.items()
            if min_connections <= degree <= max_connections
        ]

        print(f"Found {len(candidate_proteins)} proteins with degree {min_connections}-{max_connections}")

        neighborhoods = []
        for protein in candidate_proteins[:500]:  # cap for processing time
            neighborhood = self.extract_neighborhood(protein, interactions, max_size=10)
            if len(neighborhood['proteins']) >= 3:  # minimum viable network
                neighborhoods.append(neighborhood)

        return neighborhoods

    def extract_neighborhood(self, center_protein, interactions, max_size=10):
        """Extract the induced subnetwork around ``center_protein``.

        Args:
            center_protein: Protein whose neighborhood to extract.
            interactions: Full interaction list; retained for interface
                compatibility — lookups use ``self.interactions_by_protein``.
            max_size: Maximum number of proteins (center included).

        Returns:
            Dict with 'center_protein', sorted 'proteins', and the
            'interactions' whose endpoints both lie in the neighborhood.
        """
        # Direct neighbors of the center protein.
        neighbors = set()
        for interaction in self.interactions_by_protein[center_protein]:
            other_protein = (interaction['protein_b'] if interaction['protein_a'] == center_protein
                             else interaction['protein_a'])
            neighbors.add(other_protein)

        # Downsample oversized neighborhoods. Sampling from a sorted list
        # keeps the choice reproducible under a fixed random seed.
        if len(neighbors) > max_size - 1:
            neighbors = set(random.sample(sorted(neighbors), max_size - 1))

        neighborhood_proteins = {center_protein} | neighbors

        # Collect interactions fully inside the neighborhood by walking the
        # per-protein index instead of rescanning the entire interaction list
        # (which made this O(total_interactions) per neighborhood). Each
        # record appears under both endpoints, so de-duplicate by identity.
        neighborhood_interactions = []
        seen_ids = set()
        for protein in neighborhood_proteins:
            for interaction in self.interactions_by_protein[protein]:
                if id(interaction) in seen_ids:
                    continue
                if (interaction['protein_a'] in neighborhood_proteins and
                        interaction['protein_b'] in neighborhood_proteins):
                    seen_ids.add(id(interaction))
                    neighborhood_interactions.append(interaction)

        return {
            'center_protein': center_protein,
            'proteins': sorted(neighborhood_proteins),
            'interactions': neighborhood_interactions
        }

    def create_conversation_examples(self, neighborhoods):
        """Create all conversation examples for diffusion training.

        Each neighborhood contributes up to four task types; each example
        is a dict of the form {"updated": [system, user, assistant]}.
        """
        conversations = []

        for neighborhood in neighborhoods:
            # Task 1: complete protein network given a protein list
            conversations.extend(self.create_protein_list_to_network_examples(neighborhood))

            # Task 2: predict interactions for a held-out protein
            conversations.extend(self.create_new_protein_prediction_examples(neighborhood))

            # Task 3: complete a partial network
            conversations.extend(self.create_partial_network_completion_examples(neighborhood))

            # Task 4: generate a network from its summary properties
            conversations.extend(self.create_network_property_examples(neighborhood))

        return conversations

    def create_protein_list_to_network_examples(self, neighborhood):
        """Task 1 — Context: list of proteins. Target: full interaction network.

        Returns:
            List with one {"updated": conversation} example.
        """
        examples = []
        proteins = neighborhood['proteins']
        interactions = neighborhood['interactions']

        network_text = self.format_network_as_text(proteins, interactions)

        system_msg = {
            "role": "system",
            "content": "You are a protein interaction prediction system. Given a list of proteins, predict all likely interactions between them based on biological knowledge."
        }

        user_msg = {
            "role": "user",
            "content": f"Predict the protein interaction network for these proteins: {', '.join(proteins)}"
        }

        assistant_msg = {
            "role": "assistant",
            "content": network_text
        }

        conversation = [system_msg, user_msg, assistant_msg]
        examples.append({"updated": conversation})

        return examples

    def create_new_protein_prediction_examples(self, neighborhood):
        """Task 2 — Context: known network + new protein. Target: its interactions.

        A randomly chosen protein is held out; the model sees the remaining
        network and must predict the held-out protein's edges.

        Returns:
            List with at most one {"updated": conversation} example (empty
            when the neighborhood is too small or the held-out protein has
            no interactions).
        """
        examples = []
        if len(neighborhood['proteins']) < 4:
            return examples

        proteins = neighborhood['proteins']
        interactions = neighborhood['interactions']

        # Hold out one protein and split the edge set around it.
        target_protein = random.choice(proteins)
        remaining_proteins = [p for p in proteins if p != target_protein]

        known_interactions = [
            i for i in interactions
            if target_protein not in [i['protein_a'], i['protein_b']]
        ]

        target_interactions = [
            i for i in interactions
            if target_protein in [i['protein_a'], i['protein_b']]
        ]

        if not target_interactions:
            return examples

        known_network_text = self.format_network_as_text(remaining_proteins, known_interactions)
        target_network_text = self.format_interactions_as_text(target_interactions)

        system_msg = {
            "role": "system",
            "content": "You are a protein interaction prediction system. Given a known protein network and a new protein, predict which proteins in the network the new protein will interact with."
        }

        user_msg = {
            "role": "user",
            "content": f"Known protein network:\n{known_network_text}\n\nNew protein to integrate: {target_protein}\n\nPredict the interactions for {target_protein}:"
        }

        assistant_msg = {
            "role": "assistant",
            "content": target_network_text
        }

        conversation = [system_msg, user_msg, assistant_msg]
        examples.append({"updated": conversation})

        return examples

    def create_partial_network_completion_examples(self, neighborhood):
        """Task 3 — Context: network with hidden interactions. Target: full network.

        Returns:
            List with at most one {"updated": conversation} example (empty
            when the neighborhood has fewer than 3 interactions).
        """
        examples = []
        proteins = neighborhood['proteins']
        interactions = neighborhood['interactions']

        if len(interactions) < 3:
            return examples

        # Hide ~1/3 of the interactions. Sample *indices* rather than testing
        # dict membership: duplicate interaction records compare equal, so an
        # `i not in hidden` filter would drop every copy, hiding more edges
        # than were sampled.
        n_hidden = max(1, len(interactions) // 3)
        hidden_idx = set(random.sample(range(len(interactions)), n_hidden))
        visible_interactions = [
            i for idx, i in enumerate(interactions) if idx not in hidden_idx
        ]

        partial_network_text = self.format_network_as_text(proteins, visible_interactions)
        complete_network_text = self.format_network_as_text(proteins, interactions)

        system_msg = {
            "role": "system",
            "content": "You are a protein interaction prediction system. Given a partial protein network, predict the complete network including missing interactions."
        }

        user_msg = {
            "role": "user",
            "content": f"Complete this partial protein network:\n{partial_network_text}"
        }

        assistant_msg = {
            "role": "assistant",
            "content": complete_network_text
        }

        conversation = [system_msg, user_msg, assistant_msg]
        examples.append({"updated": conversation})

        return examples

    def create_network_property_examples(self, neighborhood):
        """Task 4 — Context: network properties/constraints. Target: a network
        satisfying them.

        Returns:
            List with one {"updated": conversation} example.
        """
        examples = []
        proteins = neighborhood['proteins']
        interactions = neighborhood['interactions']

        # Summary statistics of the undirected network.
        n_proteins = len(proteins)
        n_interactions = len(interactions)
        # Undirected density: edges / possible edges = 2E / (N*(N-1)).
        density = (2 * n_interactions) / (n_proteins * (n_proteins - 1)) if n_proteins > 1 else 0

        # Hub proteins: the two highest-degree proteins in the neighborhood.
        protein_degrees = Counter()
        for interaction in interactions:
            protein_degrees[interaction['protein_a']] += 1
            protein_degrees[interaction['protein_b']] += 1

        hub_proteins = [p for p, degree in protein_degrees.most_common(2)]

        network_text = self.format_network_as_text(proteins, interactions)

        system_msg = {
            "role": "system",
            "content": "You are a protein interaction network generator. Given network constraints and properties, generate a biologically plausible protein network."
        }

        properties_text = (f"Generate a protein network with the following properties:\n"
                           f"- Proteins: {', '.join(proteins)}\n"
                           f"- Network density: approximately {density:.2f}\n"
                           f"- Hub proteins (highly connected): {', '.join(hub_proteins)}\n"
                           f"- Total interactions: approximately {n_interactions}")

        user_msg = {
            "role": "user",
            "content": properties_text
        }

        assistant_msg = {
            "role": "assistant",
            "content": network_text
        }

        conversation = [system_msg, user_msg, assistant_msg]
        examples.append({"updated": conversation})

        return examples

    def _group_edges_by_type(self, interactions):
        """Group canonical 'P1--P2' edge strings by interaction type.

        Endpoints are sorted so A--B and B--A render identically.
        """
        interactions_by_type = defaultdict(list)
        for interaction in interactions:
            int_type = interaction.get('interaction_type', 'physical')
            p1, p2 = sorted([interaction['protein_a'], interaction['protein_b']])
            interactions_by_type[int_type].append(f"{p1}--{p2}")
        return interactions_by_type

    def _render_edge_groups(self, interactions_by_type):
        """Render grouped edges as 'TYPE INTERACTIONS:' sections, one edge per line."""
        result = ""
        for int_type, edges in interactions_by_type.items():
            if edges:
                result += f"{int_type.upper()} INTERACTIONS:\n"
                for edge in sorted(edges):
                    result += f"  {edge}\n"
                result += "\n"
        return result

    def format_network_as_text(self, proteins, interactions):
        """Format a full network (protein list + edges + summary) as text.

        Output is deterministic for a given network: proteins and edges
        are sorted so the model sees a canonical representation.
        """
        proteins = sorted(proteins)

        result = f"PROTEINS: {', '.join(proteins)}\n\n"
        result += self._render_edge_groups(self._group_edges_by_type(interactions))
        result += f"NETWORK SUMMARY: {len(proteins)} proteins, {len(interactions)} interactions"
        return result.strip()

    def format_interactions_as_text(self, interactions):
        """Format just an edge list as text (no protein header or summary)."""
        if not interactions:
            return "No interactions predicted."

        return self._render_edge_groups(self._group_edges_by_type(interactions)).strip()

    def save_conversation_dataset(self, output_file="processed_dataset.json"):
        """Run the full pipeline and save the conversation dataset as JSON.

        Args:
            output_file: Destination path for the JSON dataset.

        Returns:
            The shuffled list of conversation examples.
        """
        # Load and process data
        interactions, proteins = self.load_and_parse_biogrid()
        neighborhoods = self.build_network_neighborhoods(interactions, proteins)

        print(f"Built {len(neighborhoods)} protein neighborhoods")

        conversations = self.create_conversation_examples(neighborhoods)

        print(f"Created {len(conversations)} conversation examples")

        # Shuffle so task types are interleaved in the saved file.
        random.shuffle(conversations)

        with open(output_file, 'w') as f:
            json.dump(conversations, f, indent=2)

        print(f"Saved dataset to {output_file}")

        # Preview a few examples (truncated) for a quick sanity check.
        print("\n=== Example Conversations ===")
        for i, conv in enumerate(conversations[:3]):
            print(f"\n--- Example {i+1} ---")
            for msg in conv["updated"]:
                print(f"{msg['role'].upper()}: {msg['content'][:200]}...")

        return conversations
410
# Usage
if __name__ == "__main__":
    creator = ProteinNetworkConversationDataset(
        "./unzipped/BIOGRID-ALL-4.4.246.tab3/BIOGRID-ALL-4.4.246.tab3.txt"
    )

    conversations = creator.save_conversation_dataset("processed_dataset.json")

    print(f"\n=== Dataset Summary ===")
    print(f"Total conversations: {len(conversations)}")

    # Classify each conversation by the first matching phrase in its
    # system prompt; each task type's prompt contains a unique marker.
    markers = [
        ("list of proteins", "protein_list_to_network"),
        ("new protein", "new_protein_integration"),
        ("partial", "partial_completion"),
        ("properties", "property_based_generation"),
    ]
    task_types = Counter()
    for conv in conversations:
        system_content = conv["updated"][0]["content"]
        for needle, label in markers:
            if needle in system_content:
                task_types[label] += 1
                break

    print("\nTask distribution:")
    for task, count in task_types.items():
        print(f"  {task}: {count}")
processed_dataset.json ADDED
The diff for this file is too large to render. See raw diff