# adaptai/projects/elizabeth/training/create_clean_dataset.py
# Uploaded by ADAPT-Chase via the upload-large-folder tool (commit fbf3c28, verified).
#!/usr/bin/env python3
"""
Create clean training dataset with only valid examples
"""
import json
def create_clean_dataset(input_path, output_path):
    """Filter a JSONL chat dataset down to simple text-only conversations.

    Reads *input_path* line by line and keeps only records that:
      * parse as JSON objects containing a ``messages`` list,
      * contain no tool calls (any message whose string form mentions
        ``tool_call`` disqualifies the whole record), and
      * yield at least two well-formed messages, each a dict with both
        ``role`` and ``content`` keys.

    Kept records are normalized to ``{'messages': [{'role': ..., 'content': ...}]}``
    with ``content`` coerced to ``str``, and written to *output_path* as JSONL.

    Args:
        input_path: Path to the source ``.jsonl`` file.
        output_path: Path the cleaned ``.jsonl`` file is written to.

    Returns:
        The list of cleaned record dicts that were written.
    """
    clean_data = []
    # Explicit UTF-8 so reads don't depend on the platform's locale encoding.
    with open(input_path, 'r', encoding='utf-8') as f:
        for line in f:
            # Keep the try body minimal: only the parse can raise here.
            try:
                data = json.loads(line)
            except json.JSONDecodeError:
                continue  # skip malformed or blank lines
            # json.loads can return non-dicts (arrays, strings, numbers);
            # the original crashed with AttributeError on .get -- guard instead.
            if not isinstance(data, dict):
                continue
            messages = data.get('messages', [])
            # Skip examples containing tool calls (complex structure).
            if any('tool_call' in str(msg) for msg in messages):
                continue
            # Keep only messages that carry both required fields.
            valid_messages = [
                {'role': msg['role'], 'content': str(msg['content'])}
                for msg in messages
                if isinstance(msg, dict) and 'role' in msg and 'content' in msg
            ]
            # A usable training example needs at least a prompt and a reply.
            if len(valid_messages) >= 2:
                clean_data.append({'messages': valid_messages})
    # Write the cleaned records back out as JSONL (UTF-8, non-ASCII preserved).
    with open(output_path, 'w', encoding='utf-8') as f:
        for entry in clean_data:
            f.write(json.dumps(entry, ensure_ascii=False) + '\n')
    print(f"✅ Created clean dataset with {len(clean_data)} examples")
    print(f"💾 Saved to: {output_path}")
    return clean_data
# --- Script entry: build the cleaned training split ---------------------
# Hard-coded source/destination paths for this one-off cleaning run.
_TRAIN_SOURCE = "/home/x/adaptai/aiml/e-train-1/elizabeth_tooluse_minipack_v1.jsonl"
_TRAIN_OUTPUT = "/home/x/adaptai/aiml/e-train-1/clean_training_data.jsonl"

clean_train = create_clean_dataset(_TRAIN_SOURCE, _TRAIN_OUTPUT)
print(f"Total clean examples: {len(clean_train)}")