# CC100-sinhala / analyze_cc100.py
# Uploaded via huggingface_hub (commit bdf0373).
import os
from math import ceil
def calculate_shards_for_billion(
    file_path,
    target_sentences_per_shard=1_000_000_000,
    total_sentences=12_643_262,
    non_empty_sentences=11_977_669,
    total_chars=1_508_593_606,
):
    """Estimate how much more data is needed to fill one shard of sentences.

    Reads the size of ``file_path`` on disk, combines it with the corpus
    metrics (which default to the previously measured CC100-Sinhala numbers),
    prints a human-readable report, and returns the computed statistics.

    Args:
        file_path: Path to the corpus text file; only its on-disk size is read.
        target_sentences_per_shard: Desired number of sentences per shard.
        total_sentences: Total sentence count measured for the corpus.
        non_empty_sentences: Count of non-empty sentences in the corpus.
        total_chars: Total character count of the corpus.

    Returns:
        dict with keys ``file_size_gb``, ``gb_per_million_sentences``,
        ``chars_per_sentence``, ``sentences_needed``, ``scaling_factor``,
        and ``estimated_gb`` (projected size of one full shard).

    Raises:
        OSError: If ``file_path`` does not exist or is unreadable.
    """
    # On-disk size of the corpus file, converted to GiB.
    file_size = os.path.getsize(file_path)
    file_size_gb = file_size / (1024**3)

    # Derived per-unit metrics.
    gb_per_million_sentences = file_size_gb / (total_sentences / 1_000_000)
    chars_per_sentence = total_chars / non_empty_sentences

    # Gap between what we have and one full shard.
    sentences_needed = target_sentences_per_shard - total_sentences
    scaling_factor = target_sentences_per_shard / total_sentences

    # Project storage for the full target (original hard-coded *1000, which
    # assumed a 1B target; scale by the actual target instead).
    estimated_gb = gb_per_million_sentences * (target_sentences_per_shard / 1_000_000)

    print("\nShard Analysis for 1B Sentences per Shard:")
    print("-" * 50)
    print("Current metrics:")
    print(f"- File size: {file_size_gb:.2f} GB")
    print(f"- Total sentences: {total_sentences:,}")
    print(f"- Non-empty sentences: {non_empty_sentences:,}")
    print(f"- Average characters per sentence: {chars_per_sentence:.1f}")
    print(f"- GB per million sentences: {gb_per_million_sentences:.2f}")
    print("\nFor reaching 1B sentences per shard:")
    print(f"- Additional sentences needed: {sentences_needed:,}")
    print(f"- Scaling factor needed: {scaling_factor:.1f}x more data")
    print("\nStorage estimates for 1B sentences:")
    print(f"- Estimated size needed: {estimated_gb:.1f} GB")
    print("\nRecommendations:")
    print(f"1. You need approximately {scaling_factor:.1f}x more data to reach 1B sentences")
    print(f"2. Current data would be ~{(total_sentences/target_sentences_per_shard*100):.2f}% of one full shard")
    print(f"3. For a full 1B-sentence shard, you'd need to collect about {sentences_needed:,} more sentences")

    return {
        "file_size_gb": file_size_gb,
        "gb_per_million_sentences": gb_per_million_sentences,
        "chars_per_sentence": chars_per_sentence,
        "sentences_needed": sentences_needed,
        "scaling_factor": scaling_factor,
        "estimated_gb": estimated_gb,
    }
if __name__ == "__main__":
file_path = "si.txt"
calculate_shards_for_billion(file_path)