realtmxi committed on
Commit
bdf0373
·
verified ·
1 Parent(s): c904b46

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. .gitattributes +1 -0
  2. analyze_cc100.py +47 -0
  3. si.txt +3 -0
.gitattributes CHANGED
@@ -57,3 +57,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
 
 
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
+ si.txt filter=lfs diff=lfs merge=lfs -text
analyze_cc100.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from math import ceil
3
+
4
def calculate_shards_for_billion(
    file_path,
    target_sentences_per_shard=1_000_000_000,
    *,
    total_sentences=12_643_262,
    non_empty_sentences=11_977_669,
    total_chars=1_508_593_606,
):
    """Print a shard-size analysis for growing a text corpus to a target sentence count.

    Reads the on-disk size of *file_path* and, combined with the corpus
    metrics, reports how much more data is needed to fill one shard of
    ``target_sentences_per_shard`` sentences and the estimated storage that
    shard would require.

    Args:
        file_path: Path to the corpus text file (only its size is read).
        target_sentences_per_shard: Desired sentences per shard (default 1B).
        total_sentences: Total sentence count of the current corpus
            (defaults to the previously measured CC-100 figures).
        non_empty_sentences: Count of non-empty sentences in the corpus.
        total_chars: Total character count of the corpus.

    Returns:
        None. All results are printed to stdout.

    Raises:
        OSError: If *file_path* does not exist or is unreadable.
    """
    # Get file size
    file_size = os.path.getsize(file_path)
    file_size_gb = file_size / (1024**3)

    # Calculate metrics
    gb_per_million_sentences = file_size_gb / (total_sentences / 1_000_000)
    chars_per_sentence = total_chars / non_empty_sentences

    # Calculate how many more sentences needed for one full shard
    # (negative if the corpus already exceeds the target).
    sentences_needed_for_shard = target_sentences_per_shard - total_sentences

    print("\nShard Analysis for 1B Sentences per Shard:")
    print("-" * 50)
    print("Current metrics:")
    print(f"- File size: {file_size_gb:.2f} GB")
    print(f"- Total sentences: {total_sentences:,}")
    print(f"- Non-empty sentences: {non_empty_sentences:,}")
    print(f"- Average characters per sentence: {chars_per_sentence:.1f}")
    print(f"- GB per million sentences: {gb_per_million_sentences:.2f}")

    print(f"\nFor reaching 1B sentences per shard:")
    print(f"- Additional sentences needed: {sentences_needed_for_shard:,}")
    print(f"- Scaling factor needed: {target_sentences_per_shard / total_sentences:.1f}x more data")

    # Estimate storage requirements.
    # FIX: scale by the actual target (in millions of sentences) instead of a
    # hard-coded 1000, so a non-default target_sentences_per_shard is honored.
    estimated_gb_for_billion = gb_per_million_sentences * (target_sentences_per_shard / 1_000_000)
    print(f"\nStorage estimates for 1B sentences:")
    print(f"- Estimated size needed: {estimated_gb_for_billion:.1f} GB")

    # Recommendations
    print(f"\nRecommendations:")
    print(f"1. You need approximately {target_sentences_per_shard / total_sentences:.1f}x more data to reach 1B sentences")
    print(f"2. Current data would be ~{(total_sentences/target_sentences_per_shard*100):.2f}% of one full shard")
    print(f"3. For a full 1B-sentence shard, you'd need to collect about {sentences_needed_for_shard:,} more sentences")
44
+
45
# Script entry point: analyze the corpus file shipped next to this script.
if __name__ == "__main__":
    calculate_shards_for_billion("si.txt")
si.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f86f061af6b0bf50af52b6c528d08452a6a132dd12a049ed56185e2ba4fee008
3
+ size 3915469275