rajveerb and Claude Haiku 4.5 committed
Commit · 7bd86c9
Parent(s): 299e4c2
Suppress console output from workload stats scripts and save to tables/ directory
- anns/compute_workload_stats.py: Replace all print() statements with file writes
* Add --output-dir parameter (default: "tables")
* Save output to workload_stats_anns.txt
- crawl/compute_workload_stats.py: Replace all print() statements with file writes
* Add --output-dir parameter (default: "tables")
* Save output to workload_stats_crawler.txt
No console output - all statistics are written to files in the specified output directory.
Co-Authored-By: Claude Haiku 4.5 <noreply@anthropic.com>
- anns/compute_workload_stats.py +29 -22
- crawl/compute_workload_stats.py +29 -22
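As a usage sketch (the invocations are hypothetical: any input arguments the scripts may require are omitted, and paths assume the repository root):

    python anns/compute_workload_stats.py --output-dir tables
    python crawl/compute_workload_stats.py --output-dir tables

Since "tables" is the default for --output-dir, this is equivalent to running the scripts without the flag; the reports land in tables/workload_stats_anns.txt and tables/workload_stats_crawler.txt.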
anns/compute_workload_stats.py
CHANGED
@@ -35,6 +35,9 @@ def main():
     parser.add_argument("--tokenizer-model",
                         default="meta-llama/Llama-3.1-8B-Instruct",
                         help="HuggingFace tokenizer model")
+    parser.add_argument("--output-dir",
+                        default="tables",
+                        help="Output directory for statistics file")
 
     args = parser.parse_args()
 
@@ -122,28 +125,32 @@ def main():
         except Exception as e:
             continue
 
-    # Compute and …
-    […21 further removed lines not shown…]
+    # Compute statistics and save to file
+    os.makedirs(args.output_dir, exist_ok=True)
+    output_file = os.path.join(args.output_dir, "workload_stats_anns.txt")
+
+    with open(output_file, 'w') as f:
+        f.write("\n" + "=" * 70 + "\n")
+        f.write("ANNS WORKLOAD STATISTICS\n")
+        f.write("=" * 70 + "\n")
+
+        if total_query_tokens:
+            total_query_tokens = np.array(total_query_tokens)
+            f.write(f"\nTotal Tokens per Query (n={len(total_query_tokens)})\n")
+            f.write(f" Mean: {total_query_tokens.mean():.0f} tokens\n")
+            f.write(f" P50: {np.percentile(total_query_tokens, 50):.0f} tokens\n")
+            f.write(f" P75: {np.percentile(total_query_tokens, 75):.0f} tokens\n")
+            f.write(f" P95: {np.percentile(total_query_tokens, 95):.0f} tokens\n")
+
+        if query_durations:
+            query_durations = np.array(query_durations)
+            f.write(f"\nQuery Duration (n={len(query_durations)})\n")
+            f.write(f" Mean: {query_durations.mean():.3f} seconds\n")
+            f.write(f" P50: {np.percentile(query_durations, 50):.3f} seconds\n")
+            f.write(f" P75: {np.percentile(query_durations, 75):.3f} seconds\n")
+            f.write(f" P95: {np.percentile(query_durations, 95):.3f} seconds\n")
+
+        f.write("=" * 70 + "\n")
 
 
 if __name__ == "__main__":
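The saving pattern added above (create the directory, then write formatted percentiles) is easy to reuse. A minimal self-contained sketch of the same flow; the sample values and file name here are hypothetical, not from the commit:

    # Minimal sketch of the makedirs-then-write pattern used above.
    # Sample values and file name are illustrative only.
    import os
    import numpy as np

    query_durations = np.array([0.12, 0.34, 0.50, 0.91, 1.70])
    os.makedirs("tables", exist_ok=True)
    with open(os.path.join("tables", "example_stats.txt"), "w") as f:
        f.write(f"Query Duration (n={len(query_durations)})\n")
        f.write(f" Mean: {query_durations.mean():.3f} seconds\n")
        for p in (50, 75, 95):
            f.write(f" P{p}: {np.percentile(query_durations, p):.3f} seconds\n")

Note that exist_ok=True makes repeated runs idempotent, and mode 'w' overwrites any previous report rather than appending. The hunks above do not show the import block, so os and numpy are presumably already imported in both scripts.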
crawl/compute_workload_stats.py
CHANGED
@@ -97,6 +97,9 @@ def main():
                         type=int,
                         default=None,
                         help="Maximum number of queries to process")
+    parser.add_argument("--output-dir",
+                        default="tables",
+                        help="Output directory for statistics file")
 
     args = parser.parse_args()
 
@@ -138,28 +141,32 @@ def main():
         total_tokens_list.append(result['total_tokens'])
         total_time_list.append(result['total_time'])
 
-    # Compute and …
-    […21 further removed lines not shown…]
+    # Compute statistics and save to file
+    os.makedirs(args.output_dir, exist_ok=True)
+    output_file = os.path.join(args.output_dir, "workload_stats_crawler.txt")
+
+    with open(output_file, 'w') as f:
+        f.write("\n" + "=" * 70 + "\n")
+        f.write("CRAWLER WORKLOAD STATISTICS\n")
+        f.write("=" * 70 + "\n")
+
+        if total_tokens_list:
+            total_tokens = np.array(total_tokens_list)
+            f.write(f"\nQuery Total Tokens (n={len(total_tokens)})\n")
+            f.write(f" Mean: {total_tokens.mean():.0f} tokens\n")
+            f.write(f" P50: {np.percentile(total_tokens, 50):.0f} tokens\n")
+            f.write(f" P75: {np.percentile(total_tokens, 75):.0f} tokens\n")
+            f.write(f" P95: {np.percentile(total_tokens, 95):.0f} tokens\n")
+
+        if total_time_list:
+            total_time = np.array(total_time_list)
+            f.write(f"\nTotal Collection Time (n={len(total_time)})\n")
+            f.write(f" Mean: {total_time.mean():.3f} seconds\n")
+            f.write(f" P50: {np.percentile(total_time, 50):.3f} seconds\n")
+            f.write(f" P75: {np.percentile(total_time, 75):.3f} seconds\n")
+            f.write(f" P95: {np.percentile(total_time, 95):.3f} seconds\n")
+
+        f.write("=" * 70 + "\n")
 
 
 if __name__ == "__main__":
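A quick way to verify the new behavior (statistics on disk, nothing on the console) is to read the reports back. The file names below come from the commit; the directory assumes the default --output-dir, and the check script itself is a hypothetical addition:

    # Hypothetical check: print each saved report if present.
    from pathlib import Path

    for name in ("workload_stats_anns.txt", "workload_stats_crawler.txt"):
        report = Path("tables") / name
        if report.exists():
            print(report.read_text())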