Datasets: delete `token_counter.py` (file removed; diff: +0 −43)
|
@@ -1,43 +0,0 @@
|
|
| 1 |
-
"""Annotate a ShareGPT-style JSON dataset with per-item token counts.

Reads a JSON file containing a list of items, each optionally holding a
``conversations`` list of ``{"message": str, ...}`` dicts.  For every such
item it records:

* ``token_length``         — total cl100k_base tokens across all messages
* ``average_token_length`` — token_length / number of conversations

and writes the annotated data to a new JSON file, printing a per-item and
overall summary.
"""
import json

import tiktoken

# File paths and tokenizer are configuration, not logic — keep them in one place.
INPUT_PATH = 'your_json_file_here.json'
OUTPUT_PATH = 'json_file_w_token_counts.json'
ENCODING_NAME = "cl100k_base"


def annotate_token_counts(data, encoding):
    """Mutate *data* in place, adding token-count fields to each item.

    Args:
        data: list of dicts; items without a ``conversations`` key are skipped.
        encoding: a tiktoken ``Encoding`` (anything with ``.encode(str) -> list``).

    Returns:
        list[int]: the ``token_length`` of each annotated item, in order.
    """
    token_lengths = []
    for item in data:
        if 'conversations' not in item:
            continue
        # str.join over a generator instead of quadratic `all_content += ...`.
        # Messages lacking a 'message' key contribute nothing, matching the
        # original `if 'message' in conversation` guard.
        all_content = "".join(
            conv.get('message', '') for conv in item['conversations']
        )
        num_tokens = len(encoding.encode(all_content))
        item['token_length'] = num_tokens
        token_lengths.append(num_tokens)
        # Compute the average in the same pass (the original needed a second
        # loop).  Guard against an empty conversations list.
        num_contents = len(item['conversations'])
        if num_contents > 0:
            item['average_token_length'] = num_tokens / num_contents
    return token_lengths


def main():
    # Hoisted out of the loop: one encoding lookup for the whole run,
    # not one per item.
    encoding = tiktoken.get_encoding(ENCODING_NAME)

    with open(INPUT_PATH, 'r') as json_file:
        data = json.load(json_file)

    annotate_token_counts(data, encoding)

    # Save the updated data with token counts to a new JSON file.
    with open(OUTPUT_PATH, 'w') as updated_json_file:
        json.dump(data, updated_json_file, indent=4)

    # Per-item summary; items that were skipped print `None`.
    for i, item in enumerate(data):
        print(f"Item {i + 1} - Average Token Length: {item.get('average_token_length')}")

    # Guard the overall average: the original divided by len(data) and would
    # raise ZeroDivisionError on an empty input file.
    if data:
        total_average_token_length = sum(
            item.get('average_token_length', 0) for item in data
        ) / len(data)
        print(f"Total Average Token Length for all items: {total_average_token_length}")


if __name__ == "__main__":
    main()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|