AiAF committed on
Commit
f31d6a6
·
verified ·
1 Parent(s): fc33cb3

upload create_board_sft_dataset.py

Browse files

Uploading the script used to automate dataset creation. The input must be a board folder containing thread text files in the same format as those found in the lesserfield/4chan-datasets repo (https://huggingface.co/datasets/lesserfield/4chan-datasets).

Files changed (1) hide show
  1. create_board_sft_dataset.py +139 -0
create_board_sft_dataset.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ import sys
4
+ import re
5
+ from pathlib import Path
6
+ import argparse
7
+ import subprocess
8
+ import importlib
9
+
10
def install_tqdm_if_needed():
    """Check for the `tqdm` package and install it via pip if missing.

    Uses the current interpreter (`sys.executable -m pip`) so the package
    lands in the same environment this script runs in. Exits the process
    with status 1 if the installation fails, after telling the user to
    install tqdm manually.
    """
    try:
        importlib.import_module('tqdm')
    except ImportError:
        print("`tqdm` library not found. Installing...")
        try:
            subprocess.check_call([sys.executable, "-m", "pip", "install", "tqdm"])
        except subprocess.CalledProcessError as e:
            # Surface the actual pip failure instead of discarding `e`
            # (the original message was an f-string with no placeholder).
            print(f"Error: Failed to install tqdm ({e}). Please install it manually using 'pip install tqdm'.")
            sys.exit(1)
21
+
22
def _parse_posts(content):
    """Split raw thread text into an ordered {post_id: post_body} mapping.

    Posts are delimited by '--- ' markers; the first line of each chunk is
    expected to be the numeric post id and the remaining lines are the post
    body. Chunks with a non-numeric first line or an empty body are skipped.
    Insertion order is preserved, so the first entry is the OP.
    """
    posts_map = {}
    for chunk in content.split('--- '):
        chunk = chunk.strip()
        if not chunk:
            continue
        lines = chunk.splitlines()
        if not (lines and lines[0].strip().isdigit()):
            continue
        post_id = lines[0].strip()
        body = "\n".join(lines[1:]).strip()
        if body:
            posts_map[post_id] = body
    return posts_map


def _make_conversation(system_prompt, human_text, assistant_text):
    """Build one SFT record in the ShareGPT-style 'conversations' format."""
    return {
        "conversations": [
            {"from": "system", "value": system_prompt},
            {"from": "human", "value": human_text},
            {"from": "assistant", "value": assistant_text}
        ]
    }


def create_sft_dataset(input_folder, output_file):
    """
    Scans a directory for .txt thread files, parses them, and creates a
    JSONL dataset suitable for Supervised Fine-Tuning (SFT).

    Two kinds of conversations are emitted per thread:
      * direct replies — a post quoting '>>id' is paired as assistant
        against each quoted post (human);
      * standalone posts — a non-OP post with no quote links is paired as
        assistant against the OP (human).

    Exits with status 1 when the input folder is missing, the output
    directory cannot be created, or no .txt files are found.
    """
    # Degrade gracefully when tqdm is unavailable (e.g. the automatic
    # install in __main__ was skipped): iterate without a progress bar.
    try:
        from tqdm import tqdm
    except ImportError:
        def tqdm(iterable, **_kwargs):
            return iterable
        tqdm.write = print

    folder_path = Path(input_folder)
    output_path = Path(output_file)

    if not folder_path.is_dir():
        print(f"Error: The input folder '{input_folder}' does not exist.")
        sys.exit(1)

    try:
        output_path.parent.mkdir(parents=True, exist_ok=True)
    except Exception as e:
        print(f"Error: Could not create output directory '{output_path.parent}'. Error: {e}")
        sys.exit(1)

    thread_files = list(folder_path.glob("*.txt"))

    if not thread_files:
        print(f"Error: No .txt files found in '{input_folder}'.")
        sys.exit(1)

    print(f"Found {len(thread_files)} thread files. Processing...")

    total_conversations = 0
    system_prompt = "You are an anonymous message board user. You are opinionated and use informal language. You discuss various topics."

    # 4chan post numbers are currently 8-9 digits; quote links look like '>>12345678'.
    reply_pattern = re.compile(r'>>(\d{8,9})')

    with open(output_path, 'w', encoding='utf-8') as f_out:
        for file_path in tqdm(thread_files, desc="Processing Threads"):
            try:
                content = file_path.read_text(encoding='utf-8')

                posts_map = _parse_posts(content)
                if not posts_map:
                    continue

                # The first parsed post is the OP (dict preserves insertion order).
                op_id = next(iter(posts_map))
                op_content = posts_map[op_id]

                # Direct replies: pair each quoting post with every post it quotes.
                for post_content in posts_map.values():
                    reply_ids = reply_pattern.findall(post_content)
                    if not reply_ids:
                        continue
                    assistant_text = post_content.strip()
                    for parent_id in reply_ids:
                        # Only quotes that resolve within this thread count.
                        if parent_id in posts_map:
                            record = _make_conversation(system_prompt, posts_map[parent_id], assistant_text)
                            f_out.write(json.dumps(record) + '\n')
                            total_conversations += 1

                # Standalone posts: non-OP posts with no quote links are
                # treated as implicit replies to the OP.
                for post_id, post_content in posts_map.items():
                    if post_id != op_id and not reply_pattern.search(post_content):
                        record = _make_conversation(system_prompt, op_content, post_content.strip())
                        f_out.write(json.dumps(record) + '\n')
                        total_conversations += 1

            except Exception as e:
                # Best-effort: a bad file is reported but does not abort the run.
                tqdm.write(f"Warning: Could not read or process file {file_path}. Error: {e}")

    print(f"\nProcessing complete.")
    print(f"Successfully created SFT dataset with {total_conversations} conversations.")
    print(f"Output file: {output_path}")
119
+
120
if __name__ == "__main__":
    # Make sure the progress-bar dependency is present before doing any work.
    install_tqdm_if_needed()

    arg_parser = argparse.ArgumentParser(
        description="Create an SFT dataset in JSONL format from a folder of message board thread files.",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    arg_parser.add_argument(
        "input_folder",
        help="The path to the folder containing your .txt thread files.",
    )
    arg_parser.add_argument(
        "-o", "--output_file",
        default="sft_dataset.jsonl",
        help="The name of the output .jsonl file. Defaults to 'sft_dataset.jsonl'.",
    )

    cli_args = arg_parser.parse_args()
    create_sft_dataset(cli_args.input_folder, cli_args.output_file)