# Source: readCtrl_lambda/code/data_creation/dataset_creation_subclaim_support_gpt5.py
# Author: mshahidul — "Initial commit of readCtrl code without large models" (commit 030876e)
from openai import OpenAI
import json, os
import tqdm

# Load the prompt template (v3); it contains an {{INPUT_TEXT}} placeholder
# that each source article is substituted into below.
with open(
    "/home/mshahidul/readctrl/prompts/syn_dataset_subclaims_support_check_v3.txt",
    "r",
    encoding="utf-8",
) as f:
    prompt_template = f.read()

# Load the translated source articles (English->Bengali, items 0-200) that
# will be plugged into the prompt. Explicit UTF-8 because the data contains
# Bengali text and must not depend on the locale's default encoding.
source_path = "/home/mshahidul/readctrl/data/translated_data/multiclinsum_gs_train_en2bn_gemma_(0-200).json"
with open(source_path, "r", encoding="utf-8") as f:
    source_data = json.load(f)

# Read the OpenAI API key from a local credentials file (expects {"openai": "..."}).
api_file = "/home/mshahidul/api_new.json"
with open(api_file, "r", encoding="utf-8") as f:
    api_keys = json.load(f)
openai_api_key = api_keys["openai"]

client = OpenAI(api_key=openai_api_key)
def _strip_code_fence(text):
    """Remove a surrounding ```json ... ``` Markdown fence, if present.

    Unlike a blanket str.replace of every "```", this only trims fences at
    the edges of the reply, so backticks that appear inside JSON string
    values are left intact.
    """
    stripped = text.strip()
    if stripped.startswith("```"):
        stripped = stripped[3:]
        # Drop an optional language tag right after the opening fence.
        if stripped.lower().startswith("json"):
            stripped = stripped[4:]
    if stripped.rstrip().endswith("```"):
        stripped = stripped.rstrip()[:-3]
    return stripped.strip()


def openai_return(prompt, model="gpt-5"):
    """Send a prompt to the chat-completions API and parse the reply as JSON.

    Args:
        prompt: Fully rendered prompt text sent as the user message.
        model: Chat model name (default "gpt-5").

    Returns:
        The parsed JSON value when the (fence-stripped) reply is valid JSON;
        otherwise the raw cleaned text so the caller can store/inspect it.
    """
    response = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ],
    )
    content = response.choices[0].message.content.strip()
    cleaned = _strip_code_fence(content)
    try:
        return json.loads(cleaned)
    except json.JSONDecodeError:
        # Best-effort: keep the raw text rather than crashing the whole run.
        print("⚠️ JSON parse failed — storing raw text.")
        return cleaned
# Save path for the new dataset generated from translated_fulltext.
save_dir = "/home/mshahidul/readctrl/data/finetuning_data/new_v2"
os.makedirs(save_dir, exist_ok=True)
save_path = os.path.join(save_dir, "finetune_dataset_subclaim_support_bn.json")

# Resume support: reload any previously saved results.
res = []
if os.path.exists(save_path):
    with open(save_path, "r", encoding="utf-8") as f:
        res = json.load(f)

# Resume by id rather than by count: a count-based start index drifts
# whenever articles with empty text are skipped (res grows slower than the
# loop index), which would re-process or silently skip items on resume.
done_ids = {entry.get("id") for entry in res}

for item in tqdm.tqdm(source_data):
    item_id = item.get("id")
    # Skip items already handled in a previous run. Items without an id are
    # never skipped here, so id-less records are (cheaply) re-examined.
    if item_id is not None and item_id in done_ids:
        continue

    input_text = item.get("translated_fulltext", "").strip()
    if not input_text:
        continue

    # Fill the prompt template with the current article text.
    prompt = prompt_template.replace("{{INPUT_TEXT}}", input_text)
    model_output = openai_return(prompt, model="gpt-5")

    res.append(
        {
            "id": item_id,
            "input_text": input_text,
            "model_output": model_output,
        }
    )
    done_ids.add(item_id)

    # Checkpoint every 2 samples so a crash loses at most one result.
    if len(res) % 2 == 0:
        with open(save_path, "w", encoding="utf-8") as f:
            json.dump(res, f, indent=2, ensure_ascii=False)
        print(f"Saved {len(res)} samples so far.")

# Final save (covers the odd-count tail not caught by the checkpoint above).
with open(save_path, "w", encoding="utf-8") as f:
    json.dump(res, f, indent=2, ensure_ascii=False)