File size: 1,448 Bytes
6f9b699
 
 
a07bb50
6f9b699
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a07bb50
a31b023
efb93f5
c1d2342
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54

import json
from collections import defaultdict
from datasets import load_dataset

def check_jsonl_file(filename):
	"""Validate a JSON Lines file and print a pass/fail report.

	Checks performed:
	  * every non-blank line parses as JSON and is a JSON object (dict);
	  * each column (key) holds a single consistent value type across lines;
	  * the file ends with a newline (a missing one may indicate truncated
	    or trailing data).

	Args:
		filename: Path to the .jsonl file to validate.

	Returns:
		bool: True if every check passed, False otherwise.  (Also prints
		a human-readable report to stdout.)
	"""
	valid = True
	errors = []
	column_types = defaultdict(set)
	with open(filename, 'r', encoding='utf-8') as f:
		for i, line in enumerate(f):
			line = line.strip()
			if not line:
				continue
			try:
				obj = json.loads(line)
			except Exception as e:
				valid = False
				errors.append(f"Line {i+1}: Invalid JSON ({e})")
				continue
			if not isinstance(obj, dict):
				valid = False
				errors.append(f"Line {i+1}: Not a JSON object")
				continue
			for k, v in obj.items():
				column_types[k].add(type(v).__name__)

	# Check for consistent column types
	inconsistent = {k: v for k, v in column_types.items() if len(v) > 1}
	if inconsistent:
		valid = False
		for k, types in inconsistent.items():
			errors.append(f"Column '{k}' has inconsistent types: {types}")

	# Check for trailing data (extra non-JSON lines).  Seek to the end
	# first to learn the size: seeking to a negative offset from the end
	# of a too-short file raises OSError, so empty files must be skipped.
	with open(filename, 'rb') as f:
		f.seek(0, 2)
		if f.tell() > 0:
			f.seek(-1, 2)
			if f.read(1) != b'\n':
				# This is a real failure, so flip `valid` too —
				# otherwise the error would never be printed.
				valid = False
				errors.append("File does not end with a newline (possible trailing data)")

	# Report using the actual filename, not a hard-coded one.
	if valid:
		print(f"{filename} passed all checks.")
	else:
		print(f"{filename} failed checks:")
		for err in errors:
			print(err)
	return valid

if __name__ == "__main__":
	check_jsonl_file("data.jsonl")

	ds = load_dataset("cwolff/schemapile", split="full")
	print(f"Loaded dataset with {len(ds)} records.")
	print(ds[0])