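"""Generate stress-test inputs for a string update/query problem.

Each case emits: the initial string, the operation count, then one
operation per line. The operation semantics below are inferred from how
this generator uses them, not taken from a problem statement:
    Q l r  -- query positions l and r (1-indexed), e.g. an LCP-style query
    R i c  -- replace the character at position i with c
    I p c  -- insert character c after position p (0 = front)
"""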
def construct_inputs():
    inputs_list = []

    # Case 1: All same characters with many queries
    def case_same_chars():
        s = "a" * 1000
        ops = ["Q 1 500", "Q 100 900", "Q 1 1000", "Q 500 501"]
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)

    # Case 2: Alternating pattern with modifications
    def case_alternating():
        s = "ab" * 500
        ops = []
        ops.extend([f"Q {i} {i + 500}" for i in range(1, 11)])
        ops.extend([f"R {i} c" for i in range(1, 21, 2)])
        ops.extend([f"Q {i} {i + 100}" for i in range(1, 11)])
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 3: Heavy insertions at the beginning
    def case_heavy_insertions():
        s = "test"
        ops = []
        for _ in range(100):
            ops.append("I 0 z")
        ops.extend([f"Q 1 {50 + i}" for i in range(10)])
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 4: Maximum operations with mixed types
    def case_max_operations():
        s = "x" * 100
        ops = []
        # Add many modifications
        for i in range(1, 101):
            ops.append(f"R {i} {'abcdefghij'[i % 10]}")
        # Add insertions
        for i in range(50):
            ops.append(f"I {i} y")
        # Add queries
        for i in range(1, 51):
            ops.append(f"Q {i} {i + 50}")
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 5: Long common prefixes
    def case_long_prefixes():
        s = "a" * 500 + "b" * 500
        ops = []
        ops.extend([f"Q 1 {i}" for i in range(2, 501)])
        ops.extend([f"Q 501 {i}" for i in range(502, 1001)])
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 6: Palindromic structure
    def case_palindrome():
        s = "abcdefghijklmnopqrstuvwxyz" + "zyxwvutsrqponmlkjihgfedcba"
        ops = []
        for i in range(1, 27):
            ops.append(f"Q {i} {53 - i}")
        for i in range(1, 27):
            ops.append(f"R {i} z")
        for i in range(1, 27):
            ops.append(f"Q {i} {27 + i}")
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 7: Stress test with maximum string length
    def case_max_length():
        s = "".join(chr(ord('a') + (i % 26)) for i in range(100000))
        ops = []
        # Strategic queries over every pair of chosen positions
        positions = [1, 1000, 10000, 50000, 99999, 100000]
        for i in range(len(positions)):
            for j in range(i + 1, len(positions)):
                ops.append(f"Q {positions[i]} {positions[j]}")
        # Some modifications
        for i in range(1, 101, 10):
            ops.append(f"R {i} z")
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 8: Edge case with a single-character string
    def case_single_char():
        s = "a"
        ops = ["Q 1 1", "R 1 b", "Q 1 1", "I 1 c", "Q 1 2", "I 0 d", "Q 1 3"]
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 9: Repeated pattern with disruptions
    def case_repeated_pattern():
        s = "abc" * 1000
        ops = []
        # Query same positions in the pattern
        for i in range(1, 1001, 100):
            ops.append(f"Q {i} {i + 3}")
            ops.append(f"Q {i} {i + 6}")
        # Disrupt the pattern
        for i in range(1, 3001, 300):
            ops.append(f"R {i} x")
        # Query again
        for i in range(1, 1001, 100):
            ops.append(f"Q {i} {i + 300}")
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    # Case 10: Insertions creating long matches
    def case_insertion_matches():
        s = "abcdef"
        ops = []
        # Append a second copy of "abcdef" one character at a time
        # (insert at position 6 + i as the string grows), so the
        # queries below see long matches between the two halves
        for i in range(6):
            ops.append(f"I {6 + i} {'abcdef'[i]}")
        # Query the matches
        for i in range(1, 7):
            ops.append(f"Q {i} {i + 6}")
        return f"{s}\n{len(ops)}\n" + "\n".join(ops)
    inputs_list.extend([
        case_same_chars(),
        case_alternating(),
        case_heavy_insertions(),
        case_max_operations(),
        case_long_prefixes(),
        case_palindrome(),
        case_max_length(),
        case_single_char(),
        case_repeated_pattern(),
        case_insertion_matches(),
    ])
    return inputs_list
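
# A minimal usage sketch, not part of the original generator: write each
# case to a numbered file. The input_NN.txt naming scheme is an assumption
# for illustration.
if __name__ == "__main__":
    for idx, case in enumerate(construct_inputs(), start=1):
        with open(f"input_{idx:02d}.txt", "w") as fh:
            fh.write(case + "\n")
        print(f"wrote input_{idx:02d}.txt ({len(case)} bytes)")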