import random
import string


def generate_random_string(length):
    """Return a random string of lowercase letters of the given length."""
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
def generate_operation(current_length, query_count, max_queries):
    """Generate one random operation.

    Returns a single-element list containing the operation string, together with
    the updated string length and query count.
    """
    operations = []
    if query_count < max_queries and random.random() < 0.4:  # 40% chance for a query while queries remain
        op_type = 'Q'
        x = random.randint(1, current_length)
        y = random.randint(1, current_length)
        operations.append(f"{op_type} {x} {y}")
        query_count += 1
    elif random.random() < 0.5:  # ~30% chance for a replace (50% of the remaining 60%)
        op_type = 'R'
        x = random.randint(1, current_length)
        d = random.choice(string.ascii_lowercase)
        operations.append(f"{op_type} {x} {d}")
    else:  # ~30% chance for an insert
        op_type = 'I'
        x = random.randint(0, current_length)
        d = random.choice(string.ascii_lowercase)
        operations.append(f"{op_type} {x} {d}")
        current_length += 1
    return operations, current_length, query_count
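# Illustrative sketch of the test-case text this script assembles. The exact
# problem semantics of Q/R/I are assumptions inferred from the operation
# formats above, not stated in this file:
#
#   abcde        initial string
#   3            number of operations that follow
#   Q 2 4        query involving positions 2 and 4
#   R 1 z        replace the character at position 1 with 'z'
#   I 0 k        insert 'k' at position 0 (presumably before the first character)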
def construct_inputs():
    """Build a list of test-case strings: small, medium, and large random cases
    followed by a few hand-written edge cases."""
    inputs_list = []

    # Small test cases
    for _ in range(20):
        initial_length = random.randint(5, 20)
        initial_string = generate_random_string(initial_length)
        m = random.randint(5, 30)
        max_queries = min(m // 2, 20)
        operations = []
        current_length = initial_length
        query_count = 0
        for _ in range(m):
            ops, current_length, query_count = generate_operation(current_length, query_count, max_queries)
            operations.extend(ops)
            if current_length > 50:  # prevent the string from getting too long
                break
        test_case = f"{initial_string}\n{len(operations)}\n" + "\n".join(operations)
        inputs_list.append(test_case)
    # Medium test cases
    for _ in range(15):
        initial_length = random.randint(50, 200)
        initial_string = generate_random_string(initial_length)
        m = random.randint(100, 500)
        max_queries = min(m // 3, 100)
        operations = []
        current_length = initial_length
        query_count = 0
        for _ in range(m):
            ops, current_length, query_count = generate_operation(current_length, query_count, max_queries)
            operations.extend(ops)
            if current_length > 1000:  # prevent the string from getting too long
                break
        test_case = f"{initial_string}\n{len(operations)}\n" + "\n".join(operations)
        inputs_list.append(test_case)
    # Large test cases
    for _ in range(10):
        initial_length = random.randint(1000, 5000)
        initial_string = generate_random_string(initial_length)
        m = random.randint(5000, 20000)
        max_queries = min(m // 4, 1000)
        operations = []
        current_length = initial_length
        query_count = 0
        for _ in range(m):
            ops, current_length, query_count = generate_operation(current_length, query_count, max_queries)
            operations.extend(ops)
            if current_length > 10000:  # prevent the string from getting too long
                break
        test_case = f"{initial_string}\n{len(operations)}\n" + "\n".join(operations)
        inputs_list.append(test_case)
    # Edge cases
    # Single-character string
    test_case = "a\n3\nQ 1 1\nR 1 b\nQ 1 1"
    inputs_list.append(test_case)

    # String with repeated characters
    test_case = "aaaa\n5\nQ 1 2\nQ 2 3\nI 2 b\nQ 1 3\nQ 3 5"
    inputs_list.append(test_case)

    # Maximum-constraints test
    initial_string = generate_random_string(10000)
    operations = []
    current_length = 10000
    query_count = 0
    max_queries = 2000
    for _ in range(30000):
        ops, current_length, query_count = generate_operation(current_length, query_count, max_queries)
        operations.extend(ops)
        if len(operations) >= 25000 or current_length > 50000:
            break
    test_case = f"{initial_string}\n{len(operations)}\n" + "\n".join(operations)
    inputs_list.append(test_case)

    return inputs_list
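
# A minimal driver sketch, not part of the original harness: it assumes each
# generated case should be written to its own file named input_<i>.txt in the
# current directory. Adjust to however the grading harness consumes inputs.
if __name__ == "__main__":
    cases = construct_inputs()
    for i, case in enumerate(cases):
        with open(f"input_{i}.txt", "w") as f:
            f.write(case + "\n")
    print(f"Wrote {len(cases)} test cases.")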