| import random | |
| import string | |
def generate_random_string(length):
    """Return a string of `length` uniformly random lowercase ASCII letters."""
    chars = []
    for _ in range(length):
        chars.append(random.choice(string.ascii_lowercase))
    return "".join(chars)
def generate_operation(current_length, query_count, max_queries):
    """Generate one random operation line for a test case.

    Operation mix while queries remain: 40% query 'Q x y', then the rest is
    split 50/50 between replace 'R x d' and insert 'I x d' (30%/30% overall).
    Once `query_count` reaches `max_queries`, only R/I are emitted (50/50).

    Args:
        current_length: Current length of the string (must be >= 1).
        query_count: Number of queries generated so far.
        max_queries: Cap on the total number of 'Q' operations.

    Returns:
        Tuple (op, current_length, query_count): the operation line, the
        string length after applying it, and the updated query count.
    """
    # Fix vs. original: the single-element `operations` list that existed
    # only to return `operations[0]` is replaced by a plain string.
    if query_count < max_queries and random.random() < 0.4:
        # Positions are drawn independently, so x may exceed y.
        # NOTE(review): if the consumer requires x <= y, sort them — confirm.
        x = random.randint(1, current_length)
        y = random.randint(1, current_length)
        op = f"Q {x} {y}"
        query_count += 1
    elif random.random() < 0.5:
        # Replace the character at 1-based position x with d.
        x = random.randint(1, current_length)
        d = random.choice(string.ascii_lowercase)
        op = f"R {x} {d}"
    else:
        # Insert d after position x (0 = at the front); the string grows by 1.
        x = random.randint(0, current_length)
        d = random.choice(string.ascii_lowercase)
        op = f"I {x} {d}"
        current_length += 1
    return op, current_length, query_count
def _random_test_case(initial_length, num_ops, max_queries, length_cap, op_cap=None):
    """Build one randomized test case string.

    Args:
        initial_length: Length of the randomly generated starting string.
        num_ops: Maximum number of operations to attempt.
        max_queries: Cap on 'Q' operations, forwarded to generate_operation.
        length_cap: Stop once the string grows beyond this length.
        op_cap: Optional hard cap on the number of emitted operations.

    Returns:
        The test case formatted as "<string>\n<op count>\n<one op per line>".
    """
    initial_string = generate_random_string(initial_length)
    operations = []
    current_length = initial_length
    query_count = 0
    for _ in range(num_ops):
        op, current_length, query_count = generate_operation(
            current_length, query_count, max_queries
        )
        operations.append(op)
        # Stop early so the string (and the whole case) stays a bounded size.
        if current_length > length_cap or (
            op_cap is not None and len(operations) >= op_cap
        ):
            break
    return f"{initial_string}\n{len(operations)}\n" + "\n".join(operations)


def construct_inputs():
    """Construct the full list of randomized and hand-written test cases.

    Returns:
        A list of test case strings: small, medium, and large random cases,
        two fixed edge cases, and one near-maximum-constraints case.
    """
    # Fix vs. original: the three copy-pasted generation loops (plus the
    # near-duplicate max-constraints section) are collapsed into one helper.
    inputs_list = []
    # Small test cases.
    for _ in range(10):
        initial_length = random.randint(5, 20)
        m = random.randint(5, 30)
        inputs_list.append(
            _random_test_case(initial_length, m, min(m // 2, 50), 100)
        )
    # Medium test cases.
    for _ in range(10):
        initial_length = random.randint(50, 200)
        m = random.randint(100, 1000)
        inputs_list.append(
            _random_test_case(initial_length, m, min(m // 3, 500), 1000)
        )
    # Large test cases.
    for _ in range(5):
        initial_length = random.randint(1000, 10000)
        m = random.randint(10000, 50000)
        inputs_list.append(
            _random_test_case(initial_length, m, min(m // 5, 2000), 100000)
        )
    # Edge case: single character string.
    inputs_list.append("a\n3\nQ 1 1\nI 1 b\nQ 1 2")
    # Edge case: string with repeated characters.
    inputs_list.append("aaaa\n5\nQ 1 2\nQ 2 3\nR 2 b\nQ 1 3\nI 0 c")
    # Maximum constraints test: fixed sizes, extra cap on operation count.
    inputs_list.append(
        _random_test_case(10000, 20000, 1000, 100000, op_cap=15000)
    )
    return inputs_list
Xet Storage Details
- Size:
- 4.18 kB
- Xet hash:
- 1cf483c6a1c57e3c421048432f523d6e8c3d875c903bfecac7a5cd91f4c92b84
·
Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.