#include <bits/stdc++.h>
using namespace std;
// Key idea: at each depth, we have tight_L, tight_R, and free states.
// tight_L and tight_R transition to different children.
// What if we MERGE tight_L and tight_R into a single node at some depth?
// The merged node would have edges from BOTH tight_L and tight_R.
// This creates extra paths (tight_L prefix + tight_R edge, and vice versa).
// These extra paths might represent numbers outside [L,R] -> BAD.
// BUT: if the extra paths DON'T reach END (because they lead to dead ends),
// then the merge is safe!
// When would extra paths NOT reach END?
// If tight_L's bit-0 edge leads to a state that eventually reaches END,
// and tight_R's bit-0 edge leads to a different state that also reaches END,
// then the merged node's bit-0 edge would go to... both? No, we'd need
// to pick one or combine them.
// Actually, let me think about this differently.
// Instead of merging within a depth, what about a completely different
// DAG structure?
// APPROACH: Difference encoding
// Instead of tracking tight_L and tight_R separately, can we track
// the DIFFERENCE between the current prefix and L (or R)?
// APPROACH: Shared suffix tree
// Build from the END backwards. At depth 19 (1 bit remaining):
// State [0,1] accepts both 0 and 1 -> "full" state F1
// State [1,1] accepts only 1 -> "high" state H1
// At depth 18 (2 bits remaining):
// State [0,3] accepts all -> full state F2
// State [0,1] accepts 00, 01 -> needs F1 on 0, nothing on 1
// State [1,3] accepts 01, 10, 11 -> needs H1 on 0... wait [1,3]:
// bit 0: [1, min(3,1)] = [1,1] -> H1
// bit 1: [max(1,2)-2, 3-2] = [0,1] -> F1
// So [1,3] -> 0:H1, 1:F1
// Hmm, this is getting complex. Let me try a brute-force optimization approach.
// Start with the 55-node ADFA, then try random merges and verify.
// Target interval: the DAG must generate exactly the integers in [L, R].
int L = 577837, R = 979141;
// A weighted edge: `to` is the destination node, `w` is the emitted bit (0/1).
struct Edge { int to, w; };
// Checks that the 1-indexed graph `adj` on nodes 1..n is a valid encoder:
//  * exactly one source (START) and one sink (END),
//  * no 0-weight edge leaving START (no leading zeros),
//  * acyclic,
//  * the multiset of START-to-END path values (bits appended MSB-first)
//    is exactly {L, L+1, ..., R}, each produced once.
bool verify(int n, vector<vector<Edge>>& adj) {
    // Degree counts locate the unique source and sink.
    vector<int> in_cnt(n + 1, 0), out_cnt(n + 1, 0);
    for (int u = 1; u <= n; u++) {
        out_cnt[u] = (int)adj[u].size();
        for (const Edge& e : adj[u]) in_cnt[e.to]++;
    }
    int src = -1, snk = -1, sources = 0, sinks = 0;
    for (int u = 1; u <= n; u++) {
        if (in_cnt[u] == 0) { sources++; src = u; }
        if (out_cnt[u] == 0) { sinks++; snk = u; }
    }
    if (sources != 1 || sinks != 1) return false;
    // Reject a leading 0-bit out of START.
    for (const Edge& e : adj[src])
        if (e.w == 0) return false;
    // Kahn's algorithm: every node must be topologically ordered (DAG check).
    vector<int> order;
    vector<int> remaining(in_cnt);
    queue<int> pending;
    for (int u = 1; u <= n; u++)
        if (remaining[u] == 0) pending.push(u);
    while (!pending.empty()) {
        int u = pending.front(); pending.pop();
        order.push_back(u);
        for (const Edge& e : adj[u])
            if (--remaining[e.to] == 0) pending.push(e.to);
    }
    if ((int)order.size() != n) return false; // cycle present
    // Enumerate every source-to-sink path; each spells a binary number.
    int paths = 0;
    vector<int> seen(R + 1, 0);
    bool bad = false;
    function<void(int, long long)> walk = [&](int u, long long val) {
        if (bad) return;
        if (val > R) { bad = true; return; } // prune: value already too large
        if (adj[u].empty()) {
            // Reached END: the accumulated value must lie in [L, R],
            // the path count must stay bounded, and no value may repeat.
            paths++;
            if (paths > 500000 || val < L || val > R) { bad = true; return; }
            seen[(int)val]++;
            if (seen[(int)val] > 1) { bad = true; return; }
            return;
        }
        for (const Edge& e : adj[u]) walk(e.to, val * 2 + e.w);
    };
    walk(src, 0);
    if (bad) return false;
    // Completeness: every value in [L, R] must have been produced.
    for (int v = L; v <= R; v++)
        if (seen[v] == 0) return false;
    return true;
}
// Entry point: builds the canonical interval ADFA for [L, R], verifies it,
// then exhaustively tries every pairwise node merge, keeping any merge that
// still verifies (i.e. still generates exactly [L, R]).
int main() {
// Build the 55-node ADFA
// A state is identified by (k, lo, hi): with k bits still to read, accept
// exactly the residual values in [lo, hi]. Memoized so shared suffixes of
// the interval automaton collapse into one state.
map<tuple<int,int,int>, int> state_id;
int next_id = 0;
// trans[id] = child state ids on bit 0 / bit 1 (-1 = no edge on that bit).
vector<array<int,2>> trans;
// info[id] = the (k, lo, hi) key that produced state id (for diagnostics).
vector<tuple<int,int,int>> info;
function<int(int, int, int)> get_state = [&](int k, int lo, int hi) -> int {
// Empty interval: no accepting continuation on this branch.
if (lo > hi) return -1;
auto key = make_tuple(k, lo, hi);
auto it = state_id.find(key);
if (it != state_id.end()) return it->second;
int id = next_id++;
state_id[key] = id;
trans.push_back({-1, -1});
info.push_back(key);
// k == 0: all bits consumed -> accepting END state, no outgoing edges.
if (k == 0) return id;
int mid = 1 << (k - 1);
// Bit 0 keeps the low half [lo, mid-1]; bit 1 takes the high half shifted
// down by mid. NOTE(review): C++17 sequences the RHS before the LHS of
// '=', so the recursive get_state call (which may reallocate `trans`)
// completes before trans[id] is evaluated; under pre-C++17 rules this
// line would risk writing through a dangling reference.
trans[id][0] = get_state(k - 1, lo, min(hi, mid - 1));
trans[id][1] = get_state(k - 1, max(lo, mid) - mid, hi - mid);
return id;
};
// L and R are both 20-bit numbers with a leading 1; strip that leading bit
// (base = 2^19) and build the automaton over the remaining 19 bits.
int base = 1 << 19;
int root_child = get_state(19, max(0, L - base), min(base - 1, R - base));
int ns = next_id; // number of ADFA states (not counting START)
// Convert to adjacency list with 1-indexed nodes
// Node 1 = START, Node 2..ns+1 = ADFA states (mapped from 0..ns-1)
int total_n = ns + 1;
vector<vector<Edge>> adj(total_n + 1);
// Map: ADFA state id -> node number (2-indexed)
auto node_of = [](int state_id) { return state_id + 2; };
// START (node 1) -> root_child (weight 1)
adj[1].push_back({node_of(root_child), 1});
for (int i = 0; i < ns; i++) {
auto [k, lo, hi] = info[i];
if (k == 0) continue; // END node has no edges
if (trans[i][0] != -1) adj[node_of(i)].push_back({node_of(trans[i][0]), 0});
if (trans[i][1] != -1) adj[node_of(i)].push_back({node_of(trans[i][1]), 1});
}
cout << "Original: " << total_n << " nodes" << endl;
cout << "Verifying... " << flush;
// Sanity-check the baseline graph before attempting any merges.
if (verify(total_n, adj)) {
cout << "OK!" << endl;
} else {
cout << "FAIL!" << endl;
return 1;
}
// Now try merging pairs of nodes
// For each pair (i, j) where i < j, try replacing all references to j with i
// and merging j's edges into i.
// This is O(n^2) which is fine for n=55.
// Each candidate merge is validated by the exhaustive verify(); an invalid
// merge (duplicate path, out-of-range value, cycle) is simply discarded.
cout << "\nTrying all pairwise merges..." << endl;
int found = 0;
for (int ni = 1; ni <= total_n; ni++) {
for (int nj = ni + 1; nj <= total_n; nj++) {
// Try merging nj into ni
vector<vector<Edge>> new_adj(total_n + 1); // we'll compact later
// Copy all edges, redirecting any edge into nj so it points at ni.
for (int u = 1; u <= total_n; u++) {
if (u == nj) continue; // skip merged node
int mapped_u = u;
for (auto& e : adj[u]) {
int mapped_to = (e.to == nj) ? ni : e.to;
new_adj[mapped_u].push_back({mapped_to, e.w});
}
}
// Add nj's edges to ni
for (auto& e : adj[nj]) {
int mapped_to = (e.to == nj) ? ni : e.to;
// Check if this edge already exists in ni
bool dup = false;
for (auto& ee : new_adj[ni]) {
if (ee.to == mapped_to && ee.w == e.w) { dup = true; break; }
}
if (!dup) new_adj[ni].push_back({mapped_to, e.w});
}
// Compact: remove node nj, renumber
// Nodes above nj shift down by one so ids stay contiguous 1..new_n.
int new_n = total_n - 1;
vector<vector<Edge>> compact(new_n + 1);
auto remap = [&](int x) -> int {
if (x < nj) return x;
if (x == nj) return ni; // shouldn't happen after above
return x - 1;
};
for (int u = 1; u <= total_n; u++) {
if (u == nj) continue;
int nu = remap(u);
for (auto& e : new_adj[u]) {
compact[nu].push_back({remap(e.to), e.w});
}
}
if (verify(new_n, compact)) {
// Report the merged pair with its (k, [lo,hi]) labels; node 1 is the
// synthetic START and has no ADFA label.
auto [k1, lo1, hi1] = (ni == 1) ? make_tuple(-1, -1, -1) : info[ni - 2];
auto [k2, lo2, hi2] = (nj == 1) ? make_tuple(-1, -1, -1) : info[nj - 2];
cout << "SUCCESS! Merge node " << ni;
if (ni > 1) cout << " (k=" << k1 << " [" << lo1 << "," << hi1 << "])";
else cout << " (START)";
cout << " with node " << nj;
if (nj > 1) cout << " (k=" << k2 << " [" << lo2 << "," << hi2 << "])";
else cout << " (START)";
cout << " -> " << new_n << " nodes" << endl;
found++;
}
}
if (ni % 10 == 0) cout << " Tried merges with node " << ni << "..." << endl;
}
if (found == 0) {
cout << "No valid pairwise merges found. 55 nodes is optimal." << endl;
} else {
cout << "Found " << found << " valid merges!" << endl;
}
return 0;
}