# quantum/qlbm/visualize_counts.py
# Author: harishaseebat92
# Commit b0f5437: "Turbo Colorscale, EM: IONQ QPU Integration"
import numpy as np
from sklearn.neighbors import KernelDensity
import plotly.graph_objects as go
def bitstring_to_xyz(bs):
    """
    Decode a measurement bitstring into normalized (x, y, z) coordinates.

    The string is split into three equal thirds; each third is read as a
    binary integer and normalized to [0, 1) by dividing by 2**(len(bs)//3).

    Parameters
    ----------
    bs : str
        Bitstring whose length is divisible by 3.

    Returns
    -------
    tuple of float
        (x, y, z), each in [0, 1).

    Raises
    ------
    ValueError
        If len(bs) is not divisible by 3.
    """
    n = len(bs)
    # Raise instead of assert so the validation survives `python -O`.
    if n % 3 != 0:
        raise ValueError("Bitstring length must be divisible by 3")
    t = n // 3
    ix = int(bs[0:t], 2)
    iy = int(bs[t:2 * t], 2)
    iz = int(bs[2 * t:3 * t], 2)
    maxv = 1 << t  # 2**t, so each coordinate falls in [0, 1)
    return ix / maxv, iy / maxv, iz / maxv
def load_samples(d, T_total, logger=None, flag_qubits=False, midcircuit_meas=True):
    """
    Extract (x, y, z) sample points from a measurement-counts dictionary.

    Only bitstrings whose leading "prefix" qubits all read 0 are kept; the
    remaining bits of each surviving string are decoded into coordinates
    with `bitstring_to_xyz`.

    Parameters
    ----------
    d : dict
        Dictionary mapping bitstrings to counts.
    T_total : int
        Total number of timesteps (used to determine how many direction
        bits to check in the prefix).
    logger : callable, optional
        Function to log messages; defaults to `print`.
    flag_qubits : bool, optional
        If True, the circuit included flag qubits, which lengthens the
        all-zero prefix that must be matched.
    midcircuit_meas : bool, optional
        Only consulted when `flag_qubits` is True: selects the
        mid-circuit-measurement prefix length (12*T_total) over the
        end-of-circuit one (6*(T_total+1)).

    Returns
    -------
    pts : ndarray
        Array of (x, y, z) points (empty if nothing matched).
    counts : ndarray
        Array of counts for each point.
    """
    def log(msg):
        # Route all diagnostics through the caller's logger when provided.
        if logger:
            logger(str(msg))
        else:
            print(msg)

    # Number of leading qubits that must all read 0 for a shot to be valid.
    if flag_qubits:
        pref_length = 12 * T_total if midcircuit_meas else 6 * (T_total + 1)
    else:
        pref_length = 6 * T_total

    pts = []
    counts = []
    if not d:
        log("Warning: Empty counts dictionary")
        return np.array(pts), np.array(counts)

    # Debug: show a few raw bitstrings so format mismatches are visible.
    sample_keys = list(d.keys())[:3]
    log(f"Sample bitstrings (first 3): {sample_keys}")
    if sample_keys:
        log(f"Bitstring length: {len(sample_keys[0])}")
        log(f"Expected prefix length: {pref_length}")

    expected_prefix = "0" * pref_length  # hoisted: loop-invariant
    for bs, cnt in d.items():
        bs = bs.replace(" ", "")  # some backends space-separate registers
        # Guard clauses: keep only all-zero-prefix, non-negative entries.
        if bs[:pref_length] != expected_prefix:
            continue
        if cnt < 0:
            # Negative values can occur in quasi-probability dicts; skip.
            continue
        remaining_bits = bs[pref_length:]
        if len(remaining_bits) % 3 != 0:
            log(f"Warning: Remaining bitstring length {len(remaining_bits)} not divisible by 3")
            continue
        x, y, z = bitstring_to_xyz(remaining_bits)
        pts.append([x, y, z])
        counts.append(cnt)

    pts = np.array(pts)
    counts = np.array(counts)
    log(f"Number of valid counts: {np.sum(counts)}")
    log(f"Number of valid bitstrings: {len(counts)}")
    if len(counts) == 0:
        log("Warning: No bitstrings matched the zero-prefix filter. This may indicate:")
        log(" - All measurement outcomes had non-zero direction qubits")
        log(" - Bitstring format mismatch")
    return pts, counts
def estimate_density(pts, counts, bandwidth=0.05, grid_size=64):
    """
    Fit a Gaussian KDE weighted by counts and evaluate it on a regular
    grid over the unit cube [0, 1]^3.

    Parameters
    ----------
    pts : ndarray
        (N, 3) array of sample coordinates.
    counts : ndarray
        Weight for each point in `pts` (truncated to int for replication).
    bandwidth : float, optional
        KDE bandwidth.
    grid_size : int, optional
        Number of grid nodes per axis.

    Returns
    -------
    (grid_x, grid_y, grid_z, grid_density)
        3D mesh arrays ('ij' indexing) and the density evaluated on them,
        rescaled so the grid values sum to grid_size**3 (mean value 1).
    """
    # Degenerate input: fall back to a flat, near-zero density field.
    if len(pts) == 0 or len(counts) == 0 or np.sum(counts) == 0:
        print("Warning: No valid samples to estimate density. Returning uniform distribution.")
        return _uniform_density_grid(grid_size)

    # sklearn's KernelDensity.fit takes no sample weights, so approximate
    # weighting by replicating each point `count` times (careful: this can
    # blow up memory for very large counts).
    pts_rep = np.repeat(pts, counts.astype(int), axis=0)
    if len(pts_rep) == 0:  # all counts truncated to zero
        print("Warning: No points after replication. Returning uniform distribution.")
        return _uniform_density_grid(grid_size)

    kde = KernelDensity(bandwidth=bandwidth, kernel='gaussian')
    kde.fit(pts_rep)

    xx, yy, zz = _unit_cube_mesh(grid_size)
    grid_coords = np.vstack([xx.ravel(), yy.ravel(), zz.ravel()]).T
    logdens = kde.score_samples(grid_coords)  # log density at grid nodes
    dens = np.exp(logdens).reshape(xx.shape)
    # Rescale so the average cell value is 1 (sum equals grid_size**3).
    dens = (grid_size ** 3) * dens / np.sum(dens)
    return xx, yy, zz, dens


def _unit_cube_mesh(grid_size):
    """Return an 'ij'-indexed meshgrid over [0, 1]^3, grid_size nodes per axis."""
    axis = np.linspace(0.0, 1.0, grid_size)
    return np.meshgrid(axis, axis, axis, indexing='ij')


def _uniform_density_grid(grid_size):
    """Fallback result: unit-cube mesh with a small constant density (0.001)."""
    xx, yy, zz = _unit_cube_mesh(grid_size)
    return xx, yy, zz, np.full(xx.shape, 0.001)
def plot_density_isosurface(xx, yy, zz, dens, level=None):
    """
    Render an isosurface plot of a 3D density field with Plotly and open
    it in the browser.

    Parameters
    ----------
    xx, yy, zz : ndarray
        3D mesh coordinate arrays.
    dens : ndarray
        Density values, same shape as the mesh arrays.
    level : float, optional
        Minimum density drawn (isomin). Defaults to the 90th percentile,
        i.e. roughly the top 10% of density values.

    Returns
    -------
    plotly.graph_objects.Figure
        The constructed figure (also shown in the browser as a side effect),
        so callers can further customize or save it.
    """
    if level is None:
        level = np.percentile(dens, 90)  # default: top 10% of density
    fig = go.Figure(
        data=go.Isosurface(
            x=xx.ravel(),
            y=yy.ravel(),
            z=zz.ravel(),
            value=dens.ravel(),
            isomin=level,
            isomax=dens.max(),
            opacity=0.4,      # low opacity so nested shells remain visible
            surface_count=5,  # multiple shells approximate volume rendering
            caps=dict(x_show=False, y_show=False, z_show=False),
            showscale=True,
        )
    )
    fig.update_layout(
        scene=dict(
            xaxis_title="x",
            yaxis_title="y",
            zaxis_title="z",
        ),
        title="3D Density Isosurface"
    )
    fig.show(renderer="browser")
    return fig
def plot_density_isosurface_slider(outputs, T_list=None):
    """
    Build a Plotly figure with one isosurface trace per timestep and a
    slider that toggles which timestep is visible.

    outputs: list of (xx, yy, zz, dens) tuples, one per timestep.
    T_list: optional timestep labels matching `outputs`; defaults to
    the indices 0..len(outputs)-1. Returns the figure (not shown).
    """
    if not outputs:
        print("No output to plot.")
        return
    if T_list is None:
        T_list = list(range(len(outputs)))

    # Shared color range across all frames keeps timesteps comparable.
    # dens sits at index 3 of each (xx, yy, zz, dens) tuple.
    densities = [frame[3] for frame in outputs]
    dens_lo = min(np.min(a) for a in densities)
    dens_hi = max(np.max(a) for a in densities)

    fig = go.Figure()
    for idx, (gx, gy, gz, gd) in enumerate(outputs):
        fig.add_trace(go.Isosurface(
            x=gx.ravel(),
            y=gy.ravel(),
            z=gz.ravel(),
            value=gd.ravel(),
            isomin=dens_lo,
            isomax=dens_hi,
            opacity=0.4,
            surface_count=10,
            caps=dict(x_show=False, y_show=False, z_show=False),
            colorscale='Turbo',
            colorbar=dict(title="Density"),
            visible=idx == 0,  # only the first timestep starts visible
            name=f"T={T_list[idx]}"
        ))

    # One slider step per timestep, each revealing exactly one trace.
    steps = []
    for idx, T in enumerate(T_list):
        mask = [False] * len(outputs)
        mask[idx] = True
        steps.append(dict(
            method="update",
            args=[{"visible": mask},
                  {"title": f"QLBM Simulation - Timestep T={T}"}],
            label=str(T),
        ))

    fig.update_layout(
        title=f"QLBM Simulation - Timestep T={T_list[0]}",
        scene=dict(
            xaxis_title="X",
            yaxis_title="Y",
            zaxis_title="Z",
            aspectmode='cube',
        ),
        sliders=[dict(
            active=0,
            currentvalue={"prefix": "Timestep: "},
            pad={"t": 50},
            steps=steps,
        )],
    )
    # fig.show(renderer="browser")
    return fig
# Example usage (load_samples takes a counts dict plus T_total, not a filename):
# if __name__ == '__main__':
#     import json
#     with open('counts_7_3.json') as f:
#         counts_dict = json.load(f)
#     pts, counts = load_samples(counts_dict, T_total=3)
#     xx, yy, zz, dens = estimate_density(pts, counts, bandwidth=0.05, grid_size=40)
#     plot_density_isosurface(xx, yy, zz, dens)