# Big Bang toy simulation (export metadata: 14,610 bytes, rev a0589da)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from mpl_toolkits.mplot3d import Axes3D
import plotly.graph_objects as go
import pandas as pd
import json
import os

# Constants
c = 299792458  # Speed of light in m/s
E_mc2 = c**2  # Mass-energy equivalence factor (E/m = c^2) in J/kg
TSR = E_mc2 / (1.38e-23)  # "Temperature to Speed Ratio": c^2 / k_B (ad hoc model constant)
alpha = 1.0  # Proportional constant for TSR
Q = 2 ** (1 / 12)  # Fractal structure parameter
dark_energy_density = 5.96e-27  # Density of dark energy in kg/m^3
dark_matter_density = 2.25e-27  # Density of dark matter in kg/m^3
collision_distance = 1e-10  # Distance threshold for collision detection (m)
Hubble_constant = 70.0  # km/s/Mpc (approximation)
Hubble_constant_SI = (
    Hubble_constant * 1000 / 3.086e22
)  # Hubble constant in SI units (s^-1)

# Initial conditions
temperature_initial = 1.42e32  # Planck temperature in K
particle_density_initial = 5.16e96  # Planck density in kg/m^3
particle_speed_initial = c  # Initially at the speed of light

# Simulation time
t_planck = 5.39e-44  # Planck time in s
t_simulation = t_planck * 1e3  # Shorter timescale for simulation

# Quark masses (in GeV) - used for initial mass values and comparison
quark_masses = {
    "up": 2.3e-3,
    "down": 4.8e-3,
    "charm": 1.28,
    "strange": 0.095,
    "top": 173.0,
    "bottom": 4.18,
}

# Conversion factor from GeV to J
GeV_to_J = 1.60217662e-10

# Simulation setup
num_steps = int(t_simulation / t_planck)

# Tunneling probabilities to investigate: 0.1, 0.2, ..., 1.0.
# NOTE: 1.0 IS included (the previous "Exclude 1.0" comment was wrong).
# Rounding removes float artifacts such as 0.30000000000000004 from np.arange,
# which would otherwise leak into printed output (filenames already use :.1f).
tunneling_probabilities = np.round(np.arange(0.1, 1.1, 0.1), 1)

# Create a directory to store the data
data_dir = "big_bang_simulation_data"
os.makedirs(data_dir, exist_ok=True)

# Functions to incorporate relativistic effects
def relativistic_energy(particle_speed, particle_mass):
    """Return the total relativistic energy E = gamma * m * c^2.

    Speeds at or above c yield ``np.inf``; the 1e-10 floor keeps the
    square root away from zero for near-luminal speeds.
    """
    if particle_speed >= c:
        return np.inf
    beta_sq = (particle_speed / c) ** 2
    lorentz_root = np.sqrt(max(1e-10, 1 - beta_sq))
    return particle_mass * c**2 / lorentz_root


def relativistic_momentum(particle_speed, particle_mass):
    """Return the relativistic momentum p = gamma * m * v.

    Speeds at or above c yield ``np.inf``; the 1e-10 floor mirrors the
    guard used in :func:`relativistic_energy`.
    """
    if particle_speed >= c:
        return np.inf
    beta_sq = (particle_speed / c) ** 2
    lorentz_root = np.sqrt(max(1e-10, 1 - beta_sq))
    return particle_mass * particle_speed / lorentz_root


def update_speed(current_speed, current_temperature, particle_mass):
    """Return the next-step speed from the current relativistic momentum.

    The speed is damped by the dark-energy density via the ratio
    p / (p + rho_de); ``current_temperature`` is accepted for interface
    compatibility but not used by this model.
    """
    momentum = relativistic_momentum(current_speed, particle_mass)
    damping = momentum / (momentum + dark_energy_density)
    return c * np.sqrt(max(1e-10, 1 - damping**2))


# Simulate the Big Bang with Dark Energy, Dark Matter, Tunneling, and Relativistic Effects
#
# One full simulation per tunneling probability: evolve six "quark" particles
# over `num_steps` Planck-time steps, render Plotly/matplotlib visualizations,
# compute a correlation matrix, and dump the raw arrays to JSON.
correlation_matrices = []  # One correlation DataFrame appended per tunneling probability

for tunneling_probability in tunneling_probabilities:
    print(f"Running simulation for tunneling probability: {tunneling_probability}")

    # Initialize per-run arrays (rows: quarks, columns: time steps).
    # NOTE(review): speeds start at 0 even though `particle_speed_initial`
    # above says particles start at c -- confirm which is intended.
    particle_speeds = np.zeros((len(quark_masses), num_steps))  # 2D array for speeds
    particle_temperatures = np.zeros(
        (len(quark_masses), num_steps)
    )  # 2D array for temperatures
    particle_masses_evolution = np.zeros(
        (len(quark_masses), num_steps)
    )  # 2D array for mass evolution
    particle_positions = np.zeros(
        (len(quark_masses), num_steps)
    )  # 2D array for positions
    tunneling_steps = np.zeros(
        (len(quark_masses), num_steps), dtype=bool
    )  # 2D array marking steps where tunneling fired

    # Create an array of masses for each quark.
    # NOTE(review): GeV_to_J converts GeV to joules (an energy), so these
    # "masses" are in J, not kg; downstream formulas treat them as masses.
    particle_masses = np.array([mass * GeV_to_J for mass in quark_masses.values()])

    # Each quark evolves independently except for the collision check below.
    for j, (quark, mass) in enumerate(quark_masses.items()):
        particle_masses_evolution[j, 0] = particle_masses[j]  # Initialize mass
        particle_positions[j, 0] = 0  # Initialize position

        for i in range(1, num_steps):
            # Advance speed from the previous step's state.
            particle_speeds[j, i] = update_speed(
                particle_speeds[j, i - 1],
                particle_temperatures[j, i - 1],
                particle_masses[j],
            )
            particle_positions[j, i] = (
                particle_positions[j, i - 1] + particle_speeds[j, i] * t_planck
            )  # Update position (simple Euler step)

            # NOTE(review): `value` is clamped below but never used afterwards;
            # this looks like dead code or an unfinished feature.
            value = (
                1
                - (particle_speeds[j, i] / (TSR * temperature_initial))
                + dark_matter_density
            )

            # Tunneling: with the given probability, snap the speed back to
            # its step-0 value (which is 0 given the initialization above).
            if np.random.rand() < tunneling_probability:
                particle_speeds[j, i] = particle_speeds[j, 0]  # Tunneling effect
                tunneling_steps[j, i] = True  # Mark tunneling step

            if value < 0:
                value = 0

            particle_temperatures[j, i] = (
                alpha * particle_speeds[j, i] ** 2
            )  # Apply TSR equation: temperature proportional to speed squared

            # Update mass based on energy conversion (delta E / c^2).
            speed_squared_diff = (
                particle_speeds[j, i] ** 2 - particle_speeds[j, i - 1] ** 2
            )

            # Avoid division by zero (if speed doesn't change, mass doesn't change)
            if speed_squared_diff == 0:
                particle_masses_evolution[j, i] = particle_masses_evolution[j, i - 1]
            else:
                # Calculate the change in relativistic energy between steps.
                energy_diff = relativistic_energy(
                    particle_speeds[j, i], particle_masses[j]
                ) - relativistic_energy(particle_speeds[j, i - 1], particle_masses[j])

                # Avoid NaN by checking if energy_diff is practically zero
                if abs(energy_diff) < 1e-15:  # Adjust the tolerance as needed
                    particle_masses_evolution[j, i] = particle_masses_evolution[
                        j, i - 1
                    ]
                else:
                    # Update mass based on energy difference (m' = m + dE/c^2)
                    new_mass = (
                        particle_masses_evolution[j, i - 1] + energy_diff / c**2
                    )
                    if np.isfinite(new_mass):  # Check if the new mass is finite
                        particle_masses_evolution[j, i] = new_mass
                    else:
                        particle_masses_evolution[j, i] = particle_masses_evolution[
                            j, i - 1
                        ]

            # Collision detection and resolution (1D elastic-collision formula).
            # NOTE(review): because the j-loop is outer, particle_positions[k, i]
            # for k > j is still 0 when this runs -- confirm this triggers only
            # as intended. Also, particle_speeds[j, i] is overwritten before it
            # is read in the k-update, so k's new speed uses j's POST-collision
            # speed rather than the pre-collision one.
            for k in range(j + 1, len(quark_masses)):
                if (
                    abs(particle_positions[j, i] - particle_positions[k, i])
                    < collision_distance
                ):
                    # Resolve collision (simplified example)
                    # Calculate relative speed before the collision
                    v_rel = particle_speeds[j, i] - particle_speeds[k, i]

                    # Calculate the new speeds after the collision
                    particle_speeds[j, i] = (
                        particle_speeds[j, i]
                        * (particle_masses[j] - particle_masses[k])
                        + 2 * particle_masses[k] * particle_speeds[k, i]
                    ) / (particle_masses[j] + particle_masses[k])
                    particle_speeds[k, i] = (
                        particle_speeds[k, i]
                        * (particle_masses[k] - particle_masses[j])
                        + 2 * particle_masses[j] * particle_speeds[j, i]
                    ) / (particle_masses[j] + particle_masses[k])

                    # Limit speed after collision
                    max_speed = c * 0.99  # Adjust the maximum speed as needed
                    particle_speeds[j, i] = np.clip(particle_speeds[j, i], 0, max_speed)
                    particle_speeds[k, i] = np.clip(particle_speeds[k, i], 0, max_speed)

                    # Update temperatures based on TSR
                    particle_temperatures[j, i] = alpha * particle_speeds[j, i] ** 2
                    particle_temperatures[k, i] = alpha * particle_speeds[k, i] ** 2

            # Apply expansion of the universe (redshift)
            particle_speeds[j, i] *= 1 - Hubble_constant_SI * t_planck

            # Apply expansion of the universe (cooling)
            particle_temperatures[j, i] *= 1 - Hubble_constant_SI * t_planck

            # Debugging output: abort this quark's evolution on first NaN.
            if np.isnan(particle_speeds[j, i]) or np.isnan(particle_temperatures[j, i]):
                print(f"NaN detected at step {i} for quark {quark}")
                print(f"Previous speed: {particle_speeds[j, i - 1]}")
                print(f"Previous temperature: {particle_temperatures[j, i - 1]}")
                print(f"Current speed: {particle_speeds[j, i]}")
                print(f"Current temperature: {particle_temperatures[j, i]}")
                break

        # Cap speed to avoid unphysical values
        particle_speeds[j] = np.clip(particle_speeds[j], 0, c)

    # --- Plotly Interactive Visualization (3D) ---
    # One trace per quark: speed (x) vs. temperature (y) over time (z).
    fig = go.Figure(
        data=[
            go.Scatter3d(
                x=particle_speeds[j],
                y=particle_temperatures[j],
                z=np.arange(num_steps),
                mode="lines+markers",
                name=quark.capitalize(),
            )
            for j, quark in enumerate(quark_masses.keys())
        ]
    )
    fig.update_layout(
        title=f"Big Bang Simulation: Temperature vs. Speed (Tunneling Probability: {tunneling_probability})",
        autosize=False,
        width=800,
        height=600,
        margin=dict(l=65, r=50, b=65, t=90),
    )
    fig.show()

    # --- Matplotlib Animation (3D) ---
    # A single growing line over all quarks' (speed, temperature, time) points.
    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")
    (line,) = ax.plot([], [], [], "b-")

    # Set axis limits from the data ranges.
    ax.set_xlim(min(particle_speeds.flatten()), max(particle_speeds.flatten()))
    ax.set_ylim(
        min(particle_temperatures.flatten()), max(particle_temperatures.flatten())
    )
    ax.set_zlim(0, num_steps)

    ax.set_xlabel("Particle Speed")
    ax.set_ylabel("Particle Temperature")
    ax.set_zlabel("Time")
    ax.set_title(
        f"Big Bang Simulation Animation (Tunneling Probability: {tunneling_probability})"
    )

    # Animation callbacks close over this iteration's arrays.
    def init():
        line.set_data([], [])
        line.set_3d_properties([])
        return (line,)

    def update(frame):
        # Show all quarks' trajectories up to `frame` steps.
        line.set_data(
            particle_speeds[:, :frame].flatten(),
            particle_temperatures[:, :frame].flatten(),
        )
        line.set_3d_properties(np.tile(np.arange(frame), len(quark_masses)))
        return (line,)

    ani = FuncAnimation(fig, update, frames=num_steps, init_func=init, blit=True)
    ani.save(f"big_bang_simulation_3d_{tunneling_probability}.gif", writer="pillow")
    plt.show()

    # --- Plotly Mass Evolution (3D) ---
    # NOTE(review): X and Y are 2D (num_steps x num_steps) meshgrids built from
    # quark 0's speeds, while z=particle_masses_evolution[j] is 1D -- verify
    # Plotly renders this as intended.
    X, Y = np.meshgrid(
        particle_speeds[0], np.arange(num_steps)
    )  # Create 2D meshgrid for x and y
    fig = go.Figure(
        data=[
            go.Surface(
                z=particle_masses_evolution[j],
                x=X,
                y=Y,
                colorscale="Viridis",
                name=quark.capitalize(),
            )
            for j, quark in enumerate(quark_masses.keys())
        ]
    )
    fig.update_layout(
        title=f"Big Bang Simulation: Mass Evolution (Tunneling Probability: {tunneling_probability})",
        autosize=False,
        width=800,
        height=600,
        margin=dict(l=65, r=50, b=65, t=90),
    )
    fig.show()

    # --- Plotly Tunneling Effect (3D) ---
    # Same meshgrid construction as above; z is the boolean tunneling mask.
    X, Y = np.meshgrid(
        particle_speeds[0], np.arange(num_steps)
    )  # Create 2D meshgrid for x and y
    fig = go.Figure(
        data=[
            go.Surface(
                z=tunneling_steps[j],
                x=X,
                y=Y,
                colorscale="Blues",
                name=quark.capitalize(),
            )
            for j, quark in enumerate(quark_masses.keys())
        ]
    )
    fig.update_layout(
        title=f"Big Bang Simulation: Tunneling Effect (Tunneling Probability: {tunneling_probability})",
        autosize=False,
        width=800,
        height=600,
        margin=dict(l=65, r=50, b=65, t=90),
    )
    fig.show()

    # --- Correlation Analysis ---
    # Pool all quarks and steps into one flat table and correlate the columns.
    df = pd.DataFrame(
        {
            "Speed": particle_speeds.flatten(),
            "Temperature": particle_temperatures.flatten(),
            "Mass": particle_masses_evolution.flatten(),
            "Tunneling": tunneling_steps.flatten(),
        }
    )

    correlation_matrix = df.corr()
    correlation_matrices.append(correlation_matrix)

    print("Correlation Matrix:")
    print(correlation_matrix)

    # Print calculated masses at the end of the simulation
    print(
        f"Calculated masses at the end of the simulation (Tunneling Probability: {tunneling_probability}):"
    )
    for j, quark in enumerate(quark_masses.keys()):
        print(f"{quark}: {particle_masses_evolution[j, -1] / GeV_to_J:.4e} GeV")
    print("Real masses:")
    for quark, mass in quark_masses.items():
        print(f"{quark}: {mass:.4e} GeV")

    # Save this run's raw arrays and correlation matrix to a JSON file.
    data_filename = os.path.join(
        data_dir, f"big_bang_simulation_data_{tunneling_probability:.1f}.json"
    )
    data = {
        "tunneling_probability": tunneling_probability,
        "particle_speeds": particle_speeds.tolist(),
        "particle_temperatures": particle_temperatures.tolist(),
        "particle_masses_evolution": particle_masses_evolution.tolist(),
        "tunneling_steps": tunneling_steps.tolist(),
        "correlation_matrix": correlation_matrix.values.tolist(),  # Use values.tolist() to convert DataFrame to list
    }
    with open(data_filename, "w") as f:
        json.dump(data, f)

# Validate the correlation matrices collected during the simulation loop.
# NOTE(review): this section previously RESET `correlation_matrices` and ran a
# stub loop ("#... rest of the code remains the same") that appended the same
# `df.corr()` -- left over from the final simulation iteration -- once per
# tunneling probability, discarding the genuine per-probability matrices
# gathered above. Keep the collected matrices and only filter out any
# malformed (non-square) ones.
valid_correlation_matrices = []
for tunneling_probability, correlation_matrix in zip(
    tunneling_probabilities, correlation_matrices
):
    if correlation_matrix.shape[0] == correlation_matrix.shape[1]:
        valid_correlation_matrices.append(correlation_matrix)
    else:
        print(f"Skipping correlation matrix for tunneling probability {tunneling_probability} because it is not a square matrix.")
correlation_matrices = valid_correlation_matrices

# Flatten each correlation matrix into one row per tunneling probability.
flat_correlation_matrices = np.array(
    [matrix.values.flatten() for matrix in correlation_matrices]
)

# Derive the matrix side length from the flattened width instead of the old
# hard-coded "// 4" assumption, so the column names stay correct if the set
# of correlated quantities ever changes. (4x4 matrices -> Corr_0_0..Corr_3_3,
# identical to the previous naming.)
side = int(round(np.sqrt(flat_correlation_matrices.shape[1])))
columns = [f"Corr_{i}_{j}" for i in range(side) for j in range(side)]

# NOTE(review): this assumes one matrix survived per tunneling probability;
# if any were skipped upstream, the index length would mismatch the rows.
correlation_matrices_df = pd.DataFrame(
    flat_correlation_matrices, columns=columns, index=tunneling_probabilities
)

# Print or save DataFrame
print("Correlation Matrices for Different Tunneling Probabilities:")
print(correlation_matrices_df)