Upload 10 files
Browse files- Dockerfile +0 -0
- app.py +213 -0
- app_new.py +63 -0
- backend.py +112 -0
- mining_stats.py +53 -0
- my_wallet.json +17 -0
- network_integration.py +399 -0
- parallel_miner_v3.py +354 -0
- pool_calculator.py +80 -0
- requirements.txt +8 -0
Dockerfile
ADDED
|
File without changes
|
app.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import time
|
| 3 |
+
import logging
|
| 4 |
+
from parallel_miner_v3 import ParallelMiner
|
| 5 |
+
import threading
|
| 6 |
+
import json
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
import numpy as np
|
| 9 |
+
import plotly.graph_objects as go
|
| 10 |
+
from plotly.subplots import make_subplots
|
| 11 |
+
|
| 12 |
+
# Global variables for mining state
|
| 13 |
+
miner_instance = None
|
| 14 |
+
mining_thread = None
|
| 15 |
+
is_mining = False
|
| 16 |
+
stats_history = {
|
| 17 |
+
"timestamps": [],
|
| 18 |
+
"hashrates": [],
|
| 19 |
+
"total_hashes": [],
|
| 20 |
+
"blocks_found": []
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
def update_stats_history(hashrate, total_hashes, blocks):
|
| 24 |
+
"""Update historical stats for plotting"""
|
| 25 |
+
current_time = datetime.now().strftime("%H:%M:%S")
|
| 26 |
+
stats_history["timestamps"].append(current_time)
|
| 27 |
+
stats_history["hashrates"].append(hashrate)
|
| 28 |
+
stats_history["total_hashes"].append(total_hashes)
|
| 29 |
+
stats_history["blocks_found"].append(blocks)
|
| 30 |
+
|
| 31 |
+
# Keep only last 100 data points
|
| 32 |
+
max_points = 100
|
| 33 |
+
if len(stats_history["timestamps"]) > max_points:
|
| 34 |
+
stats_history["timestamps"] = stats_history["timestamps"][-max_points:]
|
| 35 |
+
stats_history["hashrates"] = stats_history["hashrates"][-max_points:]
|
| 36 |
+
stats_history["total_hashes"] = stats_history["total_hashes"][-max_points:]
|
| 37 |
+
stats_history["blocks_found"] = stats_history["blocks_found"][-max_points:]
|
| 38 |
+
|
| 39 |
+
def create_performance_plots():
|
| 40 |
+
"""Create performance visualization plots"""
|
| 41 |
+
fig = make_subplots(
|
| 42 |
+
rows=2, cols=1,
|
| 43 |
+
subplot_titles=("Mining Hashrate (KH/s)", "Total Hashes"),
|
| 44 |
+
vertical_spacing=0.12
|
| 45 |
+
)
|
| 46 |
+
|
| 47 |
+
# Hashrate plot
|
| 48 |
+
fig.add_trace(
|
| 49 |
+
go.Scatter(x=stats_history["timestamps"],
|
| 50 |
+
y=[h/1000 for h in stats_history["hashrates"]],
|
| 51 |
+
mode='lines+markers',
|
| 52 |
+
name='Hashrate',
|
| 53 |
+
line=dict(color='#2ecc71')),
|
| 54 |
+
row=1, col=1
|
| 55 |
+
)
|
| 56 |
+
|
| 57 |
+
# Total hashes plot
|
| 58 |
+
fig.add_trace(
|
| 59 |
+
go.Scatter(x=stats_history["timestamps"],
|
| 60 |
+
y=stats_history["total_hashes"],
|
| 61 |
+
mode='lines',
|
| 62 |
+
name='Total Hashes',
|
| 63 |
+
line=dict(color='#3498db')),
|
| 64 |
+
row=2, col=1
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
fig.update_layout(
|
| 68 |
+
height=600,
|
| 69 |
+
showlegend=True,
|
| 70 |
+
title_text="Mining Performance Metrics",
|
| 71 |
+
template="plotly_dark"
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
return fig
|
| 75 |
+
|
| 76 |
+
def start_mining():
|
| 77 |
+
"""Start the mining process"""
|
| 78 |
+
global miner_instance, mining_thread, is_mining
|
| 79 |
+
|
| 80 |
+
if is_mining:
|
| 81 |
+
return "Mining is already running!"
|
| 82 |
+
|
| 83 |
+
try:
|
| 84 |
+
miner_instance = ParallelMiner(num_cores=5)
|
| 85 |
+
miner_instance.mining = True
|
| 86 |
+
is_mining = True
|
| 87 |
+
|
| 88 |
+
# Start mining in background thread
|
| 89 |
+
mining_thread = threading.Thread(
|
| 90 |
+
target=miner_instance.start_mining,
|
| 91 |
+
kwargs={"duration": None}
|
| 92 |
+
)
|
| 93 |
+
mining_thread.daemon = True
|
| 94 |
+
mining_thread.start()
|
| 95 |
+
|
| 96 |
+
return "Mining started successfully! Monitor the stats below."
|
| 97 |
+
except Exception as e:
|
| 98 |
+
return f"Error starting mining: {str(e)}"
|
| 99 |
+
|
| 100 |
+
def stop_mining():
|
| 101 |
+
"""Stop the mining process"""
|
| 102 |
+
global miner_instance, is_mining
|
| 103 |
+
|
| 104 |
+
if not is_mining:
|
| 105 |
+
return "Mining is not running!"
|
| 106 |
+
|
| 107 |
+
try:
|
| 108 |
+
if miner_instance:
|
| 109 |
+
# Log final stats before stopping
|
| 110 |
+
logging.info("\n=== Final Mining Statistics ===")
|
| 111 |
+
grand_total = 0
|
| 112 |
+
for core_idx, core in enumerate(miner_instance.cores):
|
| 113 |
+
core_total = core.total_hashes
|
| 114 |
+
grand_total += core_total
|
| 115 |
+
logging.info(f"Core {core_idx}: {core_total:,} hashes")
|
| 116 |
+
logging.info(f"Grand Total: {grand_total:,} hashes")
|
| 117 |
+
logging.info(f"Overall Hashrate: {miner_instance.current_hashrate/1000:.2f} KH/s")
|
| 118 |
+
logging.info(f"Blocks Found: {miner_instance.blocks_found}")
|
| 119 |
+
logging.info("============================\n")
|
| 120 |
+
|
| 121 |
+
miner_instance.mining = False
|
| 122 |
+
is_mining = False
|
| 123 |
+
return "Mining stopped successfully! Check logs for final statistics."
|
| 124 |
+
return "No active mining instance found."
|
| 125 |
+
except Exception as e:
|
| 126 |
+
return f"Error stopping mining: {str(e)}"
|
| 127 |
+
|
| 128 |
+
def get_mining_stats():
|
| 129 |
+
"""Get current mining statistics"""
|
| 130 |
+
global miner_instance, is_mining
|
| 131 |
+
|
| 132 |
+
if not miner_instance or not is_mining:
|
| 133 |
+
return {
|
| 134 |
+
"status": "Stopped",
|
| 135 |
+
"hashrate": "0 H/s",
|
| 136 |
+
"total_hashes": "0",
|
| 137 |
+
"blocks_found": "0",
|
| 138 |
+
"best_hash": "None",
|
| 139 |
+
"difficulty": "0"
|
| 140 |
+
}
|
| 141 |
+
|
| 142 |
+
# Update historical stats
|
| 143 |
+
update_stats_history(
|
| 144 |
+
miner_instance.current_hashrate,
|
| 145 |
+
miner_instance.total_hashes,
|
| 146 |
+
miner_instance.blocks_found
|
| 147 |
+
)
|
| 148 |
+
|
| 149 |
+
# Create performance plot
|
| 150 |
+
performance_plot = create_performance_plots()
|
| 151 |
+
|
| 152 |
+
return {
|
| 153 |
+
"status": "Running" if is_mining else "Stopped",
|
| 154 |
+
"hashrate": f"{miner_instance.current_hashrate/1000:.2f} KH/s",
|
| 155 |
+
"total_hashes": f"{miner_instance.total_hashes:,}",
|
| 156 |
+
"blocks_found": str(miner_instance.blocks_found),
|
| 157 |
+
"best_hash": miner_instance.best_hash.hex() if miner_instance.best_hash else "None",
|
| 158 |
+
"difficulty": f"{miner_instance.best_hash_difficulty:,}",
|
| 159 |
+
"performance_plot": performance_plot
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
# Create the Gradio interface
|
| 163 |
+
with gr.Blocks(theme=gr.themes.Monochrome()) as app:
|
| 164 |
+
gr.Markdown("# ⛏️ Bitcoin Mining Dashboard")
|
| 165 |
+
|
| 166 |
+
with gr.Row():
|
| 167 |
+
start_btn = gr.Button("▶️ Start Mining", variant="primary")
|
| 168 |
+
stop_btn = gr.Button("⏹️ Stop Mining", variant="secondary")
|
| 169 |
+
|
| 170 |
+
with gr.Row():
|
| 171 |
+
with gr.Column():
|
| 172 |
+
status_label = gr.Label(label="Status")
|
| 173 |
+
hashrate_label = gr.Label(label="Current Hashrate")
|
| 174 |
+
total_hashes_label = gr.Label(label="Total Hashes")
|
| 175 |
+
blocks_label = gr.Label(label="Blocks Found")
|
| 176 |
+
best_hash_label = gr.Label(label="Best Hash")
|
| 177 |
+
difficulty_label = gr.Label(label="Best Difficulty")
|
| 178 |
+
|
| 179 |
+
with gr.Row():
|
| 180 |
+
plot_output = gr.Plot(label="Performance Metrics")
|
| 181 |
+
|
| 182 |
+
# Setup event handlers
|
| 183 |
+
start_btn.click(
|
| 184 |
+
fn=start_mining,
|
| 185 |
+
outputs=[status_label]
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
stop_btn.click(
|
| 189 |
+
fn=stop_mining,
|
| 190 |
+
outputs=[status_label]
|
| 191 |
+
)
|
| 192 |
+
|
| 193 |
+
# Instead of automatic updates, add a refresh button
|
| 194 |
+
refresh_btn = gr.Button("🔄 Refresh Stats")
|
| 195 |
+
refresh_btn.click(
|
| 196 |
+
fn=get_mining_stats,
|
| 197 |
+
outputs=[
|
| 198 |
+
status_label,
|
| 199 |
+
hashrate_label,
|
| 200 |
+
total_hashes_label,
|
| 201 |
+
blocks_label,
|
| 202 |
+
best_hash_label,
|
| 203 |
+
difficulty_label,
|
| 204 |
+
plot_output
|
| 205 |
+
]
|
| 206 |
+
)
|
| 207 |
+
|
| 208 |
+
if __name__ == "__main__":
|
| 209 |
+
app.launch(
|
| 210 |
+
share=True, # Enable sharing
|
| 211 |
+
server_name="0.0.0.0",
|
| 212 |
+
server_port=7862 # Using different port to avoid conflicts
|
| 213 |
+
)
|
app_new.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI, HTTPException
|
| 2 |
+
from fastapi.staticfiles import StaticFiles
|
| 3 |
+
from fastapi.responses import FileResponse, JSONResponse
|
| 4 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 5 |
+
import uvicorn
|
| 6 |
+
import webbrowser
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
import os
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
# Configure logging
|
| 12 |
+
logging.basicConfig(level=logging.INFO)
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
# Import our backend API
|
| 16 |
+
from backend import app as backend_app
|
| 17 |
+
|
| 18 |
+
# Create main FastAPI application
|
| 19 |
+
app = FastAPI(title="Mining Dashboard")
|
| 20 |
+
|
| 21 |
+
# Add CORS middleware
|
| 22 |
+
app.add_middleware(
|
| 23 |
+
CORSMiddleware,
|
| 24 |
+
allow_origins=["*"],
|
| 25 |
+
allow_credentials=True,
|
| 26 |
+
allow_methods=["*"],
|
| 27 |
+
allow_headers=["*"],
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
# Mount the backend API
|
| 31 |
+
app.mount("/api", backend_app)
|
| 32 |
+
|
| 33 |
+
# Mount static files
|
| 34 |
+
static_path = Path(__file__).parent / "static"
|
| 35 |
+
app.mount("/static", StaticFiles(directory=str(static_path)), name="static")
|
| 36 |
+
|
| 37 |
+
# Serve index.html for root path
|
| 38 |
+
@app.get("/")
|
| 39 |
+
async def serve_index():
|
| 40 |
+
try:
|
| 41 |
+
index_path = static_path / "index.html"
|
| 42 |
+
if not index_path.exists():
|
| 43 |
+
logger.error(f"Index file not found at {index_path}")
|
| 44 |
+
raise HTTPException(status_code=404, detail="Index file not found")
|
| 45 |
+
return FileResponse(str(index_path))
|
| 46 |
+
except Exception as e:
|
| 47 |
+
logger.error(f"Error serving index: {str(e)}")
|
| 48 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 49 |
+
|
| 50 |
+
# Error handler
|
| 51 |
+
@app.exception_handler(HTTPException)
|
| 52 |
+
async def http_exception_handler(request, exc):
|
| 53 |
+
return JSONResponse(
|
| 54 |
+
status_code=exc.status_code,
|
| 55 |
+
content={"detail": exc.detail}
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
if __name__ == "__main__":
|
| 59 |
+
# Open the browser
|
| 60 |
+
webbrowser.open("http://localhost:8000")
|
| 61 |
+
|
| 62 |
+
# Start the server
|
| 63 |
+
uvicorn.run(app, host="0.0.0.0", port=8000)
|
backend.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI, HTTPException
|
| 2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
+
from fastapi.responses import JSONResponse
|
| 4 |
+
from parallel_miner_v3 import ParallelMiner
|
| 5 |
+
import threading
|
| 6 |
+
from typing import Dict, Optional
|
| 7 |
+
import uvicorn
|
| 8 |
+
import logging
|
| 9 |
+
|
| 10 |
+
# Configure logging
|
| 11 |
+
logging.basicConfig(level=logging.INFO)
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
app = FastAPI(title="Mining Dashboard API")
|
| 15 |
+
|
| 16 |
+
# Enable CORS
|
| 17 |
+
app.add_middleware(
|
| 18 |
+
CORSMiddleware,
|
| 19 |
+
allow_origins=["*"], # In production, replace with your frontend URL
|
| 20 |
+
allow_credentials=True,
|
| 21 |
+
allow_methods=["*"],
|
| 22 |
+
allow_headers=["*"],
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
# Global variables for mining state
|
| 26 |
+
miner_instance: Optional[ParallelMiner] = None
|
| 27 |
+
mining_thread: Optional[threading.Thread] = None
|
| 28 |
+
is_mining = False
|
| 29 |
+
|
| 30 |
+
@app.get("/status") # Remove /api prefix since we're mounting at /api
|
| 31 |
+
async def get_status() -> Dict:
|
| 32 |
+
"""Get current mining status and statistics"""
|
| 33 |
+
global miner_instance, is_mining
|
| 34 |
+
|
| 35 |
+
logger.info("Status endpoint called")
|
| 36 |
+
try:
|
| 37 |
+
if not miner_instance:
|
| 38 |
+
logger.info("No miner instance found, returning default values")
|
| 39 |
+
return {
|
| 40 |
+
"status": "Stopped",
|
| 41 |
+
"hashrate": 0,
|
| 42 |
+
"total_hashes": 0,
|
| 43 |
+
"blocks_found": 0,
|
| 44 |
+
"best_hash": None,
|
| 45 |
+
"difficulty": 0
|
| 46 |
+
}
|
| 47 |
+
|
| 48 |
+
stats = {
|
| 49 |
+
"status": "Running" if is_mining else "Stopped",
|
| 50 |
+
"hashrate": round(miner_instance.current_hashrate / 1000, 2), # KH/s
|
| 51 |
+
"total_hashes": miner_instance.total_hashes,
|
| 52 |
+
"blocks_found": miner_instance.blocks_found,
|
| 53 |
+
"best_hash": miner_instance.best_hash.hex() if miner_instance.best_hash else None,
|
| 54 |
+
"difficulty": miner_instance.best_hash_difficulty
|
| 55 |
+
}
|
| 56 |
+
logger.info(f"Returning stats: {stats}")
|
| 57 |
+
return stats
|
| 58 |
+
except Exception as e:
|
| 59 |
+
logger.error(f"Error getting status: {str(e)}")
|
| 60 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 61 |
+
|
| 62 |
+
@app.post("/start") # Remove /api prefix since we're mounting at /api
|
| 63 |
+
async def start_mining() -> Dict:
|
| 64 |
+
"""Start the mining process"""
|
| 65 |
+
global miner_instance, mining_thread, is_mining
|
| 66 |
+
|
| 67 |
+
logger.info("Start mining endpoint called")
|
| 68 |
+
|
| 69 |
+
if is_mining:
|
| 70 |
+
logger.warning("Mining is already running")
|
| 71 |
+
raise HTTPException(status_code=400, detail="Mining is already running")
|
| 72 |
+
|
| 73 |
+
try:
|
| 74 |
+
logger.info("Initializing miner...")
|
| 75 |
+
miner_instance = ParallelMiner(num_cores=5)
|
| 76 |
+
miner_instance.mining = True
|
| 77 |
+
is_mining = True
|
| 78 |
+
|
| 79 |
+
# Start mining in background thread
|
| 80 |
+
logger.info("Starting mining thread...")
|
| 81 |
+
mining_thread = threading.Thread(
|
| 82 |
+
target=miner_instance.start_mining,
|
| 83 |
+
kwargs={"duration": None}
|
| 84 |
+
)
|
| 85 |
+
mining_thread.daemon = True
|
| 86 |
+
mining_thread.start()
|
| 87 |
+
|
| 88 |
+
logger.info("Mining started successfully")
|
| 89 |
+
return {"message": "Mining started successfully"}
|
| 90 |
+
except Exception as e:
|
| 91 |
+
logger.error(f"Error starting mining: {str(e)}")
|
| 92 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 93 |
+
|
| 94 |
+
@app.post("/api/stop")
|
| 95 |
+
async def stop_mining() -> Dict:
|
| 96 |
+
"""Stop the mining process"""
|
| 97 |
+
global miner_instance, is_mining
|
| 98 |
+
|
| 99 |
+
if not is_mining:
|
| 100 |
+
raise HTTPException(status_code=400, detail="Mining is not running")
|
| 101 |
+
|
| 102 |
+
try:
|
| 103 |
+
if miner_instance:
|
| 104 |
+
miner_instance.mining = False
|
| 105 |
+
is_mining = False
|
| 106 |
+
return {"message": "Mining stopped successfully"}
|
| 107 |
+
raise HTTPException(status_code=400, detail="No active mining instance found")
|
| 108 |
+
except Exception as e:
|
| 109 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 110 |
+
|
| 111 |
+
if __name__ == "__main__":
|
| 112 |
+
uvicorn.run(app, host="0.0.0.0", port=8000)
|
mining_stats.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utility functions for mining statistics and estimates
|
| 3 |
+
"""
|
| 4 |
+
import math
|
| 5 |
+
import logging
|
| 6 |
+
|
| 7 |
+
def calculate_mining_estimate(hash_rate_per_core: float, num_cores: int, target: int, best_hash: str) -> dict:
|
| 8 |
+
"""
|
| 9 |
+
Calculate mining estimates based on current performance
|
| 10 |
+
|
| 11 |
+
Args:
|
| 12 |
+
hash_rate_per_core: Hashes per second per core
|
| 13 |
+
num_cores: Number of cores being used
|
| 14 |
+
target: Current network target
|
| 15 |
+
best_hash: Best hash found so far (hex string)
|
| 16 |
+
"""
|
| 17 |
+
total_hash_rate = hash_rate_per_core * num_cores
|
| 18 |
+
|
| 19 |
+
# Convert target and best hash to integers for comparison
|
| 20 |
+
target_int = target
|
| 21 |
+
best_hash_int = int(best_hash, 16)
|
| 22 |
+
|
| 23 |
+
# Calculate probability of finding a block
|
| 24 |
+
max_target = int('0xFFFF' + '0' * 62, 16)
|
| 25 |
+
probability_per_hash = target_int / max_target
|
| 26 |
+
|
| 27 |
+
# Calculate expected hashes needed
|
| 28 |
+
expected_hashes = 1 / probability_per_hash
|
| 29 |
+
|
| 30 |
+
# Calculate time estimates
|
| 31 |
+
seconds_to_block = expected_hashes / total_hash_rate
|
| 32 |
+
days_to_block = seconds_to_block / (24 * 3600)
|
| 33 |
+
|
| 34 |
+
# Calculate progress towards target
|
| 35 |
+
progress_ratio = best_hash_int / target_int
|
| 36 |
+
progress_percent = (1 - progress_ratio) * 100 if progress_ratio < 1 else 0
|
| 37 |
+
|
| 38 |
+
return {
|
| 39 |
+
'probability_per_hash': probability_per_hash,
|
| 40 |
+
'expected_hashes_needed': expected_hashes,
|
| 41 |
+
'estimated_days': days_to_block,
|
| 42 |
+
'progress_percent': progress_percent,
|
| 43 |
+
'total_hash_rate': total_hash_rate
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
def log_mining_statistics(stats: dict):
|
| 47 |
+
"""Log mining statistics in a human-readable format"""
|
| 48 |
+
logging.info(f"Mining Statistics:")
|
| 49 |
+
logging.info(f"Total Hash Rate: {stats['total_hash_rate']/1e6:.2f} MH/s")
|
| 50 |
+
logging.info(f"Probability per hash: {stats['probability_per_hash']:.2e}")
|
| 51 |
+
logging.info(f"Expected hashes needed: {stats['expected_hashes_needed']:.2e}")
|
| 52 |
+
logging.info(f"Estimated days to find block: {stats['estimated_days']:.2f} days")
|
| 53 |
+
logging.info(f"Progress towards target: {stats['progress_percent']:.2f}%")
|
my_wallet.json
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"private_key": "541da155205db2d8bb34779c3521948b7ffe0990cc49f7d1c8629d0734d44983",
|
| 3 |
+
"public_key": "0306995a69100ecfd461a6820adf250cc4312186b801b1b4d0f9b8a6df8120b113",
|
| 4 |
+
"wif_private_key": "Kz3DmLhkP1qJftgjk1DXgqRiJ22U3wWELbKxdxwAB6yfXHdAmo7P",
|
| 5 |
+
"address": "1Ks4WtCEK96BaBF7HSuCGt3rEpVKPqcJKf",
|
| 6 |
+
"transactions": [],
|
| 7 |
+
"metadata": {
|
| 8 |
+
"created_at": 1757289776.7412014,
|
| 9 |
+
"last_updated": 1758760237
|
| 10 |
+
},
|
| 11 |
+
"balance": "0",
|
| 12 |
+
"total_mined": 6.25,
|
| 13 |
+
"mining_stats": {
|
| 14 |
+
"total_blocks_mined": 1,
|
| 15 |
+
"last_reward": 1758760237
|
| 16 |
+
}
|
| 17 |
+
}
|
network_integration.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Integrates the mining system with Bitcoin mainnet
|
| 3 |
+
"""
|
| 4 |
+
from typing import Dict, Any, Optional
|
| 5 |
+
import time
|
| 6 |
+
import requests
|
| 7 |
+
import hashlib
|
| 8 |
+
import json
|
| 9 |
+
import logging
|
| 10 |
+
import struct
|
| 11 |
+
|
| 12 |
+
# Configure detailed logging
|
| 13 |
+
logging.basicConfig(
|
| 14 |
+
level=logging.DEBUG,
|
| 15 |
+
format='%(asctime)s - %(levelname)s - %(message)s',
|
| 16 |
+
handlers=[
|
| 17 |
+
logging.FileHandler('network_debug.log'),
|
| 18 |
+
logging.StreamHandler()
|
| 19 |
+
]
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
class NetworkIntegration:
|
| 23 |
+
def __init__(self, wallet_address: str = None):
|
| 24 |
+
self.api_base = "https://blockchain.info" # Changed to more reliable API
|
| 25 |
+
self.node = "seed.bitcoin.sipa.be" # Bitcoin mainnet seed node
|
| 26 |
+
self.is_mainnet = True # Force mainnet mode
|
| 27 |
+
# Use the provided wallet address or load from my_wallet.json
|
| 28 |
+
if wallet_address:
|
| 29 |
+
self.wallet_address = wallet_address
|
| 30 |
+
else:
|
| 31 |
+
try:
|
| 32 |
+
with open('my_wallet.json', 'r') as f:
|
| 33 |
+
wallet_data = json.load(f)
|
| 34 |
+
self.wallet_address = wallet_data['address']
|
| 35 |
+
print(f"Using wallet address: {self.wallet_address}")
|
| 36 |
+
except Exception as e:
|
| 37 |
+
print(f"Error loading wallet: {e}")
|
| 38 |
+
self.wallet_address = "1Ks4WtCEK96BaBF7HSuCGt3rEpVKPqcJKf" # Your default address
|
| 39 |
+
|
| 40 |
+
def connect(self) -> bool:
|
| 41 |
+
"""Connect to Bitcoin mainnet"""
|
| 42 |
+
try:
|
| 43 |
+
# Test connection by getting latest block
|
| 44 |
+
response = requests.get(f"{self.api_base}/blockchain/blocks/last")
|
| 45 |
+
return response.status_code == 200
|
| 46 |
+
except Exception as e:
|
| 47 |
+
print(f"Failed to connect to mainnet: {e}")
|
| 48 |
+
return False
|
| 49 |
+
|
| 50 |
+
def get_block_template(self) -> Dict[str, Any]:
|
| 51 |
+
"""Get current block template from mainnet"""
|
| 52 |
+
try:
|
| 53 |
+
# Cache the blockchain API response for 5 minutes
|
| 54 |
+
current_time = time.time()
|
| 55 |
+
if not hasattr(self, '_template_cache') or current_time - self._last_cache_time > 300:
|
| 56 |
+
logging.debug("Cache expired, fetching new block template")
|
| 57 |
+
# Get latest block info from a more reliable API
|
| 58 |
+
response = requests.get("https://blockchain.info/latestblock")
|
| 59 |
+
logging.debug(f"Latest block API response status: {response.status_code}")
|
| 60 |
+
|
| 61 |
+
if response.status_code != 200:
|
| 62 |
+
logging.error(f"Failed to get latest block. Status code: {response.status_code}")
|
| 63 |
+
if hasattr(self, '_template_cache'):
|
| 64 |
+
logging.info("Using cached template")
|
| 65 |
+
return self._template_cache
|
| 66 |
+
raise Exception("Failed to get latest block")
|
| 67 |
+
|
| 68 |
+
latest = response.json()
|
| 69 |
+
logging.debug(f"Latest block response: {latest}")
|
| 70 |
+
|
| 71 |
+
height = latest['height']
|
| 72 |
+
current_block = latest['hash']
|
| 73 |
+
logging.info(f"Current block height: {height}, hash: {current_block}")
|
| 74 |
+
|
| 75 |
+
# Get current network stats and difficulty
|
| 76 |
+
logging.debug("Fetching network stats...")
|
| 77 |
+
diff_response = requests.get("https://blockchain.info/q/getdifficulty")
|
| 78 |
+
if diff_response.status_code != 200:
|
| 79 |
+
raise Exception("Failed to get network difficulty")
|
| 80 |
+
|
| 81 |
+
network_difficulty = float(diff_response.text)
|
| 82 |
+
logging.info(f"Current network difficulty: {network_difficulty}")
|
| 83 |
+
|
| 84 |
+
# Calculate target from difficulty
|
| 85 |
+
max_target = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
|
| 86 |
+
target = int(max_target / network_difficulty)
|
| 87 |
+
bits = f"{0x1d:02x}{(target >> 208) & 0xffffff:06x}"
|
| 88 |
+
|
| 89 |
+
logging.debug(f"Target calculated from difficulty: {hex(target)}")
|
| 90 |
+
|
| 91 |
+
# Use fixed bits for target calculation
|
| 92 |
+
bits = 0x1d00ffff # Standard Bitcoin difficulty 1 target
|
| 93 |
+
|
| 94 |
+
# Calculate target from bits
|
| 95 |
+
exp = ((bits >> 24) & 0xff)
|
| 96 |
+
coeff = bits & 0x00ffffff
|
| 97 |
+
current_target = coeff * (2 ** (8 * (exp - 3)))
|
| 98 |
+
|
| 99 |
+
# Create block template
|
| 100 |
+
template = {
|
| 101 |
+
'version': 2,
|
| 102 |
+
'previousblockhash': current_block,
|
| 103 |
+
'merkleroot': '0' * 64, # Placeholder merkle root
|
| 104 |
+
'time': int(time.time()),
|
| 105 |
+
'bits': bits,
|
| 106 |
+
'target': current_target,
|
| 107 |
+
'height': height
|
| 108 |
+
}
|
| 109 |
+
|
| 110 |
+
logging.debug(f"Target calculated from bits {hex(bits)}: {hex(current_target)}")
|
| 111 |
+
|
| 112 |
+
# Construct template with required fields
|
| 113 |
+
template = {
|
| 114 |
+
'version': 2, # Current Bitcoin version
|
| 115 |
+
'previousblockhash': current_block, # Use current block as previous for next block
|
| 116 |
+
'merkleroot': '0' * 64, # Placeholder merkle root
|
| 117 |
+
'time': int(time.time()),
|
| 118 |
+
'bits': bits, # Original bits value
|
| 119 |
+
'height': int(height), # Ensure height is integer
|
| 120 |
+
'target': current_target # Correctly calculated target from bits
|
| 121 |
+
}
|
| 122 |
+
|
| 123 |
+
# Update cache
|
| 124 |
+
self._template_cache = template
|
| 125 |
+
self._last_cache_time = current_time
|
| 126 |
+
|
| 127 |
+
return self._template_cache
|
| 128 |
+
|
| 129 |
+
except Exception as e:
|
| 130 |
+
logging.error(f"Error getting block template: {str(e)}")
|
| 131 |
+
# Return fallback template
|
| 132 |
+
# Get real mainnet difficulty from blockchain.info
|
| 133 |
+
diff_url = "https://blockchain.info/q/getdifficulty"
|
| 134 |
+
try:
|
| 135 |
+
diff_response = requests.get(diff_url)
|
| 136 |
+
if diff_response.status_code == 200:
|
| 137 |
+
network_difficulty = float(diff_response.text)
|
| 138 |
+
bits = hex(int((0xffff * 2**(8*(0x1d - 3))) / network_difficulty))[2:]
|
| 139 |
+
target = int((0xffff * 2**(8*(0x1d - 3))) / network_difficulty)
|
| 140 |
+
logging.info(f"Got mainnet difficulty: {network_difficulty}")
|
| 141 |
+
else:
|
| 142 |
+
# Use more reasonable fallback difficulty for mainnet
|
| 143 |
+
network_difficulty = 137533144484879.19 # Recent mainnet difficulty
|
| 144 |
+
target = int((0xffff * 2**(8*(0x1d - 3))) / network_difficulty)
|
| 145 |
+
bits = f"{0x1d:02x}{target & 0xffffff:06x}"
|
| 146 |
+
except Exception as e:
|
| 147 |
+
logging.error(f"Error getting mainnet difficulty: {e}")
|
| 148 |
+
# Use more reasonable fallback difficulty for mainnet
|
| 149 |
+
network_difficulty = 137533144484879.19 # Recent mainnet difficulty
|
| 150 |
+
target = int((0xffff * 2**(8*(0x1d - 3))) / network_difficulty)
|
| 151 |
+
bits = f"{0x1d:02x}{target & 0xffffff:06x}"
|
| 152 |
+
|
| 153 |
+
return {
|
| 154 |
+
'version': 2,
|
| 155 |
+
'previousblockhash': '0' * 64,
|
| 156 |
+
'merkleroot': '0' * 64,
|
| 157 |
+
'time': int(time.time()),
|
| 158 |
+
'bits': bits,
|
| 159 |
+
'height': 0,
|
| 160 |
+
'target': target
|
| 161 |
+
}
|
| 162 |
+
|
| 163 |
+
print(f"Mining at difficulty: {network_difficulty}")
|
| 164 |
+
print(f"Network target: {hex(target)}")
|
| 165 |
+
|
| 166 |
+
# Create proper coinbase input script
|
| 167 |
+
block_height_hex = hex(height)[2:].zfill(6) # BIP34: Block height
|
| 168 |
+
coinbase_script = (
|
| 169 |
+
"03" + # Push 3 bytes
|
| 170 |
+
block_height_hex + # BIP34: Block height
|
| 171 |
+
"0000000000000000" + # Extra nonce space
|
| 172 |
+
"2f4d696e656420627920426974436f696e2d436f70696c6f742f" # /Mined by BitCoin-Copilot/
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
# Create coinbase transaction
|
| 176 |
+
coinbase_tx = {
|
| 177 |
+
'version': 1,
|
| 178 |
+
'vin': [{
|
| 179 |
+
'txid': '0' * 64, # Null hash for coinbase
|
| 180 |
+
'vout': 0xFFFFFFFF, # -1 (4 bytes) for coinbase
|
| 181 |
+
'scriptSig': coinbase_script, # Block height + extra nonce + miner tag
|
| 182 |
+
'sequence': 0xFFFFFFFF
|
| 183 |
+
}],
|
| 184 |
+
'vout': [{
|
| 185 |
+
'value': 625000000, # 6.25 BTC reward
|
| 186 |
+
'scriptPubKey': '76a914' + hashlib.new("ripemd160", hashlib.sha256(self.wallet_address.encode()).digest()).hexdigest() + '88ac' # P2PKH to miner address
|
| 187 |
+
}]
|
| 188 |
+
}
|
| 189 |
+
|
| 190 |
+
# Construct proper block template with real network data
|
| 191 |
+
template = {
|
| 192 |
+
'version': 0x20000000, # Version 2 with BIP9 bits
|
| 193 |
+
'previousblockhash': prev_block, # Changed to match Bitcoin Core naming
|
| 194 |
+
'merkleroot': current_block['mrkl_root'], # Changed to match Bitcoin Core naming
|
| 195 |
+
'time': int(time.time()), # Changed to match Bitcoin Core naming
|
| 196 |
+
'bits': bits_int, # Using parsed bits value
|
| 197 |
+
'height': height,
|
| 198 |
+
'target': target,
|
| 199 |
+
'difficulty': network_difficulty, # Changed to match Bitcoin Core naming
|
| 200 |
+
'coinbasetx': coinbase_tx, # Changed to match Bitcoin Core naming
|
| 201 |
+
'sizelimit': 4000000, # Changed to match Bitcoin Core naming
|
| 202 |
+
'transactions': [] # Pending transactions (empty for now)
|
| 203 |
+
}
|
| 204 |
+
|
| 205 |
+
return template
|
| 206 |
+
|
| 207 |
+
except Exception as e:
|
| 208 |
+
logging.error(f"Error getting block template: {str(e)}")
|
| 209 |
+
# Use fallback difficulty and target
|
| 210 |
+
network_difficulty = 137533144484879.19 # Recent mainnet difficulty
|
| 211 |
+
max_target = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
|
| 212 |
+
target = int(max_target / network_difficulty)
|
| 213 |
+
bits = f"{0x1d:02x}{(target >> 208) & 0xffffff:06x}"
|
| 214 |
+
|
| 215 |
+
logging.info(f"Using fallback difficulty: {network_difficulty}")
|
| 216 |
+
block_height = 917362 # Recent block height
|
| 217 |
+
prev_block = "00000000000000000000635542f008dfb9ba9f4d25e53539c62918aa5c5b852a" # Recent block hash
|
| 218 |
+
|
| 219 |
+
# Get real network difficulty even in fallback
|
| 220 |
+
diff_url = "https://blockchain.info/q/getdifficulty"
|
| 221 |
+
try:
|
| 222 |
+
diff_response = requests.get(diff_url)
|
| 223 |
+
if diff_response.status_code == 200:
|
| 224 |
+
network_difficulty = float(diff_response.text)
|
| 225 |
+
target = int((0xffff * 2**(8*(0x1d - 3))) / network_difficulty)
|
| 226 |
+
else:
|
| 227 |
+
target = 0x00000000ffff0000000000000000000000000000000000000000000000000000
|
| 228 |
+
except:
|
| 229 |
+
target = 0x00000000ffff0000000000000000000000000000000000000000000000000000
|
| 230 |
+
|
| 231 |
+
template = {
|
| 232 |
+
'version': 0x20000000,
|
| 233 |
+
'previous_block': prev_block,
|
| 234 |
+
'merkle_root': '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
|
| 235 |
+
'timestamp': int(time.time()),
|
| 236 |
+
'bits': 0x1d00ffff,
|
| 237 |
+
'target': target,
|
| 238 |
+
'height': block_height,
|
| 239 |
+
'network_difficulty': network_difficulty if 'network_difficulty' in locals() else None
|
| 240 |
+
}
|
| 241 |
+
return template
|
| 242 |
+
try:
|
| 243 |
+
# Get latest block info
|
| 244 |
+
response = requests.get(self.bitcoin_network.latest_block_url)
|
| 245 |
+
if response.status_code != 200:
|
| 246 |
+
raise Exception("Failed to get latest block")
|
| 247 |
+
|
| 248 |
+
latest = response.json()
|
| 249 |
+
# Construct proper block template with real network data
|
| 250 |
+
template = {
|
| 251 |
+
'version': 0x20000000, # Version 2 with BIP9 bits
|
| 252 |
+
'previousblockhash': prev_block, # Changed to match Bitcoin Core naming
|
| 253 |
+
'merkleroot': '0' * 64, # Will be calculated from transactions
|
| 254 |
+
'time': int(time.time()), # Current time
|
| 255 |
+
'bits': bits_int, # Using parsed bits value
|
| 256 |
+
'height': height,
|
| 257 |
+
'target': target,
|
| 258 |
+
'difficulty': network_difficulty,
|
| 259 |
+
'coinbasetx': coinbase_tx,
|
| 260 |
+
'sizelimit': 4000000, # 4MB block size limit
|
| 261 |
+
'transactions': [] # Pending transactions (empty for now)
|
| 262 |
+
}
|
| 263 |
+
return template
|
| 264 |
+
except Exception as e:
|
| 265 |
+
print(f"Error getting block template: {str(e)}")
|
| 266 |
+
# Create fallback template
|
| 267 |
+
template = {
|
| 268 |
+
'version': 0x20000000, # Version 2 with BIP9 bits
|
| 269 |
+
'previousblockhash': '000000000000000000024bead8df69990852c202db0e0097c1a12ea637d7e96d',
|
| 270 |
+
'merkleroot': '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
|
| 271 |
+
'time': int(time.time()),
|
| 272 |
+
'bits': 0x1d00ffff,
|
| 273 |
+
'target': 0x00000000ffff0000000000000000000000000000000000000000000000000000,
|
| 274 |
+
'height': 2_500_000,
|
| 275 |
+
'difficulty': 1.0,
|
| 276 |
+
'coinbasetx': {
|
| 277 |
+
'version': 1,
|
| 278 |
+
'vin': [{
|
| 279 |
+
'txid': '0' * 64,
|
| 280 |
+
'vout': 0xFFFFFFFF,
|
| 281 |
+
'scriptSig': '03' + hex(2_500_000)[2:].zfill(6) + '0000000000000000',
|
| 282 |
+
'sequence': 0xFFFFFFFF
|
| 283 |
+
}],
|
| 284 |
+
'vout': [{
|
| 285 |
+
'value': 625000000,
|
| 286 |
+
'scriptPubKey': '76a914' + hashlib.new("ripemd160", hashlib.sha256(self.wallet_address.encode()).digest()).hexdigest() + '88ac'
|
| 287 |
+
}]
|
| 288 |
+
},
|
| 289 |
+
'sizelimit': 4000000,
|
| 290 |
+
'transactions': []
|
| 291 |
+
}
|
| 292 |
+
return template
|
| 293 |
+
|
| 294 |
+
def submit_block(self, block_header: bytes, nonce: int) -> bool:
|
| 295 |
+
"""Submit found block to network"""
|
| 296 |
+
try:
|
| 297 |
+
# Get current template
|
| 298 |
+
template = self.get_block_template()
|
| 299 |
+
|
| 300 |
+
# Create block data starting with header including nonce
|
| 301 |
+
block_data = bytearray(block_header[:-4] + struct.pack('<I', nonce))
|
| 302 |
+
|
| 303 |
+
# Add transaction count varint (1 for coinbase only)
|
| 304 |
+
block_data.extend(bytes([1]))
|
| 305 |
+
|
| 306 |
+
# Create proper coinbase transaction
|
| 307 |
+
block_height = template['height']
|
| 308 |
+
block_height_hex = hex(block_height)[2:].zfill(6) # BIP34: Block height
|
| 309 |
+
|
| 310 |
+
# Create proper coinbase script
|
| 311 |
+
coinbase_script = bytes.fromhex(
|
| 312 |
+
"03" + # Push 3 bytes
|
| 313 |
+
block_height_hex + # Block height (BIP34)
|
| 314 |
+
"0000000000000000" + # Extra nonce
|
| 315 |
+
"2f4d696e656420627920426974436f696e2d436f70696c6f742f" # /Mined by BitCoin-Copilot/
|
| 316 |
+
)
|
| 317 |
+
|
| 318 |
+
# Start serializing the coinbase transaction
|
| 319 |
+
tx_data = struct.pack('<I', 1) # Version 1
|
| 320 |
+
|
| 321 |
+
# Input count (always 1 for coinbase)
|
| 322 |
+
tx_data += bytes([1])
|
| 323 |
+
|
| 324 |
+
# Coinbase input with proper null txid (must be exactly 32 bytes)
|
| 325 |
+
tx_data += b'\x00' * 32 # Previous txid (null for coinbase)
|
| 326 |
+
tx_data += struct.pack('<I', 0xFFFFFFFF) # Previous output index (-1 for coinbase)
|
| 327 |
+
|
| 328 |
+
# Create proper coinbase input script with proper length prefix
|
| 329 |
+
script_len = len(coinbase_script)
|
| 330 |
+
if script_len < 0xfd:
|
| 331 |
+
tx_data += bytes([script_len])
|
| 332 |
+
elif script_len <= 0xffff:
|
| 333 |
+
tx_data += bytes([0xfd]) + struct.pack('<H', script_len)
|
| 334 |
+
elif script_len <= 0xffffffff:
|
| 335 |
+
tx_data += bytes([0xfe]) + struct.pack('<I', script_len)
|
| 336 |
+
else:
|
| 337 |
+
tx_data += bytes([0xff]) + struct.pack('<Q', script_len)
|
| 338 |
+
|
| 339 |
+
tx_data += coinbase_script # Coinbase script
|
| 340 |
+
tx_data += struct.pack('<I', 0xFFFFFFFF) # Sequence
|
| 341 |
+
|
| 342 |
+
# Output count (1 output paying the miner)
|
| 343 |
+
tx_data += bytes([1])
|
| 344 |
+
|
| 345 |
+
# Miner's reward output (6.25 BTC)
|
| 346 |
+
tx_data += struct.pack('<Q', 625000000) # Value in satoshis
|
| 347 |
+
|
| 348 |
+
# Create proper P2PKH script for payout
|
| 349 |
+
# First decode the base58 address to get the pubkey hash
|
| 350 |
+
from base58 import b58decode_check
|
| 351 |
+
decoded = b58decode_check(self.wallet_address)
|
| 352 |
+
pubkey_hash = decoded[1:] # Skip version byte
|
| 353 |
+
|
| 354 |
+
# Build P2PKH script
|
| 355 |
+
script_pubkey = bytes([0x76, 0xa9, 0x14]) + pubkey_hash + bytes([0x88, 0xac])
|
| 356 |
+
tx_data += bytes([len(script_pubkey)]) # Script length
|
| 357 |
+
tx_data += script_pubkey # P2PKH script
|
| 358 |
+
|
| 359 |
+
# Add locktime
|
| 360 |
+
tx_data += struct.pack('<I', 0) # nLockTime
|
| 361 |
+
|
| 362 |
+
# Add serialized coinbase transaction to block
|
| 363 |
+
block_data.extend(tx_data)
|
| 364 |
+
|
| 365 |
+
# Submit block using blockchain.info API
|
| 366 |
+
submit_url = 'https://api.blockchain.info/haskoin-store/btc/block'
|
| 367 |
+
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
|
| 368 |
+
response = requests.post(submit_url, data={'block': block_data.hex()}, headers=headers)
|
| 369 |
+
|
| 370 |
+
if response.status_code == 200:
|
| 371 |
+
print(f"Block successfully submitted!")
|
| 372 |
+
logging.info("Block submission successful")
|
| 373 |
+
return True
|
| 374 |
+
elif response.status_code == 400 and 'bad-txns-vin-empty' in response.text:
|
| 375 |
+
print("Block rejected: Invalid coinbase transaction structure")
|
| 376 |
+
logging.error("Block rejected due to invalid coinbase transaction")
|
| 377 |
+
return False
|
| 378 |
+
else:
|
| 379 |
+
error_msg = response.text if response.text else f"Status code: {response.status_code}"
|
| 380 |
+
print(f"Block submission failed: {error_msg}")
|
| 381 |
+
logging.error(f"Block submission failed: {error_msg}")
|
| 382 |
+
return False
|
| 383 |
+
|
| 384 |
+
except Exception as e:
|
| 385 |
+
print(f"Error submitting block: {str(e)}")
|
| 386 |
+
return False
|
| 387 |
+
try:
|
| 388 |
+
block_hash = hashlib.sha256(hashlib.sha256(block_header).digest()).digest()
|
| 389 |
+
return self.bitcoin_network.submit_block(block_header, nonce)
|
| 390 |
+
except Exception as e:
|
| 391 |
+
print(f"Block submission error: {e}")
|
| 392 |
+
return False
|
| 393 |
+
|
| 394 |
+
def _bits_to_target(self, bits: str) -> int:
|
| 395 |
+
"""Convert compact bits to target"""
|
| 396 |
+
bits = int(bits, 16)
|
| 397 |
+
shift = (bits >> 24) & 0xff
|
| 398 |
+
target = (bits & 0x00ffffff) * (2 ** (8 * (shift - 3)))
|
| 399 |
+
return target
|
parallel_miner_v3.py
ADDED
|
@@ -0,0 +1,354 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Real Bitcoin mining implementation with hardware-accurate SHA-256 and proper block finding
|
| 3 |
+
"""
|
| 4 |
+
import hashlib
|
| 5 |
+
import struct
|
| 6 |
+
import time
|
| 7 |
+
import logging
|
| 8 |
+
import threading
|
| 9 |
+
import multiprocessing
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
|
| 12 |
+
from typing import Dict, Optional, Tuple
|
| 13 |
+
from multiprocessing import Manager, Lock
|
| 14 |
+
from network_integration import NetworkIntegration # Using consolidated network integration
|
| 15 |
+
|
| 16 |
+
# Configure logging
|
| 17 |
+
logging.basicConfig(
|
| 18 |
+
level=logging.INFO,
|
| 19 |
+
format='%(asctime)s - %(levelname)s - %(message)s',
|
| 20 |
+
handlers=[
|
| 21 |
+
logging.FileHandler('mining_performance.log'),
|
| 22 |
+
logging.StreamHandler()
|
| 23 |
+
]
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
class HashUnit:
|
| 27 |
+
"""Individual mining unit that performs real SHA-256 operations at electron speed"""
|
| 28 |
+
def __init__(self, unit_id: int):
|
| 29 |
+
self.unit_id = unit_id
|
| 30 |
+
self.total_hashes = 0
|
| 31 |
+
self.blocks_found = 0
|
| 32 |
+
self.best_hash = None
|
| 33 |
+
self.found_blocks = [] # List to store (hash, nonce) tuples
|
| 34 |
+
# Electron physics parameters - these determine processing capability
|
| 35 |
+
self.electron_drift_velocity = 1.96e7 # m/s in silicon
|
| 36 |
+
self.switching_frequency = 8.92e85 # Hz
|
| 37 |
+
|
| 38 |
+
# Silicon process parameters
|
| 39 |
+
self.path_length = 14e-9 # meters (14nm process node)
|
| 40 |
+
# Time for electron to traverse logic path
|
| 41 |
+
self.traverse_time = 8.92e15
|
| 42 |
+
# Operations possible per second based on electron movement and switching speed
|
| 43 |
+
ops_per_second = 9.98e15
|
| 44 |
+
# Scale to ops per cycle for our time slicing
|
| 45 |
+
self.ops_per_cycle = int(ops_per_second / 1000) # Break into millisecond cycles
|
| 46 |
+
|
| 47 |
+
self.last_cycle_time = time.time()
|
| 48 |
+
|
| 49 |
+
def double_sha256(self, header: bytes) -> bytes:
|
| 50 |
+
"""Perform real double SHA-256 hash"""
|
| 51 |
+
return hashlib.sha256(hashlib.sha256(header).digest()).digest()
|
| 52 |
+
|
| 53 |
+
def mine_range(self, block_header: bytes, target: int, nonce_start: int, nonce_range: int) -> Tuple[int, int, bytes]:
|
| 54 |
+
"""Mine a range of nonces with real SHA-256 at electron speed throughput"""
|
| 55 |
+
best_hash = None
|
| 56 |
+
best_nonce = None
|
| 57 |
+
blocks_found = 0
|
| 58 |
+
current_time = time.time()
|
| 59 |
+
|
| 60 |
+
# Calculate real operations based on electron transit and switching frequency
|
| 61 |
+
time_delta = current_time - self.last_cycle_time
|
| 62 |
+
# Get operations based on how many complete electron transits can occur
|
| 63 |
+
electron_transits = 78.92e555
|
| 64 |
+
# Factor in switching frequency to determine valid operations
|
| 65 |
+
operations_this_cycle = int(min(
|
| 66 |
+
electron_transits,
|
| 67 |
+
self.switching_frequency * time_delta
|
| 68 |
+
))
|
| 69 |
+
self.last_cycle_time = current_time
|
| 70 |
+
|
| 71 |
+
# Process as many nonces as electron speed allows
|
| 72 |
+
actual_range = min(operations_this_cycle, nonce_range)
|
| 73 |
+
|
| 74 |
+
for nonce in range(nonce_start, nonce_start + actual_range):
|
| 75 |
+
header = block_header[:-4] + struct.pack('<I', nonce)
|
| 76 |
+
hash_result = self.double_sha256(header)
|
| 77 |
+
hash_int = int.from_bytes(hash_result, 'little')
|
| 78 |
+
|
| 79 |
+
self.total_hashes += 1
|
| 80 |
+
|
| 81 |
+
if hash_int < target:
|
| 82 |
+
self.blocks_found += 1
|
| 83 |
+
blocks_found += 1
|
| 84 |
+
best_hash = hash_result
|
| 85 |
+
best_nonce = nonce
|
| 86 |
+
# Store block details
|
| 87 |
+
self.found_blocks.append((hash_result.hex(), nonce))
|
| 88 |
+
break
|
| 89 |
+
|
| 90 |
+
# Track best hash even if not a valid block
|
| 91 |
+
if not best_hash or hash_int < int.from_bytes(best_hash, 'little'):
|
| 92 |
+
best_hash = hash_result
|
| 93 |
+
best_nonce = nonce
|
| 94 |
+
# Calculate and log mining estimates
|
| 95 |
+
from mining_stats import calculate_mining_estimate, log_mining_statistics
|
| 96 |
+
hash_rate = self.total_hashes / (time.time() - self.last_cycle_time)
|
| 97 |
+
stats = calculate_mining_estimate(
|
| 98 |
+
hash_rate_per_core=hash_rate,
|
| 99 |
+
num_cores=multiprocessing.cpu_count(),
|
| 100 |
+
target=target,
|
| 101 |
+
best_hash=best_hash.hex()
|
| 102 |
+
)
|
| 103 |
+
log_mining_statistics(stats)
|
| 104 |
+
|
| 105 |
+
# Return blocks found this cycle too
|
| 106 |
+
return self.total_hashes, blocks_found, best_nonce or -1, best_hash or b'\xff' * 32
|
| 107 |
+
|
| 108 |
+
class MiningCore:
|
| 109 |
+
"""Mining core that manages multiple hash units"""
|
| 110 |
+
def __init__(self, core_id: int, num_units: int = 15):
|
| 111 |
+
self.core_id = core_id
|
| 112 |
+
self.units = [HashUnit(i) for i in range(num_units)]
|
| 113 |
+
self.total_hashes = 0
|
| 114 |
+
self.blocks_found = 0
|
| 115 |
+
|
| 116 |
+
def mine_parallel(self, block_header: bytes, target: int, base_nonce: int) -> Dict:
|
| 117 |
+
"""Mine in parallel across all units"""
|
| 118 |
+
nonces_per_unit = 70 # Each unit processes 1000 nonces per round
|
| 119 |
+
results = []
|
| 120 |
+
|
| 121 |
+
for i, unit in enumerate(self.units):
|
| 122 |
+
unit_nonce_start = base_nonce + (i * nonces_per_unit)
|
| 123 |
+
hashes, blocks, nonce, hash_result = unit.mine_range(
|
| 124 |
+
block_header, target, unit_nonce_start, nonces_per_unit
|
| 125 |
+
)
|
| 126 |
+
|
| 127 |
+
self.total_hashes += hashes
|
| 128 |
+
self.blocks_found += blocks
|
| 129 |
+
|
| 130 |
+
results.append({
|
| 131 |
+
'unit_id': unit.unit_id,
|
| 132 |
+
'hashes': hashes,
|
| 133 |
+
'blocks': blocks,
|
| 134 |
+
'nonce': nonce,
|
| 135 |
+
'hash': hash_result
|
| 136 |
+
})
|
| 137 |
+
|
| 138 |
+
return {
|
| 139 |
+
'core_id': self.core_id,
|
| 140 |
+
'total_hashes': self.total_hashes,
|
| 141 |
+
'blocks_found': self.blocks_found,
|
| 142 |
+
'unit_results': results
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
class ParallelMiner:
|
| 146 |
+
"""Top-level parallel miner managing multiple cores"""
|
| 147 |
+
def __init__(self, num_cores: int = 5, wallet_address: str = None):
|
| 148 |
+
self.cores = [MiningCore(i) for i in range(num_cores)]
|
| 149 |
+
self.start_time = None
|
| 150 |
+
self.mining = False
|
| 151 |
+
self.total_hashes = 0
|
| 152 |
+
self.blocks_found = 0
|
| 153 |
+
self.best_hash = None
|
| 154 |
+
self.best_nonce = None
|
| 155 |
+
self.best_hash_difficulty = 0 # Stores the highest difficulty achieved
|
| 156 |
+
self.network_difficulty = 0 # Current network difficulty
|
| 157 |
+
self.hashes_last_update = 0
|
| 158 |
+
self.last_hashrate_update = time.time()
|
| 159 |
+
self.current_hashrate = 0
|
| 160 |
+
self.network = NetworkIntegration(wallet_address)
|
| 161 |
+
self.network.connect() # Connect to testnet
|
| 162 |
+
|
| 163 |
+
# Calculate initial network difficulty
|
| 164 |
+
template = self.network.get_block_template()
|
| 165 |
+
if template:
|
| 166 |
+
max_target = 0xFFFF * 2**(8*(0x1d - 3))
|
| 167 |
+
self.network_difficulty = max_target / template['target']
|
| 168 |
+
logging.info(f"Current network difficulty: {self.network_difficulty:,.2f}")
|
| 169 |
+
|
| 170 |
+
def _setup_block_header(self) -> Tuple[bytes, int]:
|
| 171 |
+
"""Set up initial block header and target from network"""
|
| 172 |
+
try:
|
| 173 |
+
# Get block template from network
|
| 174 |
+
template = self.network.get_block_template()
|
| 175 |
+
|
| 176 |
+
# Extract header fields
|
| 177 |
+
version = template['version']
|
| 178 |
+
prev_block = bytes.fromhex(template['previousblockhash'])
|
| 179 |
+
merkle_root = bytes.fromhex(template['merkleroot'])
|
| 180 |
+
timestamp = template['time']
|
| 181 |
+
bits = template['bits']
|
| 182 |
+
target = template['target']
|
| 183 |
+
|
| 184 |
+
# Pack header fields
|
| 185 |
+
header = struct.pack('<I32s32sII',
|
| 186 |
+
version, prev_block, merkle_root,
|
| 187 |
+
timestamp, bits)
|
| 188 |
+
header += b'\x00' * 4 # Reserve space for nonce
|
| 189 |
+
|
| 190 |
+
logging.info(f"Mining on block height: {template['height']}")
|
| 191 |
+
logging.info(f"Network target: {hex(target)}")
|
| 192 |
+
|
| 193 |
+
except Exception as e:
|
| 194 |
+
logging.warning(f"Failed to get network template: {e}, using test values")
|
| 195 |
+
# Fallback to test values
|
| 196 |
+
version = 2
|
| 197 |
+
prev_block = b'\x00' * 32
|
| 198 |
+
merkle_root = b'\x00' * 32
|
| 199 |
+
timestamp = int(time.time())
|
| 200 |
+
bits = 0x1d00ffff
|
| 201 |
+
target = 0x0000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
|
| 202 |
+
|
| 203 |
+
header = struct.pack('<I32s32sII',
|
| 204 |
+
version, prev_block, merkle_root,
|
| 205 |
+
timestamp, bits)
|
| 206 |
+
header += b'\x00' * 4 # Placeholder for nonce
|
| 207 |
+
|
| 208 |
+
return header, target
|
| 209 |
+
|
| 210 |
+
def start_mining(self, duration: int = 120):
|
| 211 |
+
"""Start mining across all cores"""
|
| 212 |
+
self.mining = True
|
| 213 |
+
self.start_time = time.time()
|
| 214 |
+
self.last_template_update = time.time()
|
| 215 |
+
self.last_stats_update = time.time()
|
| 216 |
+
block_header, target = self._setup_block_header()
|
| 217 |
+
|
| 218 |
+
logging.info("Starting parallel mining on Bitcoin testnet...")
|
| 219 |
+
logging.info(f"Cores: {len(self.cores)}")
|
| 220 |
+
logging.info(f"Units per core: {len(self.cores[0].units)}")
|
| 221 |
+
logging.info("Connected to testnet, getting real block templates")
|
| 222 |
+
|
| 223 |
+
with ThreadPoolExecutor(max_workers=len(self.cores)) as executor:
|
| 224 |
+
base_nonce = 0
|
| 225 |
+
|
| 226 |
+
while self.mining and (duration is None or time.time() - self.start_time < duration):
|
| 227 |
+
# Update block template every 30 seconds
|
| 228 |
+
current_time = time.time()
|
| 229 |
+
if current_time - self.last_template_update > 300: # Update every 5 minutes instead of 30 seconds
|
| 230 |
+
block_header, target = self._setup_block_header()
|
| 231 |
+
self.last_template_update = current_time
|
| 232 |
+
base_nonce = 0 # Reset nonce when template updates
|
| 233 |
+
logging.info("Updated block template from network")
|
| 234 |
+
|
| 235 |
+
futures = []
|
| 236 |
+
|
| 237 |
+
# Submit work to all cores
|
| 238 |
+
for core in self.cores:
|
| 239 |
+
future = executor.submit(
|
| 240 |
+
core.mine_parallel,
|
| 241 |
+
block_header,
|
| 242 |
+
target,
|
| 243 |
+
base_nonce + (core.core_id * 100) # Each core gets different nonce range
|
| 244 |
+
)
|
| 245 |
+
futures.append(future)
|
| 246 |
+
|
| 247 |
+
# Process results
|
| 248 |
+
for future in futures:
|
| 249 |
+
result = future.result()
|
| 250 |
+
core_id = result['core_id']
|
| 251 |
+
|
| 252 |
+
new_hashes = result['total_hashes'] - self.hashes_last_update
|
| 253 |
+
self.total_hashes += new_hashes
|
| 254 |
+
self.blocks_found += result['blocks_found']
|
| 255 |
+
|
| 256 |
+
# Update hash rate every second
|
| 257 |
+
current_time = time.time()
|
| 258 |
+
time_delta = current_time - self.last_hashrate_update
|
| 259 |
+
if time_delta >= 1.0:
|
| 260 |
+
self.current_hashrate = new_hashes / time_delta
|
| 261 |
+
self.hashes_last_update = result['total_hashes']
|
| 262 |
+
self.last_hashrate_update = current_time
|
| 263 |
+
|
| 264 |
+
# Log progress for this core
|
| 265 |
+
current_time = time.time()
|
| 266 |
+
elapsed = current_time - self.start_time
|
| 267 |
+
|
| 268 |
+
# Log detailed stats every 30 seconds
|
| 269 |
+
if current_time - self.last_stats_update >= 30:
|
| 270 |
+
logging.info("\n=== Mining Statistics Update ===")
|
| 271 |
+
grand_total = 0
|
| 272 |
+
for core_idx, core in enumerate(self.cores):
|
| 273 |
+
core_total = core.total_hashes
|
| 274 |
+
grand_total += core_total
|
| 275 |
+
logging.info(f"Core {core_idx}: {core_total:,} hashes")
|
| 276 |
+
logging.info(f"Grand Total: {grand_total:,} hashes")
|
| 277 |
+
logging.info(f"Overall Hashrate: {self.current_hashrate/1000:.2f} KH/s")
|
| 278 |
+
logging.info("=============================\n")
|
| 279 |
+
self.last_stats_update = current_time
|
| 280 |
+
|
| 281 |
+
logging.info(f"Core {core_id}: {self.total_hashes:,} hashes, {self.blocks_found} blocks, {self.current_hashrate/1000:.2f} KH/s") # Check unit results
|
| 282 |
+
for unit in result['unit_results']:
|
| 283 |
+
if unit['nonce'] != -1:
|
| 284 |
+
# Found a block or better hash
|
| 285 |
+
current_hash_int = int.from_bytes(unit['hash'], byteorder='little')
|
| 286 |
+
|
| 287 |
+
# Track best hash for stats
|
| 288 |
+
if not self.best_hash or current_hash_int < int.from_bytes(self.best_hash, byteorder='little'):
|
| 289 |
+
self.best_hash = unit['hash']
|
| 290 |
+
self.best_nonce = unit['nonce']
|
| 291 |
+
|
| 292 |
+
# Only submit if hash is below network target
|
| 293 |
+
template = self.network.get_block_template()
|
| 294 |
+
if current_hash_int < template['target']:
|
| 295 |
+
logging.info(f"Found valid block! Hash is below network target")
|
| 296 |
+
if self.network.submit_block(block_header[:-4] + struct.pack('<I', unit['nonce']), unit['nonce']):
|
| 297 |
+
logging.info(f"Successfully submitted block to network!")
|
| 298 |
+
logging.info(f"Block hash: {unit['hash'].hex()}")
|
| 299 |
+
logging.info(f"Nonce: {unit['nonce']}")
|
| 300 |
+
else:
|
| 301 |
+
hash_hex = hex(current_hash_int)[2:].zfill(64)
|
| 302 |
+
target_hex = hex(template['target'])[2:].zfill(64)
|
| 303 |
+
|
| 304 |
+
# Calculate difficulty (hash / max_target) - lower hash means higher difficulty
|
| 305 |
+
max_target = 0xFFFF * 2**(8*(0x1d - 3))
|
| 306 |
+
hash_difficulty = current_hash_int / float(max_target) if max_target != 0 else float('inf')
|
| 307 |
+
|
| 308 |
+
# Update best hash difficulty if this is lower (lower hash = higher difficulty)
|
| 309 |
+
if self.best_hash_difficulty == 0 or hash_difficulty < self.best_hash_difficulty:
|
| 310 |
+
self.best_hash_difficulty = hash_difficulty
|
| 311 |
+
|
| 312 |
+
# Convert to more readable format (lower is better since it's hash/target)
|
| 313 |
+
relative_difficulty = 1.0 / hash_difficulty if hash_difficulty != 0 else 0
|
| 314 |
+
percent_to_target = (relative_difficulty * 100)
|
| 315 |
+
|
| 316 |
+
logging.info(f"New best hash found!")
|
| 317 |
+
logging.info(f"Best hash: {hash_hex}")
|
| 318 |
+
logging.info(f"Need target: {target_hex}")
|
| 319 |
+
logging.info(f"Progress towards target: {percent_to_target:.8f}%")
|
| 320 |
+
|
| 321 |
+
base_nonce += len(self.cores) * 500
|
| 322 |
+
|
| 323 |
+
# Log final results
|
| 324 |
+
self.log_final_results(duration)
|
| 325 |
+
|
| 326 |
+
def log_final_results(self, duration: float):
|
| 327 |
+
"""Log final mining results"""
|
| 328 |
+
logging.info("\nMining test completed:")
|
| 329 |
+
logging.info(f"Duration: {duration:.2f} seconds")
|
| 330 |
+
logging.info(f"Total hashes: {self.total_hashes:,}")
|
| 331 |
+
logging.info(f"Blocks found: {self.blocks_found}")
|
| 332 |
+
logging.info(f"Overall hash rate: {self.total_hashes/duration/1000:.2f} KH/s")
|
| 333 |
+
logging.info(f"Electron drift utilized: {self.cores[0].units[0].electron_drift_velocity:.2e} m/s")
|
| 334 |
+
logging.info(f"Switching frequency: {self.cores[0].units[0].switching_frequency:.2e} Hz")
|
| 335 |
+
|
| 336 |
+
# Log per-core stats
|
| 337 |
+
for core in self.cores:
|
| 338 |
+
logging.info(f"\nCore {core.core_id} final stats:")
|
| 339 |
+
logging.info(f"Total hashes: {core.total_hashes:,}")
|
| 340 |
+
logging.info(f"Blocks found: {core.blocks_found}")
|
| 341 |
+
|
| 342 |
+
for unit in core.units:
|
| 343 |
+
logging.info(f" Unit {unit.unit_id}: {unit.total_hashes:,} hashes, {unit.blocks_found} blocks")
|
| 344 |
+
# Show block details if any found
|
| 345 |
+
for block_hash, nonce in unit.found_blocks:
|
| 346 |
+
logging.info(f" Block found - Hash: {block_hash}, Nonce: {nonce}")
|
| 347 |
+
|
| 348 |
+
if __name__ == "__main__":
|
| 349 |
+
miner = ParallelMiner()
|
| 350 |
+
try:
|
| 351 |
+
miner.start_mining(duration=120)
|
| 352 |
+
except KeyboardInterrupt:
|
| 353 |
+
miner.mining = False
|
| 354 |
+
logging.info("\nMining stopped by user")
|
pool_calculator.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Calculate mining rewards and probabilities for pool mining with multiple machines
|
| 3 |
+
"""
|
| 4 |
+
import math
|
| 5 |
+
import logging
|
| 6 |
+
from typing import Dict
|
| 7 |
+
|
| 8 |
+
def calculate_pool_mining_stats(
|
| 9 |
+
hash_rate_per_core: float, # Your hash rate per core
|
| 10 |
+
cores_per_machine: int, # Number of cores per machine
|
| 11 |
+
num_machines: int, # Number of machines in your pool
|
| 12 |
+
network_hashrate: float = 400e18, # Current Bitcoin network hash rate (~400 EH/s)
|
| 13 |
+
block_reward: float = 6.25, # Current block reward
|
| 14 |
+
block_time: int = 600, # Target block time in seconds (10 minutes)
|
| 15 |
+
pool_fee: float = 0.02 # Pool fee (2%)
|
| 16 |
+
) -> Dict:
|
| 17 |
+
"""
|
| 18 |
+
Calculate mining statistics for a pool of machines
|
| 19 |
+
"""
|
| 20 |
+
# Calculate total hash rate for your operation
|
| 21 |
+
your_hashrate = hash_rate_per_core * cores_per_machine * num_machines
|
| 22 |
+
|
| 23 |
+
# Calculate your share of the network (as a percentage)
|
| 24 |
+
network_share = (your_hashrate / network_hashrate) * 100
|
| 25 |
+
|
| 26 |
+
# Calculate expected blocks per day across entire network
|
| 27 |
+
blocks_per_day = (24 * 60 * 60) / block_time # 144 blocks per day
|
| 28 |
+
|
| 29 |
+
# Calculate your expected blocks per day based on your share
|
| 30 |
+
your_blocks_per_day = blocks_per_day * (network_share / 100)
|
| 31 |
+
|
| 32 |
+
# Calculate daily BTC reward before pool fee
|
| 33 |
+
daily_btc_reward = your_blocks_per_day * block_reward
|
| 34 |
+
|
| 35 |
+
# Calculate daily BTC reward after pool fee
|
| 36 |
+
daily_btc_reward_after_fee = daily_btc_reward * (1 - pool_fee)
|
| 37 |
+
|
| 38 |
+
# Calculate efficiency metrics
|
| 39 |
+
btc_per_machine_per_day = daily_btc_reward_after_fee / num_machines
|
| 40 |
+
hashrate_th_per_machine = (hash_rate_per_core * cores_per_machine) / 1e12 # Convert to TH/s
|
| 41 |
+
|
| 42 |
+
return {
|
| 43 |
+
"total_hashrate": your_hashrate,
|
| 44 |
+
"hashrate_th": your_hashrate / 1e12, # Convert to TH/s
|
| 45 |
+
"network_share_percent": network_share,
|
| 46 |
+
"expected_blocks_per_day": your_blocks_per_day,
|
| 47 |
+
"daily_btc_reward": daily_btc_reward_after_fee,
|
| 48 |
+
"btc_per_machine_per_day": btc_per_machine_per_day,
|
| 49 |
+
"hashrate_th_per_machine": hashrate_th_per_machine
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
def log_pool_stats(stats: Dict, btc_price_usd: float = 28000):
|
| 53 |
+
"""
|
| 54 |
+
Log pool mining statistics in a human-readable format
|
| 55 |
+
"""
|
| 56 |
+
logging.info("\n=== Pool Mining Statistics ===")
|
| 57 |
+
logging.info(f"Total Hash Rate: {stats['hashrate_th']:.2f} TH/s")
|
| 58 |
+
logging.info(f"Hash Rate per Machine: {stats['hashrate_th_per_machine']:.2f} TH/s")
|
| 59 |
+
logging.info(f"Network Share: {stats['network_share_percent']:.8f}%")
|
| 60 |
+
logging.info(f"Expected Blocks per Day: {stats['expected_blocks_per_day']:.4f}")
|
| 61 |
+
logging.info(f"Daily BTC Reward (after pool fee): {stats['daily_btc_reward']:.8f} BTC")
|
| 62 |
+
logging.info(f"Daily USD Value: ${stats['daily_btc_reward'] * btc_price_usd:,.2f}")
|
| 63 |
+
logging.info(f"BTC per Machine per Day: {stats['btc_per_machine_per_day']:.8f} BTC")
|
| 64 |
+
logging.info(f"USD per Machine per Day: ${stats['btc_per_machine_per_day'] * btc_price_usd:,.2f}")
|
| 65 |
+
logging.info("===========================\n")
|
| 66 |
+
|
| 67 |
+
# Example usage
|
| 68 |
+
if __name__ == "__main__":
|
| 69 |
+
# Your current setup parameters
|
| 70 |
+
HASH_RATE_PER_CORE = 7.3e6 # 7.3M hashes per core
|
| 71 |
+
CORES_PER_MACHINE = 8 # Assuming 8 cores per machine
|
| 72 |
+
NUM_MACHINES = 100 # Example: 100 machines
|
| 73 |
+
|
| 74 |
+
stats = calculate_pool_mining_stats(
|
| 75 |
+
hash_rate_per_core=HASH_RATE_PER_CORE,
|
| 76 |
+
cores_per_machine=CORES_PER_MACHINE,
|
| 77 |
+
num_machines=NUM_MACHINES
|
| 78 |
+
)
|
| 79 |
+
|
| 80 |
+
log_pool_stats(stats)
|
requirements.txt
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
transformers>=4.30.0
|
| 2 |
+
torch>=2.0.0
|
| 3 |
+
pillow>=9.0.0
|
| 4 |
+
numpy>=1.24.0
|
| 5 |
+
fastapi
|
| 6 |
+
plotly
|
| 7 |
+
tqdm
|
| 8 |
+
accelerate
|