File size: 16,191 Bytes
5879890
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77e2486
 
5879890
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3f5c253
 
 
 
 
 
 
 
 
 
5879890
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e5b124c
006af53
5879890
e5b124c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5879890
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e5b124c
 
 
 
5879890
e5b124c
 
 
 
 
 
5879890
e5b124c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5879890
e5b124c
5879890
e5b124c
 
 
 
 
 
 
 
 
5879890
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
import os
import json
import time
import threading
from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import uvicorn
from typing import Dict
from datetime import datetime

# Import from parallel_miner_v3
from parallel_miner_v3 import ParallelMiner, MiningCore, HashUnit

# FastAPI App Definition
app = FastAPI(
    title="Bitcoin Mining API", 
    description="API endpoints for Bitcoin mining operations using electron-speed SHA-256", 
    version="3.0.0"
)

# Add CORS middleware to allow cross-origin requests
# NOTE(review): wildcard origins combined with allow_credentials=True is very
# permissive — fine for local development, confirm before exposing publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],
)



# Stats file path
STATS_FILE = "mining_stats_history.json"

# Global variables to track mining status
miner_instance = None  # ParallelMiner, created by startup_event
mining_thread = None   # daemon thread running miner_instance.start_mining
mining_stats = {
    "is_mining": False,          # True while a mining session is running
    "total_hashes": 0,           # hashes attempted in the current session
    "blocks_found": 0,           # blocks found in the current session
    "hash_rate": 0.0,            # not updated anywhere in this file
    "best_hash": None,           # best hash seen (bytes) or None
    "best_hash_difficulty": 0,   # difficulty achieved by best_hash
    "start_time": None,          # time.time() when the session started
    "total_runtime": 0,  # Cumulative runtime across all sessions
    "session_count": 0,          # not updated anywhere in this file
    "best_session_hashrate": 0,  # best per-session rate seen, in KH/s
    "all_time_total_hashes": 0,  # cumulative hashes across all sessions
    "logs": []                   # rolling window of the last 100 log lines
}

@app.get("/")
async def root():
    """API root endpoint: report service identity and liveness."""
    # Keep these values in sync with the FastAPI() app metadata above
    # (previously said "MP4 Processing API" v1.0.0 — a copy-paste leftover).
    return {
        "message": "Bitcoin Mining API",
        "version": "3.0.0",
        "status": "running"
    }


def save_mining_stats():
    """Persist the current session's mining statistics to STATS_FILE.

    Appends a per-session record to the history file and refreshes the
    cumulative totals. Both callers (stop_mining and handle_shutdown)
    fold the session into mining_stats' cumulative fields *before*
    calling this, so the totals are copied directly rather than
    re-adding the session — the old code added `elapsed` and the session
    hashes a second time, double-counting every session.

    Errors are logged and swallowed so a stats failure never blocks a
    stop/shutdown path.
    """
    try:
        if os.path.exists(STATS_FILE):
            with open(STATS_FILE, 'r') as f:
                historical_stats = json.load(f)
        else:
            historical_stats = {"sessions": []}
        
        # Calculate final stats for this session
        end_time = time.time()
        elapsed = end_time - mining_stats["start_time"] if mining_stats["start_time"] else 0
        hash_rate = mining_stats["total_hashes"] / elapsed if elapsed > 0 else 0
        
        session_stats = {
            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "duration": f"{elapsed:.2f}s",
            "total_hashes": mining_stats["total_hashes"],
            "avg_hash_rate": f"{hash_rate/1000:.2f} KH/s",
            "blocks_found": mining_stats["blocks_found"],
            "best_hash": mining_stats["best_hash"].hex() if mining_stats["best_hash"] else None,
            "best_hash_difficulty": mining_stats["best_hash_difficulty"]
        }
        
        historical_stats["sessions"].append(session_stats)
        # Cumulative fields were already updated by the caller; copy them
        # as-is to avoid double-counting the current session.
        historical_stats["total_runtime"] = mining_stats["total_runtime"]
        historical_stats["total_hashes"] = mining_stats["all_time_total_hashes"]
        historical_stats["total_blocks"] = sum(s["blocks_found"] for s in historical_stats["sessions"])
        historical_stats["best_session_hashrate"] = max(
            mining_stats["best_session_hashrate"],
            hash_rate/1000  # Convert to KH/s
        )
        
        with open(STATS_FILE, 'w') as f:
            json.dump(historical_stats, f, indent=2)
            
    except Exception as e:
        log_mining(f"Error saving mining stats: {str(e)}")

def log_mining(message):
    """Append a timestamped entry to the in-memory mining log and echo it."""
    entry = f"[{datetime.now().strftime('%H:%M:%S')}] {message}"
    logs = mining_stats["logs"]
    logs.append(entry)

    # Bound memory: retain only the most recent 100 entries.
    if len(logs) > 100:
        mining_stats["logs"] = logs[-100:]

    print(entry)

# NOTE(review): @app.on_event is deprecated in newer FastAPI versions in
# favour of lifespan handlers — confirm before upgrading FastAPI.
@app.on_event("startup")
async def startup_event():
    """Initialize mining components and start mining on startup"""
    global miner_instance, mining_thread
    # Idempotent: skip if a miner already exists and is actively mining.
    if not (miner_instance and hasattr(miner_instance, 'mining') and miner_instance.mining):
        log_mining("πŸš€ Initializing Bitcoin mining components...")
        try:
            miner_instance = ParallelMiner(num_cores=5)  # Increased cores for better performance
            miner_instance.mining = True  # Set mining flag before starting
            
            # Start mining in background thread (daemon so it never blocks
            # interpreter exit).
            mining_thread = threading.Thread(
                target=miner_instance.start_mining,
                kwargs={"duration": None}  # Run forever
            )
            mining_thread.daemon = True
            mining_thread.start()
            
            # Initialize mining stats for this fresh session; cumulative
            # fields (total_runtime, all_time_total_hashes) are left intact.
            mining_stats["is_mining"] = True
            mining_stats["start_time"] = time.time()
            mining_stats["total_hashes"] = 0
            mining_stats["blocks_found"] = 0
            mining_stats["best_hash"] = None
            
            log_mining(f"Started mining automatically with {len(miner_instance.cores)} cores")
        except Exception as e:
            # Re-raise so FastAPI surfaces the startup failure instead of
            # running with a half-initialized miner.
            log_mining(f"Error starting mining: {str(e)}")
            raise

from fastapi.staticfiles import StaticFiles

# Serve static files



@app.get("/mining/status")
async def get_mining_status():
    """Get current mining status and statistics"""
    if not miner_instance:
        return {"error": "Mining system not initialized"}

    # Wall-clock time since the session started (0 if never started).
    start = mining_stats["start_time"]
    elapsed = (time.time() - start) if start else 0

    # Mirror the live miner counters into the shared stats dict.
    for key, value in (
        ("total_hashes", miner_instance.total_hashes),
        ("blocks_found", miner_instance.blocks_found),
        ("best_hash", miner_instance.best_hash),
        ("best_hash_difficulty", miner_instance.best_hash_difficulty),
    ):
        mining_stats[key] = value

    network = miner_instance.network_difficulty
    best = miner_instance.best_hash
    percent_to_network = (
        f"{(miner_instance.best_hash_difficulty / network * 100):.4f}%"
        if network > 0
        else "0%"
    )

    return {
        "is_mining": mining_stats["is_mining"],
        "total_hashes": miner_instance.total_hashes,
        "hash_rate": f"{miner_instance.current_hashrate/1000:.2f} KH/s",
        "blocks_found": miner_instance.blocks_found,
        "best_hash": best.hex() if best else None,
        "difficulty": {
            "network": network,
            "best_achieved": miner_instance.best_hash_difficulty,
            "percent_to_network": percent_to_network
        },
        "uptime": f"{elapsed:.2f}s" if start else "0s",
        "cores_active": len(miner_instance.cores) if miner_instance else 0,
        "units_per_core": len(miner_instance.cores[0].units) if miner_instance and miner_instance.cores else 0,
        "logs": mining_stats["logs"][-10:]  # Last 10 logs
    }

@app.get("/mining/performance")
async def get_mining_performance():
    """Get detailed mining performance metrics.

    Returns overall hash-rate figures, per-core utilization, and an
    approximate memory-usage estimate. Safe to call when the miner has
    an empty core list — the previous version divided by
    len(miner_instance.cores) unguarded in the response (and ignored the
    guarded `hashes_per_core` it had just computed), raising
    ZeroDivisionError.
    """
    global miner_instance, mining_stats
    
    if not miner_instance:
        return {"error": "Mining system not initialized"}
    
    num_cores = len(miner_instance.cores) if miner_instance.cores else 0
    
    # Session uptime (0 if the session never started).
    if mining_stats["start_time"]:
        elapsed = time.time() - mining_stats["start_time"]
    else:
        elapsed = 0
    
    # Average hashes per core, guarded against an empty core list.
    hashes_per_core = miner_instance.total_hashes / num_cores if num_cores else 0
    
    core_stats = []
    for i, core in enumerate(miner_instance.cores or []):
        core_stats.append({
            "core_id": i,
            "active_units": len(core.units),
            "status": "active" if mining_stats["is_mining"] else "idle"
        })
    
    return {
        "overall_performance": {
            "hash_rate": f"{miner_instance.current_hashrate/1000:.2f} KH/s",
            "total_hashes": miner_instance.total_hashes,
            "blocks_found": miner_instance.blocks_found,
            "uptime": f"{elapsed:.2f}s",
            "hashes_per_core": f"{hashes_per_core/1000:.2f}K"
        },
        "core_utilization": {
            "total_cores": num_cores,
            "active_cores": len([c for c in core_stats if c["status"] == "active"]),
            "cores": core_stats
        },
        "memory_usage": {
            "core_memory": num_cores * 1024 * 1024,  # Approximate memory usage per core
            "total_allocated": num_cores * len(miner_instance.cores[0].units) * 1024 if miner_instance.cores else 0
        }
    }

from fastapi.encoders import jsonable_encoder


# @app.post("/mining/start")
# async def start_mining():
#     """Start Bitcoin mining operations if not already running"""
#     global mining_thread, miner_instance, mining_stats
    
#     if mining_stats["is_mining"]:
#         if miner_instance and miner_instance.mining:
#             return {"message": "Mining is already running", "status": "already_running"}
#         else:
#             # Reset if mining_stats shows mining but miner isn't actually running
#             mining_stats["is_mining"] = False
    
#     try:
#         if not miner_instance:
#             miner_instance = ParallelMiner()
        
#         # Ensure mining flag is set before starting thread
#         miner_instance.mining = True
        
#         # Reset statistics
#         mining_stats["is_mining"] = True
#         mining_stats["start_time"] = time.time()
#         mining_stats["total_hashes"] = 0
#         mining_stats["blocks_found"] = 0
#         mining_stats["best_hash"] = None
        
#         # Start mining in background thread
#         mining_thread = threading.Thread(
#             target=miner_instance.start_mining,
#             kwargs={"duration": None}  # Always run indefinitely
#         )
#         mining_thread.daemon = True
#         mining_thread.start()
    
#     log_mining(f"Started mining with {len(miner_instance.cores)} cores")
    
#     return {
#         "message": "Mining started successfully",
#         "status": "started",
#         "config": {
#             "cores": len(miner_instance.cores),
#             "units_per_core": len(miner_instance.cores[0].units),
#             "duration": "indefinite",
#         }
#     }

@app.post("/mining/stop")
async def stop_mining():
    """Stop Bitcoin mining operations.

    Signals the miner to stop, folds this session's counters into the
    cumulative stats, persists everything via save_mining_stats(), and
    returns session + all-time summaries.
    """
    global miner_instance, mining_stats
    
    if not mining_stats["is_mining"]:
        return {"message": "Mining is not currently running", "status": "not_running"}
    
    if miner_instance:
        miner_instance.mining = False
        mining_stats["is_mining"] = False
        
        # Calculate final statistics. Guard start_time (matching
        # save_mining_stats) so a stale is_mining flag with no recorded
        # start cannot raise a TypeError here.
        end_time = time.time()
        elapsed = end_time - mining_stats["start_time"] if mining_stats["start_time"] else 0
        # NOTE(review): current_hashrate is an instantaneous reading but is
        # reported below as "avg_hash_rate" — confirm intent.
        hash_rate = miner_instance.current_hashrate
        
        # Update cumulative statistics
        mining_stats["total_runtime"] += elapsed
        mining_stats["all_time_total_hashes"] += miner_instance.total_hashes
        mining_stats["best_session_hashrate"] = max(
            mining_stats["best_session_hashrate"],
            hash_rate/1000  # KH/s
        )
        
        # Get final stats from miner
        mining_stats["total_hashes"] = miner_instance.total_hashes
        mining_stats["blocks_found"] = miner_instance.blocks_found
        mining_stats["best_hash"] = miner_instance.best_hash
        mining_stats["best_hash_difficulty"] = miner_instance.best_hash_difficulty
        
        # Save stats to file
        save_mining_stats()
        
        # Log comprehensive statistics
        log_mining("=== Mining Session Completed ===")
        log_mining(f"Session Duration: {elapsed:.2f}s")
        log_mining(f"Total Hashes: {mining_stats['total_hashes']:,}")
        log_mining(f"Average Hash Rate: {hash_rate/1000:.2f} KH/s")
        log_mining(f"Blocks Found: {mining_stats['blocks_found']}")
        log_mining(f"Best Hash: {mining_stats['best_hash'].hex() if mining_stats['best_hash'] else 'None'}")
        log_mining(f"Best Hash Difficulty: {mining_stats['best_hash_difficulty']}")
        log_mining("\n=== All-Time Statistics ===")
        log_mining(f"Total Runtime: {mining_stats['total_runtime']:.2f}s")
        log_mining(f"Total Hashes: {mining_stats['all_time_total_hashes']:,}")
        # NOTE(review): this reports the current session's block count — there
        # is no cumulative blocks counter in mining_stats; confirm intent.
        log_mining(f"Total Blocks Found: {mining_stats['blocks_found']}")
        log_mining(f"Best Session Hash Rate: {mining_stats['best_session_hashrate']:.2f} KH/s")
        
        return {
            "message": "Mining stopped successfully",
            "status": "stopped",
            "session_stats": {
                "duration": f"{elapsed:.2f}s",
                "total_hashes": mining_stats["total_hashes"],
                "avg_hash_rate": f"{hash_rate/1000:.2f} KH/s",
                "blocks_found": mining_stats["blocks_found"],
                "best_hash": mining_stats["best_hash"].hex() if mining_stats["best_hash"] else None,
                "best_hash_difficulty": mining_stats["best_hash_difficulty"]
            },
            "all_time_stats": {
                "total_runtime": f"{mining_stats['total_runtime']:.2f}s",
                "total_hashes": mining_stats["all_time_total_hashes"],
                "total_blocks": mining_stats["blocks_found"],  # session value; see NOTE above
                "best_session_hashrate": f"{mining_stats['best_session_hashrate']:.2f} KH/s"
            }
        }
    
    return {"message": "Mining instance not found", "status": "error"}

 
@app.get("/mining/history")
async def get_mining_history():
    """Get historical mining statistics"""
    try:
        if not os.path.exists(STATS_FILE):
            # No history recorded yet: return an empty, fully-shaped payload.
            return {
                "sessions": [],
                "total_runtime": 0,
                "total_hashes": 0,
                "total_blocks": 0,
                "best_session_hashrate": 0
            }
        with open(STATS_FILE, 'r') as f:
            return json.load(f)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error reading mining history: {str(e)}")

def handle_shutdown():
    """Handle graceful shutdown and save statistics"""
    if not (mining_stats["is_mining"] and miner_instance):
        return

    log_mining("\nπŸ›‘ Server shutdown detected - Saving final mining statistics...")
    miner_instance.mining = False
    mining_stats["is_mining"] = False

    # Final per-session figures.
    session_seconds = time.time() - mining_stats["start_time"]
    avg_rate = mining_stats["total_hashes"] / session_seconds if session_seconds > 0 else 0

    # Fold this session into the cumulative totals before persisting.
    mining_stats["total_runtime"] += session_seconds
    mining_stats["all_time_total_hashes"] += mining_stats["total_hashes"]
    if avg_rate / 1000 > mining_stats["best_session_hashrate"]:
        mining_stats["best_session_hashrate"] = avg_rate / 1000

    # Log final stats before shutdown
    log_mining("=== Final Mining Statistics ===")
    log_mining(f"Session Duration: {session_seconds:.2f}s")
    log_mining(f"Total Hashes: {mining_stats['total_hashes']:,}")
    log_mining(f"Average Hash Rate: {avg_rate/1000:.2f} KH/s")
    log_mining(f"Blocks Found: {mining_stats['blocks_found']}")
    best = mining_stats["best_hash"]
    if best:
        log_mining(f"Best Hash: {best.hex()}")

    # Save stats to file
    save_mining_stats()
    log_mining("Statistics saved successfully")
    log_mining("Server shutting down... Goodbye! πŸ‘‹")

if __name__ == "__main__":
    # Start the FastAPI server
    print("API Documentation will be available at: http://localhost:8000/docs")
    print("API Root endpoint: http://localhost:8000/")
    
    try:
        uvicorn.run(
            app, 
            host="0.0.0.0", 
            port=8000, 
            log_level="info",
            reload=False  # Set to False for production
        )
    except KeyboardInterrupt:
        # NOTE(review): uvicorn installs its own SIGINT handling and normally
        # shuts down without raising KeyboardInterrupt here, so this branch
        # may never run — confirm; a FastAPI shutdown/lifespan hook would be
        # the reliable place to call handle_shutdown().
        handle_shutdown()
    except Exception as e:
        # Best-effort: save stats on unexpected failure, then re-raise so the
        # process exits with the original error.
        log_mining(f"Error during server operation: {str(e)}")
        handle_shutdown()
        raise