Spaces:
Runtime error
Runtime error
Upload 72 files
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- RockPaperScissor/.DS_Store +0 -0
- RockPaperScissor/__init__.py +2 -0
- RockPaperScissor/__pycache__/__init__.cpython-310.pyc +0 -0
- RockPaperScissor/__pycache__/__init__.cpython-313.pyc +0 -0
- RockPaperScissor/config/.gitkeep +1 -0
- RockPaperScissor/config/database.py +31 -0
- RockPaperScissor/game_cache/__init__.py +8 -0
- RockPaperScissor/game_cache/__pycache__/__init__.cpython-310.pyc +0 -0
- RockPaperScissor/game_cache/__pycache__/__init__.cpython-313.pyc +0 -0
- RockPaperScissor/game_cache/__pycache__/memory_cache.cpython-310.pyc +0 -0
- RockPaperScissor/game_cache/__pycache__/memory_cache.cpython-313.pyc +0 -0
- RockPaperScissor/game_cache/memory_cache.py +84 -0
- RockPaperScissor/models/__init__.py +17 -0
- RockPaperScissor/models/__pycache__/__init__.cpython-310.pyc +0 -0
- RockPaperScissor/models/__pycache__/__init__.cpython-313.pyc +0 -0
- RockPaperScissor/models/__pycache__/adaptive_markov_ai.cpython-310.pyc +0 -0
- RockPaperScissor/models/__pycache__/adaptive_markov_ai.cpython-313.pyc +0 -0
- RockPaperScissor/models/__pycache__/base_ai.cpython-310.pyc +0 -0
- RockPaperScissor/models/__pycache__/base_ai.cpython-313.pyc +0 -0
- RockPaperScissor/models/__pycache__/random_ai.cpython-310.pyc +0 -0
- RockPaperScissor/models/__pycache__/random_ai.cpython-313.pyc +0 -0
- RockPaperScissor/models/adaptive_markov_ai.py +151 -0
- RockPaperScissor/models/base_ai.py +17 -0
- RockPaperScissor/models/random_ai.py +11 -0
- RockPaperScissor/repositories/.gitkeep +1 -0
- RockPaperScissor/repositories/__init__.py +12 -0
- RockPaperScissor/repositories/__pycache__/__init__.cpython-310.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/__init__.cpython-313.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/combined_storage.cpython-310.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/combined_storage.cpython-313.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/s3_storage.cpython-310.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/s3_storage.cpython-313.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/sql_storage.cpython-310.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/sql_storage.cpython-313.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/storage.cpython-310.pyc +0 -0
- RockPaperScissor/repositories/__pycache__/storage.cpython-313.pyc +0 -0
- RockPaperScissor/repositories/combined_storage.py +71 -0
- RockPaperScissor/repositories/s3_storage.py +321 -0
- RockPaperScissor/repositories/sql_storage.py +307 -0
- RockPaperScissor/repositories/storage.py +23 -0
- RockPaperScissor/routes/__init__.py +9 -0
- RockPaperScissor/routes/__pycache__/__init__.cpython-310.pyc +0 -0
- RockPaperScissor/routes/__pycache__/__init__.cpython-313.pyc +0 -0
- RockPaperScissor/routes/__pycache__/game.cpython-310.pyc +0 -0
- RockPaperScissor/routes/__pycache__/game.cpython-313.pyc +0 -0
- RockPaperScissor/routes/__pycache__/game_o.cpython-310.pyc +0 -0
- RockPaperScissor/routes/__pycache__/history.cpython-313.pyc +0 -0
- RockPaperScissor/routes/__pycache__/stats.cpython-310.pyc +0 -0
- RockPaperScissor/routes/__pycache__/stats.cpython-313.pyc +0 -0
- RockPaperScissor/routes/game.py +77 -0
RockPaperScissor/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
RockPaperScissor/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
|
RockPaperScissor/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (126 Bytes). View file
|
|
|
RockPaperScissor/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (172 Bytes). View file
|
|
|
RockPaperScissor/config/.gitkeep
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Placeholder to keep the config directory tracked by git
|
RockPaperScissor/config/database.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Minimal placeholder for database configuration
# RockPaperScissor/config/database.py
import os
from pathlib import Path

# Base directory for data storage
BASE_DATA_DIR = Path("data")
# NOTE: this runs at import time, so merely importing this module creates
# the data/ directory (idempotent thanks to exist_ok=True).
BASE_DATA_DIR.mkdir(parents=True, exist_ok=True)

# SQLite configuration
SQLITE_CONFIG = {
    "db_path": str(BASE_DATA_DIR / "game_history.db"),
    "timeout": 5.0,
    # check_same_thread=False allows the same connection to be used from
    # multiple threads; callers must provide their own synchronization.
    "check_same_thread": False,
}

# Storage configuration
STORAGE_CONFIG = {
    "primary": "combined",
    "cache_size": 1000,
    "auto_cleanup": True,
    "cleanup_interval": 3600  # 1 hour in seconds
}

# S3 configuration
# Credentials default to None when the environment variables are unset;
# bucket_name falls back to a placeholder that must be overridden in real use.
S3_CONFIG = {
    "bucket_name": os.getenv("AWS_S3_BUCKET_NAME", "your-bucket-name"),
    "region_name": os.getenv("AWS_REGION", "us-east-1"),
    "access_key_id": os.getenv("AWS_ACCESS_KEY_ID"),
    "secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY")
}
|
RockPaperScissor/game_cache/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# RockPaperScissor/game_cache/__init__.py
|
| 2 |
+
from .memory_cache import GameSessionCache # This will be our DummyGameSessionCache
|
| 3 |
+
# from .llm_cache import LLMCache # Add LLMCache later if/when LLMService is integrated
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
'GameSessionCache',
|
| 7 |
+
# 'LLMCache',
|
| 8 |
+
]
|
RockPaperScissor/game_cache/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (214 Bytes). View file
|
|
|
RockPaperScissor/game_cache/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (271 Bytes). View file
|
|
|
RockPaperScissor/game_cache/__pycache__/memory_cache.cpython-310.pyc
ADDED
|
Binary file (2.81 kB). View file
|
|
|
RockPaperScissor/game_cache/__pycache__/memory_cache.cpython-313.pyc
ADDED
|
Binary file (4.17 kB). View file
|
|
|
RockPaperScissor/game_cache/memory_cache.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# RockPaperScissor/game_cache/memory_cache.py
from typing import Dict, Any, Optional
from dataclasses import dataclass, field
from datetime import datetime

@dataclass
class GameData:
    """A single recorded game round together with the AI state that produced it."""
    session_id: str
    player_move: str
    ai_move: str
    result: str
    ai_type: str
    ai_state: Dict[str, Any]
    # BUG FIX: the original used `created_at: datetime = datetime.now()`, which
    # evaluates datetime.now() ONCE at class-definition time, so every GameData
    # instance shared the same timestamp. default_factory evaluates per instance.
    created_at: datetime = field(default_factory=datetime.now)

class GameSessionCache:
    """In-memory, per-session cache of round history and aggregate stats.

    Evicts the least-recently-updated session once the cache holds more than
    ``_max_cache_size`` sessions. Not thread-safe.
    """

    def __init__(self) -> None:
        self._sessions: Dict[str, Dict[str, Any]] = {}
        self._max_cache_size = 1000  # Maximum number of sessions to cache

    def update_session(self, session_id: str, game_data: Dict[str, Any]) -> None:
        """Record one round for *session_id*, creating the session if needed.

        Args:
            session_id: Identifier of the game session.
            game_data: Must contain 'result', 'player_move' and 'ai_move';
                'ai_type' and 'ai_state' are optional and default to
                'random' / {} respectively.
        """
        if session_id not in self._sessions:
            self._sessions[session_id] = {
                'total_rounds': 0,
                'player_wins': 0,
                'ai_wins': 0,
                'draws': 0,
                'rounds': [],
                'last_updated': datetime.now()
            }

        session = self._sessions[session_id]

        # Update aggregate stats; any result that is not a win for either
        # side is counted as a draw.
        session['total_rounds'] += 1
        if game_data['result'] == 'player_win':
            session['player_wins'] += 1
        elif game_data['result'] == 'ai_win':
            session['ai_wins'] += 1
        else:
            session['draws'] += 1

        # Append the per-round record.
        session['rounds'].append({
            'round_number': session['total_rounds'],
            'player_move': game_data['player_move'],
            'ai_move': game_data['ai_move'],
            'result': game_data['result'],
            'ai_type': game_data.get('ai_type', 'random'),
            'ai_state': game_data.get('ai_state', {}),
            'created_at': datetime.now()
        })

        # Refresh the recency marker used by the eviction policy.
        session['last_updated'] = datetime.now()

        # Enforce cache size limit.
        if len(self._sessions) > self._max_cache_size:
            self._remove_oldest_session()

    def get_session(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Return cached session data, or None when the session is unknown."""
        return self._sessions.get(session_id)

    def _remove_oldest_session(self) -> None:
        """Drop the session with the oldest 'last_updated' timestamp."""
        if not self._sessions:
            return

        oldest_key = min(
            self._sessions,
            key=lambda sid: self._sessions[sid]['last_updated']
        )
        del self._sessions[oldest_key]

    def clear(self) -> None:
        """Clear all cached data."""
        self._sessions.clear()

    def remove_session(self, session_id: str) -> None:
        """Remove a specific session from cache; a no-op if it is absent."""
        self._sessions.pop(session_id, None)
RockPaperScissor/models/__init__.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# RockPaperScissor/models/__init__.py
from .random_ai import RandomAI
from .adaptive_markov_ai import AdaptiveMarkovAI
# Import other AIs like PatternAI, MarkovAI later

# Registry of shared AI instances used by GameService.
# NOTE(review): these instances are shared across all sessions; this is safe
# only while every AI keeps its per-game state in the model_state it returns
# from make_move (both current AIs do) — confirm before adding stateful AIs.
AI_MODELS = {
    "random": RandomAI(),
    "adaptive_markov": AdaptiveMarkovAI(),
    # "pattern": PatternAI(),  # Add later
    # "markov": MarkovAI(),  # Add later
}

def get_ai(model_name: str):
    """Retrieve an AI instance by (case-insensitive) model name.

    Falls back to a fresh RandomAI when the name is unknown, empty or None.
    Robustness fix: the original raised AttributeError for model_name=None
    because it called .lower() unconditionally.
    """
    return AI_MODELS.get((model_name or "").lower(), RandomAI())
|
RockPaperScissor/models/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (501 Bytes). View file
|
|
|
RockPaperScissor/models/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (690 Bytes). View file
|
|
|
RockPaperScissor/models/__pycache__/adaptive_markov_ai.cpython-310.pyc
ADDED
|
Binary file (4.31 kB). View file
|
|
|
RockPaperScissor/models/__pycache__/adaptive_markov_ai.cpython-313.pyc
ADDED
|
Binary file (6.27 kB). View file
|
|
|
RockPaperScissor/models/__pycache__/base_ai.cpython-310.pyc
ADDED
|
Binary file (945 Bytes). View file
|
|
|
RockPaperScissor/models/__pycache__/base_ai.cpython-313.pyc
ADDED
|
Binary file (1.07 kB). View file
|
|
|
RockPaperScissor/models/__pycache__/random_ai.cpython-310.pyc
ADDED
|
Binary file (711 Bytes). View file
|
|
|
RockPaperScissor/models/__pycache__/random_ai.cpython-313.pyc
ADDED
|
Binary file (988 Bytes). View file
|
|
|
RockPaperScissor/models/adaptive_markov_ai.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import Dict, Any, Optional, Tuple
import math
import numpy as np

from .base_ai import BaseAI

class AdaptiveMarkovAI(BaseAI):
    """
    Adaptive RPS AI that uses entropy-based weighting between Markov and Frequency models.
    """
    def __init__(self, smoothing_factor=1.0, temperature=1.0):
        super().__init__()
        # Laplace-style smoothing applied to the initial count matrices.
        self.smoothing = smoothing_factor
        # Higher temperature sharpens the preference for the lower-entropy model.
        self.temperature = temperature

    def make_move(self, model_state: Optional[Dict[str, Any]] = None) -> Tuple[str, Dict[str, Any]]:
        """
        Generate AI's next move based on model state.

        Args:
            model_state: Dictionary containing:
                - markov_counts: Transition count matrix
                - frequency_counts: Overall frequency counts
                - player_last_move: The player's last move from previous round
                - smoothing: Smoothing factor for probability calculations
                - temperature: Temperature parameter for entropy weighting
                - last_lambdas: Last calculated model weights

        Returns:
            Tuple containing:
            - str: AI's chosen move (rock, paper, scissors)
            - Dict: Updated model state (ready for next round after player moves)
        """
        # Initialize model state if None
        if model_state is None:
            model_state = {
                "player_last_move": None,
                "ai_last_move": None,
                "last_result": None,
                "markov_counts": np.ones((3, 3)) * self.smoothing,
                "frequency_counts": np.ones(3) * self.smoothing,
                "player_second_last_move": None,
                "smoothing": self.smoothing,
                "temperature": self.temperature,
                "last_lambdas": {"markov": 0.5, "freq": 0.5}
            }

        # Extract values from model state.
        # NOTE(review): markov_counts / frequency_counts are mutated in place
        # below, so the arrays inside the caller's model_state are updated too.
        # This assumes they are numpy arrays; if they were deserialized as
        # plain lists the division in the helpers would fail — TODO confirm.
        markov_counts = model_state.get("markov_counts", np.ones((3, 3)) * self.smoothing)
        frequency_counts = model_state.get("frequency_counts", np.ones(3) * self.smoothing)
        player_last_move = model_state.get("player_last_move")
        player_second_last_move = model_state.get("player_second_last_move")
        smoothing = model_state.get("smoothing", self.smoothing)
        temperature = model_state.get("temperature", self.temperature)
        last_lambdas = model_state.get("last_lambdas", {"markov": 0.5, "freq": 0.5})

        # Define move mappings
        move_to_idx = {"rock": 0, "paper": 1, "scissors": 2}
        idx_to_move = {0: "rock", 1: "paper", 2: "scissors"}
        counters = {"rock": "paper", "paper": "scissors", "scissors": "rock"}

        # Helper functions
        def calculate_entropy(probs):
            """Calculate Shannon entropy of a probability distribution"""
            entropy = 0
            for p in probs:
                if p > 0:
                    entropy -= p * math.log2(p)
            return entropy

        def get_markov_probabilities(move):
            """Get transition probabilities from the Markov model"""
            if move not in move_to_idx:
                # Default to uniform if unknown move
                return [1/3, 1/3, 1/3]

            move_idx = move_to_idx[move]
            # Row sum is always > 0 because counts start at the smoothing value.
            row_sum = np.sum(markov_counts[move_idx])
            return markov_counts[move_idx] / row_sum

        def get_frequency_probabilities():
            """Get overall move probabilities from the frequency model"""
            total = np.sum(frequency_counts)
            return frequency_counts / total

        def calculate_lambdas(markov_probs, freq_probs):
            """Calculate adaptive weights using entropy-based formula"""
            # Calculate entropies
            markov_entropy = calculate_entropy(markov_probs)
            freq_entropy = calculate_entropy(freq_probs)

            # Apply temperature and calculate weights: softmax over negative
            # entropies, so the more confident (lower-entropy) model gets
            # the larger weight.
            denom = math.exp(-temperature * markov_entropy) + math.exp(-temperature * freq_entropy)
            lambda_markov = math.exp(-temperature * markov_entropy) / denom
            lambda_freq = math.exp(-temperature * freq_entropy) / denom

            # Return weights and entropy values for monitoring
            return lambda_markov, lambda_freq, {
                "markov": lambda_markov,
                "freq": lambda_freq,
                "markov_entropy": markov_entropy,
                "freq_entropy": freq_entropy
            }

        # Update the models with historical data if available
        if player_last_move and player_last_move in move_to_idx:
            # Update frequency counts
            last_idx = move_to_idx[player_last_move]
            frequency_counts[last_idx] += 1

            # Update Markov model if we have two consecutive moves
            if player_second_last_move and player_second_last_move in move_to_idx:
                second_last_idx = move_to_idx[player_second_last_move]
                markov_counts[second_last_idx][last_idx] += 1

        # For prediction, use player_last_move for Markov model
        if player_last_move is None:
            # No history yet, use random move (not counter)
            # NOTE(review): np.random.choice returns numpy.str_, not a plain
            # str — equality with "rock"/"paper"/"scissors" still holds, but
            # confirm downstream serialization tolerates it.
            ai_move = np.random.choice(self.possible_moves)
        else:
            # Get probabilities from each model
            markov_probs = get_markov_probabilities(player_last_move)
            freq_probs = get_frequency_probabilities()

            # Calculate adaptive lambda weights
            lambda_markov, lambda_freq, new_lambdas = calculate_lambdas(markov_probs, freq_probs)

            # Combine predictions with lambda weights
            combined_probs = lambda_markov * np.array(markov_probs) + lambda_freq * np.array(freq_probs)

            # Predict the player's most likely move and play its counter.
            predicted_idx = np.argmax(combined_probs)
            predicted_move = idx_to_move[predicted_idx]
            ai_move = counters[predicted_move]

            # Update lambdas in state
            last_lambdas = new_lambdas

        # Prepare updated state
        updated_state = {
            "markov_counts": markov_counts,
            "frequency_counts": frequency_counts,
            "player_last_move": None,  # Keep until service layer updates it
            "player_second_last_move": player_last_move,  # Keep until service layer updates it
            "smoothing": smoothing,
            "temperature": temperature,
            "last_lambdas": last_lambdas
        }

        # Return AI move and updated state
        return ai_move, updated_state
|
RockPaperScissor/models/base_ai.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# RockPaperScissor/models/base_ai.py
from typing import Dict, Any, Optional, Tuple

class BaseAI:
    """Abstract base class for rock-paper-scissors AI implementations."""

    # Canonical move vocabulary shared by all subclasses.
    possible_moves = ["rock", "paper", "scissors"]

    # Annotation fix: the original declared `model_state: Dict[str, Any] = None`,
    # an implicit Optional that PEP 484 disallows; subclasses (e.g. RandomAI)
    # already spell it Optional[...], so this also restores consistency.
    def make_move(self, model_state: Optional[Dict[str, Any]] = None) -> Tuple[str, Dict[str, Any]]:
        """Make a move in the game.

        Args:
            model_state: Optional state data for the AI model

        Returns:
            Tuple of (move, updated_state) where:
            - move is one of: "rock", "paper", "scissors"
            - updated_state is the new state of the AI model

        Raises:
            NotImplementedError: always; subclasses must override this method.
        """
        raise NotImplementedError
|
RockPaperScissor/models/random_ai.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# RockPaperScissor/models/random_ai.py
import random
from typing import Dict, Any, Tuple, Optional

from .base_ai import BaseAI  # Assuming base_ai.py is in the same directory

class RandomAI(BaseAI):
    """AI that ignores all history and plays a uniformly random move."""

    def make_move(self, model_state: Optional[Dict[str, Any]] = None) -> Tuple[str, Dict[str, Any]]:
        """Pick a random move; model_state is ignored and no state is carried."""
        chosen = random.choice(self.possible_moves)
        # Empty dict signals that this AI keeps no per-game state.
        return chosen, {}
|
RockPaperScissor/repositories/.gitkeep
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Placeholder to keep the repositories directory tracked by git
|
RockPaperScissor/repositories/__init__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# RockPaperScissor/repositories/__init__.py
|
| 2 |
+
from .storage import Storage, StorageError
|
| 3 |
+
from .sql_storage import SQLStorage # This will be our DummySQLStorage
|
| 4 |
+
from .combined_storage import CombinedStorage # This will use DummySQLStorage
|
| 5 |
+
from .s3_storage import S3Storage # Add later if needed
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
'Storage', 'StorageError',
|
| 9 |
+
'SQLStorage',
|
| 10 |
+
'CombinedStorage', # Uncommented for S3 testing
|
| 11 |
+
'S3Storage',
|
| 12 |
+
]
|
RockPaperScissor/repositories/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (393 Bytes). View file
|
|
|
RockPaperScissor/repositories/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (450 Bytes). View file
|
|
|
RockPaperScissor/repositories/__pycache__/combined_storage.cpython-310.pyc
ADDED
|
Binary file (2.55 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/combined_storage.cpython-313.pyc
ADDED
|
Binary file (4.43 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/s3_storage.cpython-310.pyc
ADDED
|
Binary file (9.67 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/s3_storage.cpython-313.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/sql_storage.cpython-310.pyc
ADDED
|
Binary file (10 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/sql_storage.cpython-313.pyc
ADDED
|
Binary file (13.8 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/storage.cpython-310.pyc
ADDED
|
Binary file (1.02 kB). View file
|
|
|
RockPaperScissor/repositories/__pycache__/storage.cpython-313.pyc
ADDED
|
Binary file (1.28 kB). View file
|
|
|
RockPaperScissor/repositories/combined_storage.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# RockPaperScissor/repositories/combined_storage.py
from .storage import Storage, StorageError
from .sql_storage import SQLStorage
from .s3_storage import S3Storage
from typing import Dict, Any, Optional
import os

class CombinedStorage(Storage):
    """Storage facade that writes to SQL and S3 backends and reads through
    an unbounded in-memory cache.

    NOTE(review): StorageError and os are imported but unused in this module,
    and _memory_cache grows without bound until close() — consider a size cap.
    """

    def __init__(self, db_path: str = "data/game_history.db"):
        self.sql_storage = SQLStorage(db_path)
        self.s3_storage = S3Storage()
        self._memory_cache = {}  # Simple in-memory cache

    async def initialize(self):
        """Initialize storage components"""
        await self.sql_storage.initialize()
        await self.s3_storage.initialize()

    async def save_game_round(self, game_data: Dict[str, Any]) -> bool:
        """Save game round to both storage systems and cache.

        Returns True if at least one backend accepted the write; the cache is
        only updated in that case (keyed by game_data['game_id']).
        """
        # Save to SQL storage
        sql_success = await self.sql_storage.save_game_round(game_data)

        # Save to S3 storage
        s3_success = await self.s3_storage.save_game_round(game_data)

        if sql_success or s3_success:
            # Update memory cache
            session_id = game_data.get('game_id')
            if session_id:
                self._memory_cache[session_id] = game_data

        return sql_success or s3_success

    async def save_full_session(self, session_id: str, session_data: Dict[str, Any]) -> bool:
        """Save the complete game session to both storage systems.

        Returns True if at least one backend succeeded. Does not touch the
        in-memory cache (unlike save_game_round).
        """
        # Save to SQL storage
        sql_success = await self.sql_storage.save_full_session(session_id, session_data)

        # Save to S3 storage
        s3_success = await self.s3_storage.save_full_session(session_id, session_data)

        return sql_success or s3_success

    async def get_game_history(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Get game history from cache or storage.

        Lookup order: memory cache, then SQL, then S3; a hit from either
        backend is cached. Falls through to an implicit None when no layer
        has the session.
        """
        # Try cache first
        if session_id in self._memory_cache:
            return self._memory_cache[session_id]

        # Try SQL storage
        history = await self.sql_storage.get_game_history(session_id)
        if history:
            self._memory_cache[session_id] = history
            return history

        # Try S3 storage
        history = await self.s3_storage.get_game_history(session_id)
        if history:
            self._memory_cache[session_id] = history
            return history

    async def get_ai_state(self, session_id: str, ai_type: str) -> Optional[Dict[str, Any]]:
        """Get AI state from storage.

        Only the SQL backend is consulted; S3 is not used for AI state here.
        """
        return await self.sql_storage.get_ai_state(session_id, ai_type)

    async def close(self) -> None:
        """Close storage connections and drop all cached entries."""
        await self.sql_storage.close()
        await self.s3_storage.close()
        self._memory_cache.clear()
|
RockPaperScissor/repositories/s3_storage.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import boto3
|
| 2 |
+
import json
|
| 3 |
+
import sqlite3
|
| 4 |
+
import io
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
from typing import Dict, Any, Optional
|
| 7 |
+
from .storage import Storage
|
| 8 |
+
import os
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
class S3Storage(Storage):
|
| 12 |
+
def __init__(self, bucket_name: str = None):
|
| 13 |
+
self.bucket_name = bucket_name or os.getenv('AWS_S3_BUCKET_NAME')
|
| 14 |
+
if not self.bucket_name:
|
| 15 |
+
raise ValueError("S3 bucket name must be provided either through constructor or AWS_S3_BUCKET_NAME environment variable")
|
| 16 |
+
|
| 17 |
+
# Configure S3 client with LocalStack endpoint if available
|
| 18 |
+
endpoint_url = os.getenv('AWS_ENDPOINT_URL')
|
| 19 |
+
self.s3_client = boto3.client(
|
| 20 |
+
's3',
|
| 21 |
+
endpoint_url=endpoint_url,
|
| 22 |
+
region_name=os.getenv('AWS_REGION', 'us-east-1')
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
def convert_ndarray(self, obj):
|
| 26 |
+
if isinstance(obj, np.ndarray):
|
| 27 |
+
return obj.tolist()
|
| 28 |
+
if isinstance(obj, dict):
|
| 29 |
+
return {k: self.convert_ndarray(v) for k, v in obj.items()}
|
| 30 |
+
if isinstance(obj, list):
|
| 31 |
+
return [self.convert_ndarray(x) for x in obj]
|
| 32 |
+
return obj
|
| 33 |
+
|
| 34 |
+
async def initialize(self):
|
| 35 |
+
"""Initialize S3 storage - verify bucket exists"""
|
| 36 |
+
try:
|
| 37 |
+
self.s3_client.head_bucket(Bucket=self.bucket_name)
|
| 38 |
+
except Exception as e:
|
| 39 |
+
# If using LocalStack, create the bucket if it doesn't exist
|
| 40 |
+
if os.getenv('AWS_ENDPOINT_URL'):
|
| 41 |
+
try:
|
| 42 |
+
self.s3_client.create_bucket(Bucket=self.bucket_name)
|
| 43 |
+
except Exception as create_error:
|
| 44 |
+
raise Exception(f"Failed to create S3 bucket {self.bucket_name}: {str(create_error)}")
|
| 45 |
+
else:
|
| 46 |
+
raise Exception(f"Failed to access S3 bucket {self.bucket_name}: {str(e)}")
|
| 47 |
+
|
| 48 |
+
def _get_db_connection(self, session_id: str) -> sqlite3.Connection:
|
| 49 |
+
"""Get SQLite database connection for a session"""
|
| 50 |
+
# Create in-memory database
|
| 51 |
+
conn = sqlite3.connect(':memory:')
|
| 52 |
+
cursor = conn.cursor()
|
| 53 |
+
|
| 54 |
+
# Try to load existing data from S3
|
| 55 |
+
try:
|
| 56 |
+
s3_key = f"game_sessions/{session_id}/game.db"
|
| 57 |
+
response = self.s3_client.get_object(
|
| 58 |
+
Bucket=self.bucket_name,
|
| 59 |
+
Key=s3_key
|
| 60 |
+
)
|
| 61 |
+
db_data = response['Body'].read()
|
| 62 |
+
conn.executescript(db_data.decode('utf-8'))
|
| 63 |
+
except self.s3_client.exceptions.NoSuchKey:
|
| 64 |
+
# No existing data, start with empty tables
|
| 65 |
+
cursor.execute('''
|
| 66 |
+
CREATE TABLE IF NOT EXISTS game_sessions (
|
| 67 |
+
session_id TEXT PRIMARY KEY,
|
| 68 |
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
| 69 |
+
completed_at TIMESTAMP,
|
| 70 |
+
total_rounds INTEGER DEFAULT 0,
|
| 71 |
+
player_wins INTEGER DEFAULT 0,
|
| 72 |
+
ai_wins INTEGER DEFAULT 0,
|
| 73 |
+
draws INTEGER DEFAULT 0,
|
| 74 |
+
is_completed BOOLEAN DEFAULT FALSE
|
| 75 |
+
)
|
| 76 |
+
''')
|
| 77 |
+
|
| 78 |
+
cursor.execute('''
|
| 79 |
+
CREATE TABLE IF NOT EXISTS game_rounds (
|
| 80 |
+
round_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 81 |
+
session_id TEXT,
|
| 82 |
+
round_number INTEGER,
|
| 83 |
+
player_move TEXT,
|
| 84 |
+
ai_move TEXT,
|
| 85 |
+
result TEXT,
|
| 86 |
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
| 87 |
+
FOREIGN KEY (session_id) REFERENCES game_sessions(session_id)
|
| 88 |
+
)
|
| 89 |
+
''')
|
| 90 |
+
|
| 91 |
+
cursor.execute('''
|
| 92 |
+
CREATE TABLE IF NOT EXISTS ai_states (
|
| 93 |
+
session_id TEXT,
|
| 94 |
+
ai_type TEXT,
|
| 95 |
+
state_data TEXT,
|
| 96 |
+
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
| 97 |
+
PRIMARY KEY (session_id, ai_type),
|
| 98 |
+
FOREIGN KEY (session_id) REFERENCES game_sessions(session_id)
|
| 99 |
+
)
|
| 100 |
+
''')
|
| 101 |
+
|
| 102 |
+
return conn
|
| 103 |
+
|
| 104 |
+
def _save_db_to_s3(self, conn: sqlite3.Connection, session_id: str):
    """Persist *conn* to S3 as a SQL text dump under the session's key."""
    # iterdump() emits the full database as SQL statements, one per line.
    dump_text = '\n'.join(conn.iterdump()) + '\n'
    self.s3_client.put_object(
        Bucket=self.bucket_name,
        Key=f"game_sessions/{session_id}/game.db",
        Body=dump_text.encode('utf-8'),
    )
|
| 118 |
+
async def save_game_round(self, game_data: Dict[str, Any]) -> bool:
    """Record one round in the session's database and persist it to S3.

    Returns False when ``game_data`` has no ``game_id`` or any storage
    step fails; errors are printed, never raised.
    """
    try:
        session_id = game_data.get('game_id')
        if not session_id:
            return False

        conn = self._get_db_connection(session_id)
        cursor = conn.cursor()

        outcome = game_data['result']
        # One-hot counters feeding both the INSERT and the UPDATE branch.
        player_win = 1 if outcome == 'player_win' else 0
        ai_win = 1 if outcome == 'ai_win' else 0
        draw = 1 if outcome == 'draw' else 0

        # Create the session row on the first round, otherwise bump tallies.
        cursor.execute('''
            INSERT INTO game_sessions (session_id, total_rounds, player_wins, ai_wins, draws)
            VALUES (?, 1, ?, ?, ?)
            ON CONFLICT(session_id) DO UPDATE SET
                total_rounds = total_rounds + 1,
                player_wins = player_wins + ?,
                ai_wins = ai_wins + ?,
                draws = draws + ?
        ''', (session_id, player_win, ai_win, draw, player_win, ai_win, draw))

        # Round numbers are 1-based, derived from rounds stored so far.
        cursor.execute('''
            SELECT COUNT(*) FROM game_rounds WHERE session_id = ?
        ''', (session_id,))
        round_number = cursor.fetchone()[0] + 1

        cursor.execute('''
            INSERT INTO game_rounds (session_id, round_number, player_move, ai_move, result)
            VALUES (?, ?, ?, ?, ?)
        ''', (
            session_id,
            round_number,
            game_data['player_move'],
            game_data['ai_move'],
            outcome,
        ))

        # Persist the AI model state alongside the round, when supplied.
        if 'ai_state' in game_data:
            state_json = json.dumps(self.convert_ndarray(game_data['ai_state']))
            cursor.execute('''
                INSERT INTO ai_states (session_id, ai_type, state_data)
                VALUES (?, ?, ?)
                ON CONFLICT(session_id, ai_type) DO UPDATE SET
                    state_data = ?,
                    last_updated = CURRENT_TIMESTAMP
            ''', (
                session_id,
                game_data.get('ai_type', 'adaptive_markov'),
                state_json,
                state_json,
            ))

        self._save_db_to_s3(conn, session_id)
        conn.close()

        return True

    except Exception as e:
        print(f"Error saving game round to S3: {e}")
        return False
|
| 191 |
+
async def save_full_session(self, session_id: str, session_data: Dict[str, Any]) -> bool:
    """Overwrite the session's database in S3 with the complete session.

    Replaces the summary row, all rounds, and the AI state taken from the
    final round. Returns True on success, False on any failure.
    """
    try:
        conn = self._get_db_connection(session_id)
        cursor = conn.cursor()

        # Upsert the completed-session summary row.
        cursor.execute('''
            INSERT INTO game_sessions (session_id, total_rounds, player_wins, ai_wins, draws, is_completed, completed_at)
            VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            ON CONFLICT(session_id) DO UPDATE SET
                total_rounds = excluded.total_rounds,
                player_wins = excluded.player_wins,
                ai_wins = excluded.ai_wins,
                draws = excluded.draws,
                is_completed = excluded.is_completed,
                completed_at = excluded.completed_at
        ''', (
            session_id,
            session_data['total_rounds'],
            session_data['player_wins'],
            session_data['ai_wins'],
            session_data['draws'],
            True,
        ))

        # Replace any previously stored rounds wholesale.
        cursor.execute('DELETE FROM game_rounds WHERE session_id = ?', (session_id,))

        print(f"[DEBUG] session_data['rounds']: {session_data['rounds']}")
        for round_data in session_data['rounds']:
            # Rounds without a timestamp get "now" as their creation time.
            created_at = round_data.get('created_at', datetime.now().isoformat())
            cursor.execute('''
                INSERT INTO game_rounds (session_id, round_number, player_move, ai_move, result, created_at)
                VALUES (?, ?, ?, ?, ?, ?)
            ''', (
                session_id,
                round_data['round_number'],
                round_data['player_move'],
                round_data['ai_move'],
                round_data['result'],
                created_at,
            ))

        # The AI state of the final round represents the session's end state.
        if session_data['rounds']:
            final_round = session_data['rounds'][-1]
            state_json = json.dumps(
                self.convert_ndarray(final_round.get('ai_state', {}))
            )
            cursor.execute('''
                INSERT INTO ai_states (session_id, ai_type, state_data)
                VALUES (?, ?, ?)
                ON CONFLICT(session_id, ai_type) DO UPDATE SET
                    state_data = ?,
                    last_updated = CURRENT_TIMESTAMP
            ''', (
                session_id,
                final_round.get('ai_type', 'random'),
                state_json,
                state_json,
            ))

        self._save_db_to_s3(conn, session_id)
        conn.close()

        return True

    except Exception as e:
        print(f"Error saving full session to S3: {e}")
        return False
|
| 265 |
+
async def get_game_history(self, session_id: str) -> Optional[Dict[str, Any]]:
    """Load a session's summary and ordered rounds from its S3-backed DB.

    Returns None when the session does not exist or retrieval fails.
    """
    try:
        conn = self._get_db_connection(session_id)
        cursor = conn.cursor()

        cursor.execute('''
            SELECT total_rounds, player_wins, ai_wins, draws, is_completed, completed_at
            FROM game_sessions
            WHERE session_id = ?
        ''', (session_id,))
        summary = cursor.fetchone()

        if not summary:
            conn.close()
            return None

        cursor.execute('''
            SELECT round_number, player_move, ai_move, result, created_at
            FROM game_rounds
            WHERE session_id = ?
            ORDER BY round_number
        ''', (session_id,))
        round_rows = cursor.fetchall()

        totals, wins, losses, draws, completed, completed_at = summary
        history = {
            'session_id': session_id,
            'total_rounds': totals,
            'player_wins': wins,
            'ai_wins': losses,
            'draws': draws,
            'is_completed': completed,
            'completed_at': completed_at,
            'rounds': [
                {
                    'round_number': number,
                    'player_move': player_move,
                    'ai_move': ai_move,
                    'result': result,
                    'created_at': created_at,
                }
                for number, player_move, ai_move, result, created_at in round_rows
            ],
        }

        conn.close()
        return history

    except Exception as e:
        print(f"Error retrieving game history from S3: {e}")
        return None
|
| 319 |
+
async def close(self) -> None:
    """Release the storage backend by dropping the S3 client reference."""
    self.s3_client = None
|
RockPaperScissor/repositories/sql_storage.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# RockPaperScissor/repositories/sql_storage.py
|
| 2 |
+
from .storage import Storage
|
| 3 |
+
from typing import Dict, Any, Optional
|
| 4 |
+
import sqlite3
|
| 5 |
+
import json
|
| 6 |
+
import asyncio
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
class SQLStorage(Storage):
    """Storage backend that persists games to a local SQLite database file.

    Call ``initialize()`` before any other method: it opens the connection
    and creates the schema (game_sessions, game_rounds, ai_states) if it
    does not exist yet. All save/get methods report failure by returning
    False/None rather than raising.
    """

    def __init__(self, db_path: str = "data/game_history.db"):
        self.db_path = db_path
        self.conn = None  # sqlite3.Connection; opened by initialize()
        self._ensure_db_directory()

    def _ensure_db_directory(self):
        """Ensure the database directory exists"""
        Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)

    @staticmethod
    def _to_serializable(obj):
        """Recursively replace numpy arrays with plain lists so json.dumps
        accepts the object. Dicts and lists are walked; everything else is
        returned unchanged."""
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, dict):
            return {k: SQLStorage._to_serializable(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [SQLStorage._to_serializable(x) for x in obj]
        return obj

    def _rollback(self):
        """Roll back the open transaction; tolerate a never-opened connection
        (rollback on a None conn would raise AttributeError in the error path)."""
        if self.conn:
            self.conn.rollback()

    async def initialize(self):
        """Initialize the database connection and create tables if they don't exist"""
        self.conn = sqlite3.connect(self.db_path)
        cursor = self.conn.cursor()

        # Session summary: one row per game session, with completion status.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS game_sessions (
                session_id TEXT PRIMARY KEY,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                completed_at TIMESTAMP,
                total_rounds INTEGER DEFAULT 0,
                player_wins INTEGER DEFAULT 0,
                ai_wins INTEGER DEFAULT 0,
                draws INTEGER DEFAULT 0,
                is_completed BOOLEAN DEFAULT FALSE
            )
        ''')

        # One row per played round, linked to its session.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS game_rounds (
                round_id INTEGER PRIMARY KEY AUTOINCREMENT,
                session_id TEXT,
                round_number INTEGER,
                player_move TEXT,
                ai_move TEXT,
                result TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (session_id) REFERENCES game_sessions(session_id)
            )
        ''')

        # Latest serialized AI model state per (session, ai_type).
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS ai_states (
                session_id TEXT,
                ai_type TEXT,
                state_data TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                PRIMARY KEY (session_id, ai_type),
                FOREIGN KEY (session_id) REFERENCES game_sessions(session_id)
            )
        ''')

        self.conn.commit()

    async def save_game_round(self, game_data: Dict[str, Any]) -> bool:
        """Save a game round to the database.

        Returns False without writing when ``game_data`` lacks a 'game_id'
        (consistent with the S3 backend), or when any statement fails.
        """
        try:
            session_id = game_data.get('game_id')
            if not session_id:
                # Refuse to record a round that cannot be tied to a session.
                return False

            cursor = self.conn.cursor()
            result = game_data['result']
            # One-hot counters used by both the INSERT and UPDATE branches.
            player_win = 1 if result == 'player_win' else 0
            ai_win = 1 if result == 'ai_win' else 0
            draw = 1 if result == 'draw' else 0

            # Create the session row on the first round, else bump tallies.
            cursor.execute('''
                INSERT INTO game_sessions (session_id, total_rounds, player_wins, ai_wins, draws)
                VALUES (?, 1, ?, ?, ?)
                ON CONFLICT(session_id) DO UPDATE SET
                    total_rounds = total_rounds + 1,
                    player_wins = player_wins + ?,
                    ai_wins = ai_wins + ?,
                    draws = draws + ?
            ''', (session_id, player_win, ai_win, draw, player_win, ai_win, draw))

            # Round numbers are 1-based, derived from rounds stored so far.
            cursor.execute('''
                SELECT COUNT(*) FROM game_rounds WHERE session_id = ?
            ''', (session_id,))
            round_number = cursor.fetchone()[0] + 1

            cursor.execute('''
                INSERT INTO game_rounds (session_id, round_number, player_move, ai_move, result)
                VALUES (?, ?, ?, ?, ?)
            ''', (
                session_id,
                round_number,
                game_data['player_move'],
                game_data['ai_move'],
                result,
            ))

            # Persist the AI model state alongside the round, when supplied.
            if 'ai_state' in game_data:
                state_json = json.dumps(self._to_serializable(game_data['ai_state']))
                cursor.execute('''
                    INSERT INTO ai_states (session_id, ai_type, state_data)
                    VALUES (?, ?, ?)
                    ON CONFLICT(session_id, ai_type) DO UPDATE SET
                        state_data = ?,
                        last_updated = CURRENT_TIMESTAMP
                ''', (
                    session_id,
                    game_data.get('ai_type', 'adaptive_markov'),
                    state_json,
                    state_json,
                ))

            self.conn.commit()
            return True

        except Exception as e:
            print(f"Error saving game round: {e}")
            self._rollback()
            return False

    async def complete_session(self, session_id: str) -> bool:
        """Mark a game session as completed"""
        try:
            cursor = self.conn.cursor()
            cursor.execute('''
                UPDATE game_sessions
                SET is_completed = TRUE,
                    completed_at = CURRENT_TIMESTAMP
                WHERE session_id = ?
            ''', (session_id,))
            self.conn.commit()
            return True
        except Exception as e:
            print(f"Error completing session: {e}")
            self._rollback()
            return False

    async def get_game_history(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve the session summary plus all rounds, ordered by round
        number. Returns None when the session is unknown or lookup fails."""
        try:
            cursor = self.conn.cursor()

            cursor.execute('''
                SELECT total_rounds, player_wins, ai_wins, draws, is_completed, completed_at
                FROM game_sessions
                WHERE session_id = ?
            ''', (session_id,))
            session_data = cursor.fetchone()

            if not session_data:
                return None

            cursor.execute('''
                SELECT round_number, player_move, ai_move, result, created_at
                FROM game_rounds
                WHERE session_id = ?
                ORDER BY round_number
            ''', (session_id,))
            rounds = cursor.fetchall()

            return {
                'session_id': session_id,
                'total_rounds': session_data[0],
                'player_wins': session_data[1],
                'ai_wins': session_data[2],
                'draws': session_data[3],
                'is_completed': session_data[4],
                'completed_at': session_data[5],
                'rounds': [
                    {
                        'round_number': r[0],
                        'player_move': r[1],
                        'ai_move': r[2],
                        'result': r[3],
                        'created_at': r[4],
                    }
                    for r in rounds
                ],
            }

        except Exception as e:
            print(f"Error retrieving game history: {e}")
            return None

    async def get_ai_state(self, session_id: str, ai_type: str) -> Optional[Dict[str, Any]]:
        """Retrieve the stored AI state for (session, ai_type), deserialized
        from JSON; None when absent or on error."""
        try:
            cursor = self.conn.cursor()
            cursor.execute('''
                SELECT state_data
                FROM ai_states
                WHERE session_id = ? AND ai_type = ?
            ''', (session_id, ai_type))

            row = cursor.fetchone()
            if row:
                return json.loads(row[0])
            return None

        except Exception as e:
            print(f"Error retrieving AI state: {e}")
            return None

    async def close(self) -> None:
        """Close the database connection"""
        if self.conn:
            self.conn.close()
            self.conn = None

    async def save_full_session(self, session_id: str, session_data: Dict[str, Any]) -> bool:
        """Save the full session and all rounds to the database in one go.

        Replaces the summary row, deletes and re-inserts every round, and
        stores the AI state taken from the final round.
        """
        print(f"[DEBUG] save_full_session called for session_id: {session_id}")
        print(f"[DEBUG] session_data: {session_data}")
        try:
            cursor = self.conn.cursor()
            # Upsert the completed-session summary row.
            cursor.execute('''
                INSERT INTO game_sessions (session_id, total_rounds, player_wins, ai_wins, draws, is_completed, completed_at)
                VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
                ON CONFLICT(session_id) DO UPDATE SET
                    total_rounds = excluded.total_rounds,
                    player_wins = excluded.player_wins,
                    ai_wins = excluded.ai_wins,
                    draws = excluded.draws,
                    is_completed = excluded.is_completed,
                    completed_at = excluded.completed_at
            ''', (
                session_id,
                session_data['total_rounds'],
                session_data['player_wins'],
                session_data['ai_wins'],
                session_data['draws'],
                True,
            ))

            # Replace any previously stored rounds wholesale.
            cursor.execute('DELETE FROM game_rounds WHERE session_id = ?', (session_id,))
            for round_data in session_data['rounds']:
                print(f"[DEBUG] Inserting round: {round_data}")
                # Rounds without a timestamp get "now" as their creation time.
                created_at = round_data.get('created_at', datetime.now().isoformat())
                cursor.execute('''
                    INSERT INTO game_rounds (session_id, round_number, player_move, ai_move, result, created_at)
                    VALUES (?, ?, ?, ?, ?, ?)
                ''', (
                    session_id,
                    round_data['round_number'],
                    round_data['player_move'],
                    round_data['ai_move'],
                    round_data['result'],
                    created_at,
                ))

            # The final round's AI state represents the session's end state.
            if session_data['rounds']:
                last_round = session_data['rounds'][-1]
                state_json = json.dumps(
                    self._to_serializable(last_round.get('ai_state', {}))
                )
                cursor.execute('''
                    INSERT INTO ai_states (session_id, ai_type, state_data)
                    VALUES (?, ?, ?)
                    ON CONFLICT(session_id, ai_type) DO UPDATE SET
                        state_data = ?,
                        last_updated = CURRENT_TIMESTAMP
                ''', (
                    session_id,
                    last_round.get('ai_type', 'random'),
                    state_json,
                    state_json,
                ))
            self.conn.commit()
            return True
        except Exception as e:
            print(f"Error saving full session: {e}")
            self._rollback()
            return False
|
RockPaperScissor/repositories/storage.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# RockPaperScissor/repositories/storage.py
|
| 2 |
+
from abc import ABC, abstractmethod
|
| 3 |
+
from typing import Dict, Any, Optional
|
| 4 |
+
|
| 5 |
+
class StorageError(Exception):
    """Base exception for storage-backend failures."""
    pass
|
| 8 |
+
class Storage(ABC):
    """Abstract async persistence interface for game data.

    Concrete backends must implement ``save_game_round``; ``initialize``
    and ``close`` default to no-ops for backends that need no setup or
    teardown.
    """

    async def initialize(self):
        """Prepare the backend (connections, schema). No-op by default."""
        pass

    @abstractmethod
    async def save_game_round(self, game_data: Dict[str, Any]) -> bool:
        """Persist one round; return True on success, False on failure."""
        pass

    async def close(self) -> None:
        """Release backend resources. No-op by default."""
        pass
|
RockPaperScissor/routes/__init__.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Routes package initialization for RockPaperScissor game.
|
| 3 |
+
Contains API routes definitions.
|
| 4 |
+
"""
|
| 5 |
+
from .game import game_router
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
'game_router'
|
| 9 |
+
]
|
RockPaperScissor/routes/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (305 Bytes). View file
|
|
|
RockPaperScissor/routes/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (367 Bytes). View file
|
|
|
RockPaperScissor/routes/__pycache__/game.cpython-310.pyc
ADDED
|
Binary file (1.7 kB). View file
|
|
|
RockPaperScissor/routes/__pycache__/game.cpython-313.pyc
ADDED
|
Binary file (2.65 kB). View file
|
|
|
RockPaperScissor/routes/__pycache__/game_o.cpython-310.pyc
ADDED
|
Binary file (1.8 kB). View file
|
|
|
RockPaperScissor/routes/__pycache__/history.cpython-313.pyc
ADDED
|
Binary file (633 Bytes). View file
|
|
|
RockPaperScissor/routes/__pycache__/stats.cpython-310.pyc
ADDED
|
Binary file (2.45 kB). View file
|
|
|
RockPaperScissor/routes/__pycache__/stats.cpython-313.pyc
ADDED
|
Binary file (3.23 kB). View file
|
|
|
RockPaperScissor/routes/game.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# backend/routes/game.py
|
| 2 |
+
from fastapi import APIRouter, HTTPException, Request
|
| 3 |
+
from RockPaperScissor.schemas.game import GameRequest, GameResponse #, GameSummary, LLMRequest
|
| 4 |
+
from RockPaperScissor.utils.logging import setup_logging
|
| 5 |
+
# from RockPaperScissor.repositories import CombinedStorage # Persistent storage (commented for Hugging Face)
|
| 6 |
+
from RockPaperScissor.services.service_instance import game_service
|
| 7 |
+
|
| 8 |
+
# Set up logger
|
| 9 |
+
logger = setup_logging()
|
| 10 |
+
|
| 11 |
+
game_router = APIRouter()
|
| 12 |
+
|
| 13 |
+
@game_router.post("/play")
|
| 14 |
+
async def play_round(request: Request, game_request: GameRequest):
|
| 15 |
+
"""
|
| 16 |
+
Play a round with the player's move (in-memory only for Hugging Face)
|
| 17 |
+
"""
|
| 18 |
+
result = await game_service.play_round(game_request.session_id, game_request.player_move, game_request.ai_type)
|
| 19 |
+
if "error" in result:
|
| 20 |
+
raise HTTPException(status_code=400, detail=f"GameService error: {result['error']}")
|
| 21 |
+
return result
|
| 22 |
+
|
| 23 |
+
# @game_router.post("/analyze")
|
| 24 |
+
# async def analyze_game_state(llm_request: LLMRequest):
|
| 25 |
+
# """
|
| 26 |
+
# Get LLM analysis of the current game state.
|
| 27 |
+
# """
|
| 28 |
+
# try:
|
| 29 |
+
# # Log the request
|
| 30 |
+
# logger.info(f"Analyze request: {llm_request.model_dump()}")
|
| 31 |
+
#
|
| 32 |
+
# # Get LLM analysis directly from LLM service
|
| 33 |
+
# analysis = llm_service.analyze_game_state(llm_request)
|
| 34 |
+
# return {"analysis": analysis}
|
| 35 |
+
#
|
| 36 |
+
# except Exception as e:
|
| 37 |
+
# logger.error(f"Error analyzing game state: {str(e)}")
|
| 38 |
+
# raise HTTPException(
|
| 39 |
+
# status_code=500,
|
| 40 |
+
# detail="An error occurred while analyzing the game state"
|
| 41 |
+
# )
|
| 42 |
+
|
| 43 |
+
@game_router.post("/end")
|
| 44 |
+
async def end_game(request: Request):
|
| 45 |
+
"""
|
| 46 |
+
End the current game session and save the final state.
|
| 47 |
+
"""
|
| 48 |
+
try:
|
| 49 |
+
data = await request.json()
|
| 50 |
+
session_id = data.get('session_id')
|
| 51 |
+
if not session_id:
|
| 52 |
+
raise HTTPException(
|
| 53 |
+
status_code=400,
|
| 54 |
+
detail="No session ID provided"
|
| 55 |
+
)
|
| 56 |
+
# Save session to database before clearing cache
|
| 57 |
+
db_success = await game_service.save_session_to_db(session_id)
|
| 58 |
+
# Clear in-memory data
|
| 59 |
+
await game_service.clear_session(session_id)
|
| 60 |
+
if db_success:
|
| 61 |
+
return {
|
| 62 |
+
"status": "success",
|
| 63 |
+
"message": "Game session ended and saved to database successfully",
|
| 64 |
+
"game_history": None
|
| 65 |
+
}
|
| 66 |
+
else:
|
| 67 |
+
return {
|
| 68 |
+
"status": "error",
|
| 69 |
+
"message": "Game session ended, but failed to save to database",
|
| 70 |
+
"game_history": None
|
| 71 |
+
}
|
| 72 |
+
except Exception as e:
|
| 73 |
+
logger.error(f"Error ending game session: {str(e)}")
|
| 74 |
+
raise HTTPException(
|
| 75 |
+
status_code=500,
|
| 76 |
+
detail="An error occurred while ending the game session"
|
| 77 |
+
)
|