"""
Create database indexes for refresh token management
Run this script once to set up optimal indexes
"""
import asyncio
import sys
import os
# Add parent directory to path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from app.core.nosql_client import db
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
async def create_indexes():
    """Create all indexes for the ``refresh_tokens`` collection.

    Idempotent: MongoDB treats ``create_index`` as a no-op when an
    identical index already exists.

    Raises:
        Exception: re-raised after logging if any index creation fails.
    """
    try:
        collection = db["refresh_tokens"]

        # Unique index on token_id
        await collection.create_index("token_id", unique=True)
        logger.info("✓ Created unique index on token_id")

        # Index on customer_id for user session queries
        await collection.create_index("customer_id")
        logger.info("✓ Created index on customer_id")

        # Index on family_id for token family operations
        await collection.create_index("family_id")
        logger.info("✓ Created index on family_id")

        # Compound index for active session queries
        await collection.create_index([
            ("customer_id", 1),
            ("revoked", 1),
            ("expires_at", 1),
        ])
        logger.info("✓ Created compound index on customer_id, revoked, expires_at")

        # Index on revoked for filtering
        await collection.create_index("revoked")
        logger.info("✓ Created index on revoked")

        # Index on used for rotation checks
        await collection.create_index("used")
        logger.info("✓ Created index on used")

        # TTL index: automatically delete tokens 30 days AFTER their
        # expires_at timestamp. This is deliberately the ONLY index on
        # expires_at: MongoDB rejects a second index with the same key
        # pattern but different options (IndexOptionsConflict), and the
        # TTL index also serves expiry range queries for cleanup.
        await collection.create_index(
            "expires_at",
            expireAfterSeconds=30 * 24 * 60 * 60,  # 30 days
        )
        logger.info("✓ Created TTL index on expires_at (30 days)")

        logger.info("\n✅ All indexes created successfully!")

        # List all indexes so the operator can verify the result
        indexes = await collection.list_indexes().to_list(length=None)
        logger.info("\nCurrent indexes:")
        for idx in indexes:
            # Lazy %-args: formatting only happens if INFO is enabled
            logger.info("  - %s: %s", idx["name"], idx.get("key", {}))
    except Exception as e:
        logger.error(f"✗ Error creating indexes: {str(e)}", exc_info=True)
        raise
async def create_user_indexes():
    """Create all indexes for the ``customers`` collection.

    Idempotent: MongoDB treats ``create_index`` as a no-op when an
    identical index already exists.

    Raises:
        Exception: re-raised after logging if any index creation fails.
    """
    try:
        collection = db["customers"]

        # Unique index on customer_id
        await collection.create_index("customer_id", unique=True)
        logger.info("✓ Created unique index on customer_id")

        # Unique index on email; sparse so documents without an email
        # don't collide on a missing value
        await collection.create_index("email", unique=True, sparse=True)
        logger.info("✓ Created unique index on email")

        # Unique index on phone; sparse for the same reason as email
        await collection.create_index("phone", unique=True, sparse=True)
        logger.info("✓ Created unique index on phone")

        # Index on auth_mode
        await collection.create_index("auth_mode")
        logger.info("✓ Created index on auth_mode")

        logger.info("\n✅ User indexes created successfully!")
    except Exception as e:
        logger.error(f"✗ Error creating user indexes: {str(e)}", exc_info=True)
        raise
async def main():
    """Create every database index: refresh tokens first, then users."""
    logger.info("Starting index creation...\n")

    logger.info("Creating refresh token indexes...")
    await create_indexes()

    logger.info("\nCreating user indexes...")
    await create_user_indexes()

    logger.info("\n🎉 All database indexes created successfully!")


if __name__ == "__main__":
    asyncio.run(main())