Shageenderan Sapai commited on
Commit
370e022
·
1 Parent(s): a1de27a

added cache for user pickle

Browse files
app/__pycache__/assistants.cpython-312.pyc CHANGED
Binary files a/app/__pycache__/assistants.cpython-312.pyc and b/app/__pycache__/assistants.cpython-312.pyc differ
 
app/__pycache__/main.cpython-312.pyc CHANGED
Binary files a/app/__pycache__/main.cpython-312.pyc and b/app/__pycache__/main.cpython-312.pyc differ
 
app/__pycache__/user.cpython-312.pyc CHANGED
Binary files a/app/__pycache__/user.cpython-312.pyc and b/app/__pycache__/user.cpython-312.pyc differ
 
app/__pycache__/utils.cpython-312.pyc CHANGED
Binary files a/app/__pycache__/utils.cpython-312.pyc and b/app/__pycache__/utils.cpython-312.pyc differ
 
app/utils.py CHANGED
@@ -1,4 +1,3 @@
1
- from datetime import datetime
2
  import logging
3
  import boto3
4
  from botocore.exceptions import ClientError, NoCredentialsError, PartialCredentialsError
@@ -18,6 +17,8 @@ import os
18
  from dotenv import load_dotenv
19
  from app.user import User
20
  import datetime
 
 
21
 
22
  load_dotenv()
23
 
@@ -33,6 +34,9 @@ REGION = os.getenv('AWS_REGION')
33
 
34
  logger = logging.getLogger(__name__)
35
 
 
 
 
36
  def force_file_move(source, destination):
37
  function_name = force_file_move.__name__
38
  logger.info(f"Attempting to move file from {source} to {destination}", extra={'endpoint': function_name})
@@ -51,18 +55,30 @@ def force_file_move(source, destination):
51
  def get_user(user_id):
52
  function_name = get_user.__name__
53
  logger.info(f"Fetching user {user_id}", extra={'user_id': user_id, 'endpoint': function_name})
 
 
 
 
54
  client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
55
  user_file = os.path.join('users', 'data', f'{user_id}.pkl')
56
- if (os.path.exists(user_file)):
57
- user = User.load_user(user_id, client)
58
- logger.info(f"User {user_id} loaded successfully from local storage", extra={'user_id': user_id, 'endpoint': function_name})
 
 
 
59
  return user
60
  else:
61
  logger.info(f"User {user_id} not found locally. Attempting to download from S3", extra={'user_id': user_id, 'endpoint': function_name})
62
  download = download_file_from_s3(f'{user_id}.pkl', 'core-ai-assets')
63
  logger.info(f"Download success: {download}", extra={'user_id': user_id, 'endpoint': function_name})
64
  if (download):
65
- user = User.load_user(user_id, client)
 
 
 
 
 
66
  logger.info(f"User {user_id} loaded successfully from S3", extra={'user_id': user_id, 'endpoint': function_name})
67
  return user
68
  else:
@@ -230,7 +246,7 @@ def upload_mementos_to_db(user_id):
230
  data.get('location', ''),
231
  data.get('recurrence', ''),
232
  data.get('context', ''),
233
- datetime.now(),
234
  pd.to_datetime(data.get('follow_up_on', ''))
235
  ]
236
  cursor.execute(base_query, memento_data)
 
 
1
  import logging
2
  import boto3
3
  from botocore.exceptions import ClientError, NoCredentialsError, PartialCredentialsError
 
17
  from dotenv import load_dotenv
18
  from app.user import User
19
  import datetime
20
+ import threading
21
+ import pickle # Replace dill with pickle
22
 
23
  load_dotenv()
24
 
 
34
 
35
  logger = logging.getLogger(__name__)
36
 
37
+ user_cache = {}
38
+ cache_lock = threading.Lock()
39
+
40
  def force_file_move(source, destination):
41
  function_name = force_file_move.__name__
42
  logger.info(f"Attempting to move file from {source} to {destination}", extra={'endpoint': function_name})
 
55
  def get_user(user_id):
56
  function_name = get_user.__name__
57
  logger.info(f"Fetching user {user_id}", extra={'user_id': user_id, 'endpoint': function_name})
58
+ with cache_lock:
59
+ if user_id in user_cache:
60
+ logger.info(f"User {user_id} found in cache", extra={'user_id': user_id, 'endpoint': function_name})
61
+ return user_cache[user_id]
62
  client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
63
  user_file = os.path.join('users', 'data', f'{user_id}.pkl')
64
+ if os.path.exists(user_file):
65
+ with open(user_file, 'rb') as f:
66
+ user = pickle.load(f)
67
+ user.client = client
68
+ with cache_lock:
69
+ user_cache[user_id] = user
70
  return user
71
  else:
72
  logger.info(f"User {user_id} not found locally. Attempting to download from S3", extra={'user_id': user_id, 'endpoint': function_name})
73
  download = download_file_from_s3(f'{user_id}.pkl', 'core-ai-assets')
74
  logger.info(f"Download success: {download}", extra={'user_id': user_id, 'endpoint': function_name})
75
  if (download):
76
+ with open(user_file, 'rb') as f:
77
+ user = pickle.load(f)
78
+ user.client = client
79
+ user.conversations.client = client
80
+ with cache_lock:
81
+ user_cache[user_id] = user
82
  logger.info(f"User {user_id} loaded successfully from S3", extra={'user_id': user_id, 'endpoint': function_name})
83
  return user
84
  else:
 
246
  data.get('location', ''),
247
  data.get('recurrence', ''),
248
  data.get('context', ''),
249
+ datetime.datetime.now(),
250
  pd.to_datetime(data.get('follow_up_on', ''))
251
  ]
252
  cursor.execute(base_query, memento_data)