ChandimaPrabath committed on
Commit
e0c041b
·
1 Parent(s): a46919c

unstable 0.0.0.2 Alpha

Browse files
Files changed (1) hide show
  1. LoadBalancer.py +24 -8
LoadBalancer.py CHANGED
@@ -1,6 +1,7 @@
1
  import os
2
  from indexer import indexer
3
  import re
 
4
  from tvdb import fetch_and_cache_json
5
  from threading import Event, Thread
6
  import asyncio
@@ -13,7 +14,7 @@ CACHE_DIR = os.getenv("CACHE_DIR")
13
 
14
  class LoadBalancer:
15
  def __init__(self, cache_dir, token, repo, polling_interval=4, max_retries=3, initial_delay=1):
16
- self.version = "0.0.0.1 Alpha"
17
  self.instances = []
18
  self.instances_health = {}
19
  self.polling_interval = polling_interval
@@ -27,15 +28,14 @@ class LoadBalancer:
27
  self.FILM_STORE = {}
28
  self.TV_STORE = {}
29
  self.file_structure = None
 
30
 
31
  # Ensure CACHE_DIR exists
32
  if not os.path.exists(self.CACHE_DIR):
33
  os.makedirs(self.CACHE_DIR)
34
 
35
- # Initialize file structure and start prefetching
36
  self.file_structure = indexer()
37
-
38
- # Start prefetching in the FastAPI event loop
39
  self.start_prefetching()
40
 
41
  # Start polling and file checking in separate threads
@@ -43,6 +43,11 @@ class LoadBalancer:
43
  polling_thread.daemon = True
44
  polling_thread.start()
45
 
 
 
 
 
 
46
  def start_prefetching(self):
47
  """Start the metadata prefetching in the FastAPI event loop."""
48
  asyncio.create_task(self.prefetch_metadata())
@@ -72,11 +77,24 @@ class LoadBalancer:
72
  year = int(parts[-1])
73
 
74
  # Schedule the fetch and cache task
75
- tasks.append(fetch_and_cache_json(original_title, title, media_type, year))
 
 
76
 
77
  # Run all tasks concurrently
78
  await asyncio.gather(*tasks)
79
 
 
 
 
 
 
 
 
 
 
 
 
80
  def register_instance(self, instance_url):
81
  if instance_url not in self.instances:
82
  self.instances.append(instance_url)
@@ -94,12 +112,10 @@ class LoadBalancer:
94
 
95
  def get_reports(self):
96
  reports = self.instances_api.fetch_reports()
97
-
98
- # Initialize temporary JSON data holders
99
  temp_film_store = {}
100
  temp_tv_store = {}
101
 
102
- for instance_url in self.instances[:]: # Copy list to avoid modification during iteration
103
  if instance_url in reports:
104
  report = reports[instance_url]
105
  logging.info(f"Report from {instance_url}: {report}")
 
1
  import os
2
  from indexer import indexer
3
  import re
4
+ import urllib.parse
5
  from tvdb import fetch_and_cache_json
6
  from threading import Event, Thread
7
  import asyncio
 
14
 
15
  class LoadBalancer:
16
  def __init__(self, cache_dir, token, repo, polling_interval=4, max_retries=3, initial_delay=1):
17
+ self.version = "0.0.0.2 Alpha"
18
  self.instances = []
19
  self.instances_health = {}
20
  self.polling_interval = polling_interval
 
28
  self.FILM_STORE = {}
29
  self.TV_STORE = {}
30
  self.file_structure = None
31
+ self.previous_file_structure = None # To keep track of previous content
32
 
33
  # Ensure CACHE_DIR exists
34
  if not os.path.exists(self.CACHE_DIR):
35
  os.makedirs(self.CACHE_DIR)
36
 
37
+ # Initialize file structure and prefetching
38
  self.file_structure = indexer()
 
 
39
  self.start_prefetching()
40
 
41
  # Start polling and file checking in separate threads
 
43
  polling_thread.daemon = True
44
  polling_thread.start()
45
 
46
+ # Start the periodic content check
47
+ content_check_thread = Thread(target=self.check_for_updates)
48
+ content_check_thread.daemon = True
49
+ content_check_thread.start()
50
+
51
def start_prefetching(self):
    """Start the metadata prefetching in the FastAPI event loop."""
    # Build the coroutine first, then hand it to the running loop as a task.
    # NOTE(review): asyncio.create_task requires a running event loop in the
    # calling thread — presumably __init__ runs inside FastAPI startup; confirm.
    prefetch_coro = self.prefetch_metadata()
    asyncio.create_task(prefetch_coro)
 
77
  year = int(parts[-1])
78
 
79
  # Schedule the fetch and cache task
80
+ json_cache_path = os.path.join(self.CACHE_DIR, f"{urllib.parse.quote(original_title)}.json")
81
+ if not os.path.exists(json_cache_path):
82
+ tasks.append(fetch_and_cache_json(original_title, title, media_type, year))
83
 
84
  # Run all tasks concurrently
85
  await asyncio.gather(*tasks)
86
 
87
def check_for_updates(self):
    """Periodically re-index content and prefetch metadata when it changes.

    Intended to run in a daemon thread (started from __init__). Every two
    minutes it rebuilds the file structure via indexer(); when the result
    differs from the last snapshot, it refreshes the live index and kicks
    off metadata prefetching again, otherwise it logs that nothing changed.
    Loops until self.stop_event is set.

    NOTE(review): start_prefetching() uses asyncio.create_task(), which
    needs a running event loop in the *calling* thread — confirm this
    thread has one, or hand the coroutine to the FastAPI loop with
    asyncio.run_coroutine_threadsafe instead.
    """
    # Local import: `time` does not appear in the module's import block.
    import time

    while not self.stop_event.is_set():
        time.sleep(120)  # wait 2 minutes between checks
        current_file_structure = indexer()
        if current_file_structure != self.previous_file_structure:
            # Refresh the live index before prefetching; previously the
            # re-indexed structure was only compared and then discarded,
            # leaving self.file_structure stale for prefetch_metadata().
            self.file_structure = current_file_structure
            self.previous_file_structure = current_file_structure
            self.start_prefetching()
        else:
            logging.info("No new content detected.")
98
  def register_instance(self, instance_url):
99
  if instance_url not in self.instances:
100
  self.instances.append(instance_url)
 
112
 
113
  def get_reports(self):
114
  reports = self.instances_api.fetch_reports()
 
 
115
  temp_film_store = {}
116
  temp_tv_store = {}
117
 
118
+ for instance_url in self.instances[:]:
119
  if instance_url in reports:
120
  report = reports[instance_url]
121
  logging.info(f"Report from {instance_url}: {report}")