Spaces:
Sleeping
Sleeping
Commit
·
321dbb9
1
Parent(s):
08e5d35
improve recent
Browse files- services.py +22 -24
- tvdb.py +22 -3
services.py
CHANGED
|
@@ -2,21 +2,20 @@ import bisect
|
|
| 2 |
|
| 3 |
class RecentList:
|
| 4 |
def __init__(self):
|
| 5 |
-
# Initialize dictionaries to store titles and
|
| 6 |
self.films = {}
|
| 7 |
self.series = {}
|
| 8 |
# Initialize lists to keep track of the sorted entries
|
| 9 |
self.sorted_films = []
|
| 10 |
self.sorted_series = []
|
| 11 |
|
| 12 |
-
def add_entry(self, title, year, media_type):
|
| 13 |
if media_type == 'film':
|
| 14 |
-
self._update_entry(self.films, self.sorted_films, title, year)
|
| 15 |
-
|
| 16 |
elif media_type == 'series':
|
| 17 |
-
self._update_entry(self.series, self.sorted_series, title, year)
|
| 18 |
|
| 19 |
-
def _update_entry(self, dictionary, sorted_list, title, year):
|
| 20 |
try:
|
| 21 |
# Convert year to integer
|
| 22 |
year = int(year)
|
|
@@ -24,44 +23,43 @@ class RecentList:
|
|
| 24 |
raise ValueError(f"Invalid year: {year}. Year must be an integer.")
|
| 25 |
|
| 26 |
if title in dictionary:
|
| 27 |
-
# Remove the old
|
| 28 |
-
old_year = dictionary[title]
|
| 29 |
try:
|
| 30 |
sorted_list.remove((-old_year, title))
|
| 31 |
except ValueError:
|
| 32 |
pass # Ignore if the old entry does not exist in the sorted list
|
| 33 |
|
| 34 |
# Update or add the new entry in the dictionary
|
| 35 |
-
dictionary[title] = year
|
| 36 |
|
| 37 |
-
# Insert the new year into the sorted list
|
| 38 |
bisect.insort(sorted_list, (-year, title))
|
| 39 |
|
| 40 |
def get_sorted_entries(self, media_type):
|
| 41 |
if media_type == 'film':
|
| 42 |
-
#
|
| 43 |
-
return [
|
| 44 |
-
|
| 45 |
elif media_type == 'series':
|
| 46 |
-
#
|
| 47 |
-
return [
|
| 48 |
|
| 49 |
# Example usage:
|
| 50 |
if __name__ == "__main__":
|
| 51 |
media_list = RecentList()
|
| 52 |
|
| 53 |
# Adding entries
|
| 54 |
-
media_list.add_entry("Film A", 2022, 'film')
|
| 55 |
-
media_list.add_entry("Series A", 2023, 'series')
|
| 56 |
-
media_list.add_entry("Film B", 2021, 'film')
|
| 57 |
-
media_list.add_entry("Film A", 2024, 'film') # Updating the year of "Film A"
|
| 58 |
-
media_list.add_entry("Series B", 2021, 'series')
|
| 59 |
|
| 60 |
# Retrieving and printing sorted entries
|
| 61 |
print("Sorted Films:")
|
| 62 |
-
for title, year in media_list.get_sorted_entries('film'):
|
| 63 |
-
print(f"Title: {title}, Year: {year}")
|
| 64 |
|
| 65 |
print("\nSorted Series:")
|
| 66 |
-
for title, year in media_list.get_sorted_entries('series'):
|
| 67 |
-
print(f"Title: {title}, Year: {year}")
|
|
|
|
| 2 |
|
| 3 |
class RecentList:
|
| 4 |
def __init__(self):
    """Create an empty recent-media list.

    Holds two parallel structures per media kind: a mapping from
    title to its stored entry data, and a sorted index of
    (-year, title) pairs used for newest-first retrieval.
    """
    # Title -> entry data, one mapping per media kind.
    self.films = {}
    self.series = {}
    # Sorted (-year, title) indexes kept in step with the mappings above.
    self.sorted_films = []
    self.sorted_series = []
|
| 11 |
|
| 12 |
+
def add_entry(self, title, year, description, image_link, media_type):
    """Insert or refresh one entry in the collection for *media_type*.

    Dispatches to ``_update_entry`` with the storage pair that matches
    ``media_type`` ('film' or 'series'); any other value is silently
    ignored, matching the original behavior.
    """
    # Guard-clause dispatch: pick the (mapping, sorted index) pair first.
    if media_type == 'film':
        store, index = self.films, self.sorted_films
    elif media_type == 'series':
        store, index = self.series, self.sorted_series
    else:
        return
    self._update_entry(store, index, title, year, description, image_link)
|
| 17 |
|
| 18 |
+
def _update_entry(self, dictionary, sorted_list, title, year, description, image_link):
|
| 19 |
try:
|
| 20 |
# Convert year to integer
|
| 21 |
year = int(year)
|
|
|
|
| 23 |
raise ValueError(f"Invalid year: {year}. Year must be an integer.")
|
| 24 |
|
| 25 |
if title in dictionary:
|
| 26 |
+
# Remove the old entry from the sorted list if it exists
|
| 27 |
+
old_year = dictionary[title][0] # Get the old year
|
| 28 |
try:
|
| 29 |
sorted_list.remove((-old_year, title))
|
| 30 |
except ValueError:
|
| 31 |
pass # Ignore if the old entry does not exist in the sorted list
|
| 32 |
|
| 33 |
# Update or add the new entry in the dictionary
|
| 34 |
+
dictionary[title] = (year, description, image_link)
|
| 35 |
|
| 36 |
+
# Insert the new year and title into the sorted list
|
| 37 |
bisect.insort(sorted_list, (-year, title))
|
| 38 |
|
| 39 |
def get_sorted_entries(self, media_type):
    """Return the entries of the given kind, newest year first.

    Each element is a ``(title, year, description, image_link)`` tuple,
    rebuilt from the (-year, title) sorted index and the per-title
    stored data. Returns ``None`` for an unrecognised *media_type*
    (same as the original implicit fall-through).
    """
    # Select the mapping / sorted-index pair for the requested kind.
    if media_type == 'film':
        store, index = self.films, self.sorted_films
    elif media_type == 'series':
        store, index = self.series, self.sorted_series
    else:
        return None
    entries = []
    for neg_year, title in index:
        # record layout: (year, description, image_link)
        record = store[title]
        entries.append((title, -neg_year, record[1], record[2]))
    return entries
|
| 46 |
|
| 47 |
# Example usage:
|
| 48 |
if __name__ == "__main__":
    # Small demo: seed a RecentList and print both collections.
    recent = RecentList()

    # (title, year, description, image_link, media_type) — note the
    # second "Film A" row refreshes the first one with a newer year.
    demo_entries = [
        ("Film A", 2022, "Description A", "http://link-to-image-a.com", 'film'),
        ("Series A", 2023, "Description B", "http://link-to-image-b.com", 'series'),
        ("Film B", 2021, "Description C", "http://link-to-image-c.com", 'film'),
        ("Film A", 2024, "Updated Description A", "http://updated-link-to-image-a.com", 'film'),
        ("Series B", 2021, "Description D", "http://link-to-image-d.com", 'series'),
    ]
    for entry in demo_entries:
        recent.add_entry(*entry)

    # Print each collection newest-first, films then series.
    for heading, kind in (("Sorted Films:", 'film'), ("\nSorted Series:", 'series')):
        print(heading)
        for title, year, description, image_link in recent.get_sorted_entries(kind):
            print(f"Title: {title}, Year: {year}, Description: {description}, Image Link: {image_link}")
|
tvdb.py
CHANGED
|
@@ -119,11 +119,30 @@ async def fetch_and_cache_json(original_title, title, media_type, year=None):
|
|
| 119 |
response = requests.get(extended_url, headers=headers)
|
| 120 |
response.raise_for_status()
|
| 121 |
extended_data = response.json()
|
| 122 |
-
cleaned_data=clean_data(extended_data)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 123 |
if media_type == 'movie':
|
| 124 |
-
recent_list.add_entry(original_title, cleaned_data['data']['year'], 'film')
|
| 125 |
elif media_type == 'series':
|
| 126 |
-
recent_list.add_entry(original_title,
|
|
|
|
| 127 |
# Cache the extended JSON response
|
| 128 |
json_cache_path = os.path.join(CACHE_DIR, f"{urllib.parse.quote(original_title)}.json")
|
| 129 |
await save_to_json(cleaned_data, json_cache_path)
|
|
|
|
| 119 |
response = requests.get(extended_url, headers=headers)
|
| 120 |
response.raise_for_status()
|
| 121 |
extended_data = response.json()
|
| 122 |
+
cleaned_data = clean_data(extended_data)
|
| 123 |
+
|
| 124 |
+
# Extract the English description
|
| 125 |
+
description = None
|
| 126 |
+
if cleaned_data['data']['translations'] and cleaned_data['data']['translations']['overviewTranslations']:
|
| 127 |
+
for overview in cleaned_data['data']['translations']['overviewTranslations']:
|
| 128 |
+
if overview['language'] == 'eng' and overview['isPrimary']:
|
| 129 |
+
description = overview['overview']
|
| 130 |
+
break
|
| 131 |
+
|
| 132 |
+
# Extract the artwork type 15 link
|
| 133 |
+
image_link = None
|
| 134 |
+
if cleaned_data['data']['artworks']:
|
| 135 |
+
for artwork in cleaned_data['data']['artworks']:
|
| 136 |
+
if artwork['type'] == 15:
|
| 137 |
+
image_link = artwork['image']
|
| 138 |
+
break
|
| 139 |
+
|
| 140 |
+
# Add the entry to the RecentList with description and image_link
|
| 141 |
if media_type == 'movie':
|
| 142 |
+
recent_list.add_entry(original_title, cleaned_data['data']['year'], description, image_link, 'film')
|
| 143 |
elif media_type == 'series':
|
| 144 |
+
recent_list.add_entry(original_title, cleaned_data['data']['year'], description, image_link, 'series')
|
| 145 |
+
|
| 146 |
# Cache the extended JSON response
|
| 147 |
json_cache_path = os.path.join(CACHE_DIR, f"{urllib.parse.quote(original_title)}.json")
|
| 148 |
await save_to_json(cleaned_data, json_cache_path)
|