Refactor ABSConnector and its mock to improve library series retrieval and add file caching for library data

Yunn Xairou 2025-08-30 12:11:50 +02:00
parent 223bfbf6bc
commit a81d277f17
3 changed files with 72 additions and 27 deletions


@@ -1,5 +1,6 @@
 import requests
 import json
+import os


 class ABSConnector:
@@ -15,22 +16,25 @@ class ABSConnector:
         data = response.json()
         return data["libraries"]

-    def get_series_by_library_id(self, library_id, page_size=100):
+    def _get_library_page(self, library_id, page=0, page_size=100):
         endpoint = f"{self.abs_url}/api/libraries/{library_id}/series"
+        response = self.requests.get(
+            endpoint,
+            params={
+                "limit": page_size,
+                "page": page,
+                "minified": 1,
+                "sort": "name",
+            },
+        )
+        response.raise_for_status()
+        return response.json()
+
+    def get_series_by_library_id(self, library_id, page_size=100):
         page = 0
         while True:
-            response = self.requests.get(
-                endpoint,
-                params={
-                    "limit": page_size,
-                    "page": page,
-                    "minified": 1,
-                    "sort": "name",
-                },
-            )
-            response.raise_for_status()
-            data = response.json()
+            data = self._get_library_page(library_id, page, page_size)
             yield from data["results"]
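
For context, the refactor extracts the HTTP request into _get_library_page so that get_series_by_library_id stays a thin generator over paged results. A minimal consumption sketch follows; the module path, server URL, token, and the "name" field on each series entry are illustrative assumptions, not part of this commit.

# Usage sketch (assumed module path, URL, token, and result fields).
from abs_connector import ABSConnector  # hypothetical import path

connector = ABSConnector("http://localhost:13378", token="my-api-token")

library_id = "lib_abc123"  # placeholder; normally taken from get_library_ids()
for series in connector.get_series_by_library_id(library_id, page_size=100):
    # One HTTP request is made per page of `page_size` series; entries from
    # data["results"] are yielded lazily until the stop condition is reached.
    print(series.get("name"))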
@@ -41,21 +45,43 @@ class ABSConnector:
 class ABSConnectorMock(ABSConnector):
     def __init__(self, abs_url, token=None):
         super().__init__(abs_url, token)
+        self.directory = "dumps/abs"
+        if not os.path.exists(self.directory):
+            os.makedirs(self.directory)

     def get_library_ids(self):
-        with open("dumps/libraries.json", "r") as f:
-            data = json.load(f)
-        return data["libraries"]
+        path = f"{self.directory}/libraries.json"
+        try:
+            with open(path, "r") as f:
+                data = json.load(f)
+            return data["libraries"]
+        except FileNotFoundError:
+            data = ABSConnector.get_library_ids(self)
+            with open(path, "w+") as f:
+                json.dump({"libraries": data}, f, indent=4)
+            return data

     def get_series_by_library_id(self, library_id, page_size=100):
         page = 0
         while True:
-            with open(f"dumps/library_{library_id}.page{page}.json", "r") as f:
-                data = json.load(f)
+            path = f"{self.directory}/library_{library_id}.page_{page}.json"

-            yield from data["results"]
+            try:
+                with open(path, "r") as f:
+                    data = json.load(f)
+            except FileNotFoundError:
+                data = ABSConnector._get_library_page(self, library_id, page, page_size)
+                with open(path, "w+") as f:
+                    json.dump(data, f, indent=4)

-            page += 1
-            if data["total"] < page_size * page:  # Stop if no more data
-                break
+            yield from data["results"]
+            page += 1
+            if data["total"] < page_size * page:  # Stop if no more data
+                break
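
The mock now acts as a simple write-through file cache: a cache miss falls back to the real connector and persists the response under dumps/abs, so later runs can work offline. A rough sketch of that behaviour, with a placeholder URL and token:

# Cache-behaviour sketch (placeholder URL and token; paths match this commit).
from abs_connector import ABSConnectorMock  # hypothetical import path

mock = ABSConnectorMock("http://localhost:13378", token="my-api-token")

# First call: dumps/abs/libraries.json is missing, so the parent class is
# queried and the JSON response is written to disk.
libraries = mock.get_library_ids()

# Second call: the cached file is read back instead of hitting the network.
assert mock.get_library_ids() == libraries

# Series pages are cached the same way, one file per page:
# dumps/abs/library_<id>.page_<n>.json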