Refactor ABSConnector and its mock to improve library series retrieval and add file caching for library data
parent 223bfbf6bc
commit a81d277f17

3 changed files with 72 additions and 27 deletions

@@ -1,5 +1,6 @@
 import requests
 import json
+import os
 
 
 class ABSConnector:

@@ -15,22 +16,25 @@ class ABSConnector:
         data = response.json()
         return data["libraries"]
 
-    def get_series_by_library_id(self, library_id, page_size=100):
+    def _get_library_page(self, library_id, page=0, page_size=100):
         endpoint = f"{self.abs_url}/api/libraries/{library_id}/series"
+        response = self.requests.get(
+            endpoint,
+            params={
+                "limit": page_size,
+                "page": page,
+                "minified": 1,
+                "sort": "name",
+            },
+        )
+        response.raise_for_status()
+        return response.json()
+
+    def get_series_by_library_id(self, library_id, page_size=100):
         page = 0
 
         while True:
-            response = self.requests.get(
-                endpoint,
-                params={
-                    "limit": page_size,
-                    "page": page,
-                    "minified": 1,
-                    "sort": "name",
-                },
-            )
-            response.raise_for_status()
-            data = response.json()
+            data = self._get_library_page(library_id, page, page_size)
 
             yield from data["results"]
 

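For orientation, a minimal usage sketch of the refactored retrieval, not part of the diff: _get_library_page fetches a single page, while get_series_by_library_id remains a generator that walks the pages lazily. The module name, server URL, and the id/name fields are assumptions made for illustration (the field names follow the Audiobookshelf /api/libraries and minified series responses).

# Illustration only; module path, URL, and token are placeholders.
from abs_connector import ABSConnector  # hypothetical module name

connector = ABSConnector("http://localhost:13378", token="...")

for library in connector.get_library_ids():
    # The generator performs one _get_library_page call per page of
    # `page_size` series and yields the series dicts one at a time.
    for series in connector.get_series_by_library_id(library["id"], page_size=100):
        print(series["name"])
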
@@ -41,21 +45,43 @@ class ABSConnector:
 
 
 class ABSConnectorMock(ABSConnector):
+    def __init__(self, abs_url, token=None):
+        super().__init__(abs_url, token)
+
+        self.directory = "dumps/abs"
+        if not os.path.exists(self.directory):
+            os.makedirs(self.directory)
+
     def get_library_ids(self):
-        with open("dumps/libraries.json", "r") as f:
-            data = json.load(f)
-            return data["libraries"]
+        path = f"{self.directory}/libraries.json"
+
+        try:
+            with open(path, "r") as f:
+                data = json.load(f)
+                return data["libraries"]
+        except FileNotFoundError:
+            data = ABSConnector.get_library_ids(self)
+            with open(path, "w+") as f:
+                json.dump({"libraries": data}, f, indent=4)
+            return data
 
     def get_series_by_library_id(self, library_id, page_size=100):
         page = 0
 
         while True:
-            with open(f"dumps/library_{library_id}.page{page}.json", "r") as f:
-                data = json.load(f)
+            path = f"{self.directory}/library_{library_id}.page_{page}.json"
 
-            yield from data["results"]
+            try:
+                with open(path, "r") as f:
+                    data = json.load(f)
+            except FileNotFoundError:
+                data = ABSConnector._get_library_page(self, library_id, page, page_size)
+                with open(path, "w+") as f:
+                    json.dump(data, f, indent=4)
 
-            page += 1
+            yield from data["results"]
 
-            if data["total"] < page_size * page:  # Stop if no more data
-                break
+            page += 1
+
+            if data["total"] < page_size * page:  # Stop if no more data
+                break

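The mock above caches with a read-through pattern: try the JSON dump first, and on FileNotFoundError fall back to the real connector, write the dump, and return the fresh data. A generic sketch of that pattern follows; cached_json is an illustrative helper that the commit does not actually introduce.

import json
import os


def cached_json(path, fetch):
    # Serve from the dump file when it exists; otherwise call fetch(),
    # persist the result as indented JSON, and return it.
    try:
        with open(path, "r") as f:
            return json.load(f)
    except FileNotFoundError:
        data = fetch()
        os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
        with open(path, "w") as f:
            json.dump(data, f, indent=4)
        return data
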
@@ -1,7 +1,7 @@
-import os
-from getpass import getpass
 import audible
 import json
+from getpass import getpass
+import os
 
 
 class AudibleConnector:

@@ -44,13 +44,22 @@ class AudibleConnector:
 
 
 class AudibleConnectorMock(AudibleConnector):
+    def __init__(self, authFile):
+        super().__init__(authFile)
+
+        self.directory = "dumps/audible"
+        if not os.path.exists(self.directory):
+            os.makedirs(self.directory)
+
     def get_produce_from_asin(self, asin):
+        path = f"{self.directory}/products_{asin}.json"
+
         try:
-            with open(f"dumps/products_{asin}.json", "r") as f:
+            with open(path, "r") as f:
                 data = json.load(f)
                 return data["product"]
         except FileNotFoundError:
             data = AudibleConnector.get_produce_from_asin(self, asin)
-            with open(f"dumps/products_{asin}.json", "w+") as f:
+            with open(path, "w+") as f:
                 json.dump({"product": data}, f, indent=4)
             return data

@@ -1,6 +1,7 @@
 from ratelimit import limits
 import requests
 import json
+import os
 
 
 class AudNexusConnector:

@@ -17,13 +18,22 @@ class AudNexusConnector:
 
 
 class AudNexusConnectorMock(AudNexusConnector):
+    def __init__(self):
+        super().__init__()
+
+        self.directory = "dumps/audnexus"
+        if not os.path.exists(self.directory):
+            os.makedirs(self.directory)
+
     def get_book_from_asin(self, book_asin):
+        path = f"{self.directory}/book_{book_asin}.json"
+
         try:
-            with open(f"dumps/book_{book_asin}.json", "r") as f:
+            with open(path, "r") as f:
                 data = json.load(f)
                 return data
         except FileNotFoundError:
             data = AudNexusConnector.get_book_from_asin(self, book_asin)
-            with open(f"dumps/book_{book_asin}.json", "w+") as f:
+            with open(path, "w+") as f:
                 json.dump(data, f, indent=4)
             return data

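Taken together, each mock now keeps its dumps in its own directory (dumps/abs, dumps/audible, dumps/audnexus), created in __init__ if missing. A rough wiring sketch follows; the module paths, URL, token, auth-file name, and ASIN are placeholders, not values from the commit.

# Hypothetical imports; the diff does not show the real module names.
from abs_connector import ABSConnectorMock
from audible_connector import AudibleConnectorMock
from audnexus_connector import AudNexusConnectorMock

abs_mock = ABSConnectorMock("http://localhost:13378", token="...")
audible_mock = AudibleConnectorMock("auth.json")  # authFile name is made up
audnexus_mock = AudNexusConnectorMock()

# The first call falls through to the real service and writes
# dumps/audnexus/book_<asin>.json; later runs are served from that file.
book = audnexus_mock.get_book_from_asin("SOME_ASIN")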