added a new custom exception and an API to fetch a direct URL
This commit is contained in:
parent 10f6153199
commit ff5a79a3c7

@@ -1,6 +1,6 @@
 from typing import Optional, Tuple, Type, Set, Union, List

-from . import page_attributes
+from .page_attributes import Pages
 from ..pages import Page
 from ..objects import Song, Album, Artist, Label, Source

@@ -10,29 +10,11 @@ MusicObject = Union[Song, Album, Artist, Label]

 class Download:
     def __init__(
             self,
-            pages: Tuple[Page] = page_attributes.ALL_PAGES,
-            exclude_pages=None,
-            exclude_shady: bool = False,
+            exclude_pages: Set[Type[Page]] = None,
+            exclude_shady: bool = False
     ) -> None:
-        if exclude_pages is None:
-            exclude_pages = set()
-
-        _page_list: List[Page] = []
-        _audio_page_list: List[Page] = []
-
-        for page in pages:
-            if exclude_shady and page in page_attributes.SHADY_PAGES:
-                continue
-            if page in exclude_pages:
-                continue
-
-            _page_list.append(page)
-
-            if page in page_attributes.AUDIO_PAGES:
-                _audio_page_list.append(page)
-
-        self.pages: Tuple[Page] = tuple(_page_list)
-        self.audio_pages: Tuple[Page] = tuple(_audio_page_list)
+        self.pages: Pages = Pages(exclude_pages=exclude_pages, exclude_shady=exclude_shady)

     def fetch_details(self, music_object: MusicObject) -> MusicObject:
         for page in self.pages:
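The constructor now delegates page selection to the new Pages helper instead of building the page tuples itself. A minimal usage sketch, assuming Download is exposed from music_kraken.download and EncyclopaediaMetallum (imported in the next hunk) is a Page subclass worth excluding; neither import path is stated in the commit:

from music_kraken.download import Download            # assumed module path
from music_kraken.pages import EncyclopaediaMetallum  # assumed export

# page filtering now happens inside Pages rather than in the removed loop above
downloader = Download(exclude_pages={EncyclopaediaMetallum}, exclude_shady=True)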

@@ -1,9 +1,10 @@
 from typing import Tuple, Type, Dict, List, Set

 from .results import SearchResults
-from ..objects import DatabaseObject
+from ..objects import DatabaseObject, Source
 from ..utils.enums.source import SourcePages
 from ..utils.support_classes import Query, DownloadResult
+from ..utils.exception.download import UrlNotFoundException
 from ..pages import Page, EncyclopaediaMetallum, Musify, INDEPENDENT_DB_OBJECTS

 ALL_PAGES: Set[Type[Page]] = {

@@ -83,15 +84,13 @@ class Pages:

         return DownloadResult(error_message=f"No audio source has been found for {music_object}.")

-    """
-    # this needs to be case-insensitive
-    SHORTHANDS = ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z')
-    for i, page in enumerate(ALL_PAGES):
-        NAME_PAGE_MAP[type(page).__name__.lower()] = page
-        NAME_PAGE_MAP[SHORTHANDS[i].lower()] = page
-
-        PAGE_NAME_MAP[type(page)] = SHORTHANDS[i]
-
-        SOURCE_PAGE_MAP[page.SOURCE_TYPE] = page
-    """
+    def fetch_url(self, url: str, stop_at_level: int = 2) -> DatabaseObject:
+        source = Source.match_url(url, SourcePages.MANUAL)
+
+        if source is None:
+            raise UrlNotFoundException(url=url)
+
+        _actual_page = self._source_to_page[source.page_enum]
+
+        return self._page_instances[_actual_page].fetch_object_from_source(source=source, stop_at_level=stop_at_level)
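This fetch_url method is the API mentioned in the commit message: it matches the URL against a manually specified source, raises the new UrlNotFoundException when no page claims the URL, and otherwise hands the source to the matching page's fetch_object_from_source. A hedged usage sketch; it assumes Pages can be constructed with its defaults, and the import paths and example URL are assumptions, not taken from the commit:

from music_kraken.download.page_attributes import Pages                 # assumed module path
from music_kraken.utils.exception.download import UrlNotFoundException

pages = Pages()
try:
    music_object = pages.fetch_url("https://musify.club/release/example")  # placeholder URL
except UrlNotFoundException as error:
    print(error)  # Couldn't find the page of https://musify.club/release/example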

@@ -16,6 +16,11 @@ class SearchResults:
         self.results = Dict[Type[Page], List[DatabaseObject]] = {}

     def add(self, page: Type[Page], search_result: List[DatabaseObject]):
+        """
+        adds a list of found music objects to the corresponding page
+
+        WARNING: if a page already has search results, they are simply overwritten
+        """
         self.results[page] = search_result

     def __str__(self) -> str:
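To make the overwrite warning concrete, a small sketch; it assumes SearchResults can be constructed without arguments and uses Musify plus two placeholder song objects that are not part of the commit:

results = SearchResults()
results.add(Musify, [first_song])
results.add(Musify, [second_song])  # replaces the earlier Musify entry instead of appending
print(results.results[Musify])      # -> [second_song]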

src/music_kraken/utils/exception/download.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+class DownloadException(Exception):
+    pass
+
+
+class UrlNotFoundException(DownloadException):
+    def __init__(self, url: str, *args: object) -> None:
+        self.url = url
+        super().__init__(*args)
+
+    def __str__(self) -> str:
+        return f"Couldn't find the page of {self.url}"
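A quick illustration of the new exception hierarchy; the URL is a placeholder:

# UrlNotFoundException carries the URL and renders it in its message
error = UrlNotFoundException(url="https://example.com/some/page")
print(str(error))                            # Couldn't find the page of https://example.com/some/page
print(isinstance(error, DownloadException))  # True, so callers can also catch the base class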