feat: detecting url type
parent f6caee41a8
commit e4fd9faf12
@@ -6,9 +6,8 @@ logging.getLogger().setLevel(logging.DEBUG)

if __name__ == "__main__":
    commands = [
        "s: #a I'm in a coffin",
        "0",
        "d: 0",
        "s: #a Crystal F",
        "10",
    ]

@@ -49,7 +49,12 @@ class Genius(Page):
    def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]:
        path = source.parsed_url.path.replace("/", "")

        return super().get_source_type(source)
        if path.startswith("artists"):
            return Artist
        if path.startswith("albums"):
            return Album

        return Song

    def add_to_artwork(self, artwork: Artwork, url: str):
        if url is None:
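
For context, the get_source_type change above classifies a Genius URL purely by the leading segment of its path. A minimal standalone sketch of the same idea, using urllib.parse directly (classify_genius_url and the returned strings are illustrative stand-ins for the Source / DatabaseObject machinery in the actual class):

from urllib.parse import urlparse

def classify_genius_url(url: str) -> str:
    # Same check as get_source_type: drop the slashes from the URL path and
    # look at the leading segment; anything that is neither an artists nor an
    # albums path is treated as a song page.
    path = urlparse(url).path.replace("/", "")

    if path.startswith("artists"):
        return "artist"
    if path.startswith("albums"):
        return "album"
    return "song"

# e.g. classify_genius_url("https://genius.com/artists/Some-artist")  -> "artist"
#      classify_genius_url("https://genius.com/albums/Some-album")    -> "album"
#      classify_genius_url("https://genius.com/Some-song-lyrics")     -> "song"
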
@@ -158,63 +163,6 @@ class Genius(Page):

        return results

    def fetch_label(self, source: Source, stop_at_level: int = 1) -> Label:
        return Label()

    def _parse_artist_details(self, soup: BeautifulSoup) -> Artist:
        name: str = None
        source_list: List[Source] = []
        contact_list: List[Contact] = []

        band_name_location: BeautifulSoup = soup.find("p", {"id": "band-name-location"})
        if band_name_location is not None:
            title_span = band_name_location.find("span", {"class": "title"})
            if title_span is not None:
                name = title_span.text.strip()

        link_container: BeautifulSoup = soup.find("ol", {"id": "band-links"})
        if link_container is not None:
            li: BeautifulSoup
            for li in link_container.find_all("a"):
                if li is None or li['href'] is None:
                    continue

                source_list.append(Source.match_url(_parse_artist_url(li['href']), referrer_page=self.SOURCE_TYPE))

        return Artist(
            name=name,
            source_list=source_list
        )

    def _parse_album(self, soup: BeautifulSoup, initial_source: Source) -> List[Album]:
        title = None
        source_list: List[Source] = []

        a = soup.find("a")
        if a is not None and a["href"] is not None:
            source_list.append(Source(self.SOURCE_TYPE, _get_host(initial_source) + a["href"]))

        title_p = soup.find("p", {"class": "title"})
        if title_p is not None:
            title = title_p.text.strip()

        return Album(title=title, source_list=source_list)

    def _parse_artist_data_blob(self, data_blob: dict, artist_url: str):
        parsed_artist_url = urlparse(artist_url)
        album_list: List[Album] = []

        for album_json in data_blob.get("buyfulldisco", {}).get("tralbums", []):
            album_list.append(Album(
                title=album_json["title"].strip(),
                source_list=[Source(
                    self.SOURCE_TYPE,
                    urlunparse((parsed_artist_url.scheme, parsed_artist_url.netloc, album_json["page_url"], "", "", ""))
                )]
            ))

        return album_list

    def fetch_artist(self, source: Source, stop_at_level: int = 1) -> Artist:
        artist = Artist()
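
A side note on the removed _parse_artist_data_blob helper above: it rebuilds an absolute album URL by combining the scheme and host of the artist URL with the relative page_url taken from the data blob. A minimal sketch of that urlunparse call, with made-up values (the URL and path below are purely illustrative):

from urllib.parse import urlparse, urlunparse

artist_url = "https://music.example.com/some-artist"  # illustrative only
page_url = "/album/some-album"                        # stands in for album_json["page_url"]

parsed = urlparse(artist_url)
album_url = urlunparse((parsed.scheme, parsed.netloc, page_url, "", "", ""))
# album_url == "https://music.example.com/album/some-album"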