Compare commits: 274f1bce90 ... experiment

5 commits

| Author | SHA1 | Date |
|---|---|---|
|  | 810aff4163 |  |
|  | 5ce76c758e |  |
|  | 93c9a367a2 |  |
|  | 17c28722fb |  |
|  | dd99e60afd |  |
@@ -67,13 +67,14 @@ def write_metadata_to_target(metadata: Metadata, target: Target, song: Song):
    id3_object = AudioMetadata(file_location=target.file_path)

    LOGGER.info(str(metadata))

-    if song.artwork.best_variant is not None:
-        best_variant = song.artwork.best_variant
+    ## REWRITE COMPLETLY !!!!!!!!!!!!
+    if len(song.artwork._data) != 0:
+        variants = song.artwork._data.__getitem__(0)
+        best_variant = variants.variants.__getitem__(0)

        r = artwork_connection.get(
-            url=best_variant["url"],
-            name=song.artwork.get_variant_name(best_variant),
+            url=best_variant.url,
+            name=best_variant.url,
        )

        temp_target: Target = Target.temp()

@@ -3,6 +3,7 @@ from collections import defaultdict
from pathlib import Path
import re
import logging
+import subprocess

from PIL import Image

@@ -36,8 +37,6 @@ from ..connection import Connection

from ..pages import Page, EncyclopaediaMetallum, Musify, YouTube, YoutubeMusic, Bandcamp, Genius, INDEPENDENT_DB_OBJECTS

-artwork_connection: Connection = Connection()
-
ALL_PAGES: Set[Type[Page]] = {
    # EncyclopaediaMetallum,
    Genius,

@@ -78,33 +77,37 @@ if DEBUG_PAGES:
class Pages:
    def __init__(self, exclude_pages: Set[Type[Page]] = None, exclude_shady: bool = False, download_options: DownloadOptions = None, fetch_options: FetchOptions = None):
        self.LOGGER = logging.getLogger("download")

        self.download_options: DownloadOptions = download_options or DownloadOptions()
        self.fetch_options: FetchOptions = fetch_options or FetchOptions()

        # initialize all page instances
        self._page_instances: Dict[Type[Page], Page] = dict()
        self._source_to_page: Dict[SourceType, Type[Page]] = dict()

        exclude_pages = exclude_pages if exclude_pages is not None else set()

        if exclude_shady:
            exclude_pages = exclude_pages.union(SHADY_PAGES)

        if not exclude_pages.issubset(ALL_PAGES):
-            raise ValueError(f"The excluded pages have to be a subset of all pages: {exclude_pages} | {ALL_PAGES}")
+            raise ValueError(
+                f"The excluded pages have to be a subset of all pages: {exclude_pages} | {ALL_PAGES}")

        def _set_to_tuple(page_set: Set[Type[Page]]) -> Tuple[Type[Page], ...]:
            return tuple(sorted(page_set, key=lambda page: page.__name__))

        self._pages_set: Set[Type[Page]] = ALL_PAGES.difference(exclude_pages)
        self.pages: Tuple[Type[Page], ...] = _set_to_tuple(self._pages_set)

-        self._audio_pages_set: Set[Type[Page]] = self._pages_set.intersection(AUDIO_PAGES)
-        self.audio_pages: Tuple[Type[Page], ...] = _set_to_tuple(self._audio_pages_set)
+        self._audio_pages_set: Set[Type[Page]
+                                   ] = self._pages_set.intersection(AUDIO_PAGES)
+        self.audio_pages: Tuple[Type[Page], ...] = _set_to_tuple(
+            self._audio_pages_set)

        for page_type in self.pages:
-            self._page_instances[page_type] = page_type(fetch_options=self.fetch_options, download_options=self.download_options)
+            self._page_instances[page_type] = page_type(
+                fetch_options=self.fetch_options, download_options=self.download_options)
            self._source_to_page[page_type.SOURCE_TYPE] = page_type

    def _get_page_from_enum(self, source_page: SourceType) -> Page:

@@ -114,24 +117,26 @@ class Pages:

    def search(self, query: Query) -> SearchResults:
        result = SearchResults()

        for page_type in self.pages:
            result.add(
                page=page_type,
-                search_result=self._page_instances[page_type].search(query=query)
+                search_result=self._page_instances[page_type].search(
+                    query=query)
            )

        return result

    def fetch_details(self, data_object: DataObject, stop_at_level: int = 1, **kwargs) -> DataObject:
        if not isinstance(data_object, INDEPENDENT_DB_OBJECTS):
            return data_object

        source: Source
        for source in data_object.source_collection.get_sources(source_type_sorting={
            "only_with_page": True,
        }):
-            new_data_object = self.fetch_from_source(source=source, stop_at_level=stop_at_level)
+            new_data_object = self.fetch_from_source(
+                source=source, stop_at_level=stop_at_level)
            if new_data_object is not None:
                data_object.merge(new_data_object)

@@ -140,14 +145,14 @@ class Pages:
    def fetch_from_source(self, source: Source, **kwargs) -> Optional[DataObject]:
        if not source.has_page:
            return None

        source_type = source.page.get_source_type(source=source)
        if source_type is None:
            self.LOGGER.debug(f"Could not determine source type for {source}.")
            return None

        func = getattr(source.page, fetch_map[source_type])

        # fetching the data object and marking it as fetched
        data_object: DataObject = func(source=source, **kwargs)
        data_object.mark_as_fetched(source.hash_url)

@@ -157,79 +162,48 @@ class Pages:
        source = Source.match_url(url, ALL_SOURCE_TYPES.MANUAL)
        if source is None:
            return None

        return self.fetch_from_source(source=source)

    def _skip_object(self, data_object: DataObject) -> bool:
        if isinstance(data_object, Album):
            if not self.download_options.download_all and data_object.album_type in self.download_options.album_type_blacklist:
                return True

        return False

    def download_artwork_variant_to_target(self, artwork_variant: ArtworkVariant, target: Target):
        r = artwork_connection.get(
            url=artwork_variant["url"],
            name=artwork_variant["url"],
        )

        temp_target: Target = Target.temp()
        with temp_target.open("wb") as f:
            f.write(r.content)

        converted_target: Target = Target.temp(file_extension=main_settings["image_format"])
        with Image.open(temp_target.file_path) as img:
            # crop the image if it isn't square in the middle with minimum data loss
            width, height = img.size
            if width != height:
                if width > height:
                    img = img.crop((width // 2 - height // 2, 0, width // 2 + height // 2, height))
                else:
                    img = img.crop((0, height // 2 - width // 2, width, height // 2 + width // 2))

            # resize the image to the preferred resolution
            img.thumbnail((main_settings["preferred_artwork_resolution"], main_settings["preferred_artwork_resolution"]))

            # https://stackoverflow.com/a/59476938/16804841
            if img.mode != 'RGB':
                img = img.convert('RGB')

            img.save(target.file_path, main_settings["image_format"])

    def remove_artwork_duplicates(self) -> None:
        """
        This will eliminate duplicates within the given threshold
        """
        pass

    def _fetch_artist_artwork(self, artist: Artist, naming: dict):
        naming: Dict[str, List[str]] = defaultdict(list, naming)
        naming["artist"].append(artist.name)
-        naming["label"].extend([l.title_value for l in artist.label_collection])
+        naming["label"].extend(
+            [l.title_value for l in artist.label_collection])
        # removing duplicates from the naming, and process the strings
        for key, value in naming.items():
            # https://stackoverflow.com/a/17016257
            naming[key] = list(dict.fromkeys(value))

-        artwork: Artwork = artist.artwork
-        for image_number, variant in enumerate(artwork):
-            naming["image_number"] = [str(image_number)]
-
-            url: str = variant["url"]
-
-            target = Target(
-                relative_to_music_dir=True,
-                file_path=Path(self._parse_path_template(main_settings["artist_artwork_path"], naming=naming))
-            )
-            self.download_artwork_variant_to_target(variant, target)
+        artwork_collection: ArtworkCollection = artist.artwork
+        artwork_collection.compile()
+        for image_number, artwork in enumerate(artwork_collection):
+            for artwork_variant in artwork.variants:
+                naming["image_number"] = [str(image_number)]
+                target = Target(
+                    relative_to_music_dir=True,
+                    file_path=Path(self._parse_path_template(
+                        main_settings["artist_artwork_path"], naming=naming))
+                )
+                if not target.file_path.parent.exists():
+                    target.create_path()
+                subprocess.Popen(["gio", "set", target.file_path.parent, "metadata::custom-icon", "file://"+str(target.file_path)])
+                with Image.open(artwork_variant.target.file_path) as img:
+                    img.save(target.file_path, main_settings["image_format"])
+                artwork_variant.target = Target

    def download(self, data_object: DataObject, genre: str, **kwargs) -> DownloadResult:
        # fetch the given object
        self.fetch_details(data_object)
-        output(f"\nDownloading {data_object.option_string}...", color=BColors.BOLD)
+        output(
+            f"\nDownloading {data_object.option_string}...", color=BColors.BOLD)

        # fetching all parent objects (e.g. if you only download a song)
        if not kwargs.get("fetched_upwards", False):

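The new `_fetch_artist_artwork` shells out to `gio set` so the downloaded artist image doubles as the custom icon of the artist folder. A minimal standalone sketch of that mechanism, with an illustrative helper name and paths that are not taken from the repository:

```python
# Minimal sketch: set a directory's icon to an image file via GIO. GNOME file
# managers such as Nautilus read the "metadata::custom-icon" attribute.
# Assumes the `gio` CLI is installed and the filesystem supports GVFS metadata;
# the helper name and paths below are illustrative, not repository code.
import subprocess
from pathlib import Path


def set_folder_icon(directory: Path, icon_file: Path) -> None:
    # `gio set <dir> metadata::custom-icon file://<icon>` attaches the icon to the folder
    subprocess.run(
        ["gio", "set", str(directory), "metadata::custom-icon", icon_file.resolve().as_uri()],
        check=False,  # ignore failures on systems without gio or unsupported filesystems
    )


if __name__ == "__main__":
    music_dir = Path("~/Music/Some Artist").expanduser()
    set_folder_icon(music_dir, music_dir / "artist.jpg")
```
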
@@ -247,7 +221,7 @@ class Pages:
                        new_to_fetch.extend(c)

                to_fetch = new_to_fetch

            kwargs["fetched_upwards"] = True

        naming = kwargs.get("naming", {

@@ -285,13 +259,15 @@ class Pages:
        return set(re.findall(r"{([^}]+)}", path_template))

    def _parse_path_template(self, path_template: str, naming: Dict[str, List[str]]) -> str:
-        field_names: Set[str] = self._extract_fields_from_template(path_template)
+        field_names: Set[str] = self._extract_fields_from_template(
+            path_template)

        for field in field_names:
            if len(naming[field]) == 0:
                raise MKMissingNameException(f"Missing field for {field}.")

-            path_template = path_template.replace(f"{{{field}}}", naming[field][0])
+            path_template = path_template.replace(
+                f"{{{field}}}", naming[field][0])

        return path_template

@@ -301,16 +277,17 @@ class Pages:
        Search the song in the file system.
        """
        r = DownloadResult(total=1)

        # pre process the data recursively
        song.compile()

        # manage the naming
        naming: Dict[str, List[str]] = defaultdict(list, naming)
        naming["song"].append(song.title_value)
        naming["isrc"].append(song.isrc)
        naming["album"].extend(a.title_value for a in song.album_collection)
-        naming["album_type"].extend(a.album_type.value for a in song.album_collection)
+        naming["album_type"].extend(
+            a.album_type.value for a in song.album_collection)
        naming["artist"].extend(a.name for a in song.artist_collection)
        naming["artist"].extend(a.name for a in song.feature_artist_collection)
        for a in song.album_collection:

@@ -327,13 +304,16 @@ class Pages:
            song.target_collection.append(Target(
                relative_to_music_dir=True,
                file_path=Path(
-                    self._parse_path_template(main_settings["download_path"], naming=naming),
-                    self._parse_path_template(main_settings["download_file"], naming=naming),
+                    self._parse_path_template(
+                        main_settings["download_path"], naming=naming),
+                    self._parse_path_template(
+                        main_settings["download_file"], naming=naming),
                )
            ))
        for target in song.target_collection:
            if target.exists:
-                output(f'{target.file_path} {BColors.OKGREEN.value}[already exists]', color=BColors.GREY)
+                output(
+                    f'{target.file_path} {BColors.OKGREEN.value}[already exists]', color=BColors.GREY)
                r.found_on_disk += 1

                if not self.download_options.download_again_if_found:

@@ -354,8 +334,10 @@ class Pages:
                break

            used_source = source
-            streaming_results = source.page.download_song_to_target(source=source, target=tmp, desc="download")
-            skip_intervals = source.page.get_skip_intervals(song=song, source=source)
+            streaming_results = source.page.download_song_to_target(
+                source=source, target=tmp, desc="download")
+            skip_intervals = source.page.get_skip_intervals(
+                song=song, source=source)

            # if something has been downloaded but it somehow failed, delete the file
            if streaming_results.is_fatal_error and tmp.exists:

@@ -379,7 +361,8 @@ class Pages:
            used_source.page.post_process_hook(song=song, temp_target=tmp)

        if not found_on_disk or self.download_options.process_metadata_if_found:
-            write_metadata_to_target(metadata=song.metadata, target=tmp, song=song)
+            write_metadata_to_target(
+                metadata=song.metadata, target=tmp, song=song)

        # copy the tmp target to the final locations
        for target in song.target_collection:

@@ -390,12 +373,10 @@ class Pages:

    def fetch_url(self, url: str, stop_at_level: int = 2) -> Tuple[Type[Page], DataObject]:
        source = Source.match_url(url, ALL_SOURCE_TYPES.MANUAL)

        if source is None:
            raise UrlNotFoundException(url=url)

-        _actual_page = self._source_to_page[source.source_type]
-
-        return _actual_page, self._page_instances[_actual_page].fetch_object_from_source(source=source, stop_at_level=stop_at_level)
+        _actual_page = self._source_to_page[source.source_type]
+
+        return _actual_page, self._page_instances[_actual_page].fetch_object_from_source(source=source, stop_at_level=stop_at_level)

@@ -16,6 +16,9 @@ from .metadata import Mapping as id3Mapping
from .metadata import Metadata
from .parents import OuterProxy as Base
from .target import Target
+from PIL import Image
+
+import imagehash

artwork_connection: Connection = Connection(module="artwork")

@@ -24,7 +27,7 @@ artwork_connection: Connection = Connection(module="artwork")
class ArtworkVariant:
    url: str
    width: Optional[int] = None
-    height: Optional[int] = None
+    heigth: Optional[int] = None
    image_format: Optional[str] = None

    def __hash__(self) -> int:

@@ -51,7 +54,7 @@ class ArtworkVariant:
    def fetch(self) -> None:
        global artwork_connection

-        r = artwork_connection.get(self.url, name=hash_url(url))
+        r = artwork_connection.get(self.url, name=hash_url(self.url))
        if r is None:
            return

@@ -167,19 +170,65 @@ class ArtworkCollection:
        for value in values:
            self.append(value, **kwargs)

-    def compile(self) -> None:
+    def compile(self, **kwargs) -> None:
        """
-        This will make the artworks ready for download
+        This will make the artworks ready for download and delete duplicates.
        """
+        artwork_hashes: list = list()
+        artwork_urls: list = list()
        for artwork in self._data:
-            artwork.fetch()
+            index = 0
+            for artwork_variant in artwork.variants:
+                r = artwork_connection.get(
+                    url=artwork_variant.url,
+                    name=artwork_variant.url,
+                )
+
+                if artwork_variant.url in artwork_urls:
+                    artwork.variants.pop(index)
+                    continue
+                artwork_urls.append(artwork_variant.url)
+
+                target: Target = artwork_variant.target
+                with target.open("wb") as f:
+                    f.write(r.content)
+
+                with Image.open(target.file_path) as img:
+                    # https://stackoverflow.com/a/59476938/16804841
+                    if img.mode != 'RGB':
+                        img = img.convert('RGB')
+
+                    try:
+                        image_hash = imagehash.crop_resistant_hash(img)
+                    except Exception as e:
+                        continue
+
+                    if image_hash in artwork_hashes:
+                        artwork.variants.pop(index)
+                        target.delete()
+                        continue
+                    artwork_hashes.append(image_hash)
+                    width, height = img.size
+                    if width != height:
+                        if width > height:
+                            img = img.crop((width // 2 - height // 2, 0, width // 2 + height // 2, height))
+                        else:
+                            img = img.crop((0, height // 2 - width // 2, width, height // 2 + width // 2))

+                    # resize the image to the preferred resolution
+                    img.thumbnail((main_settings["preferred_artwork_resolution"], main_settings["preferred_artwork_resolution"]))
+                index =+ 1

    def __merge__(self, other: ArtworkCollection, **kwargs) -> None:
        self.parent_artworks.update(other.parent_artworks)
+        for other_artwork in other._data:
+            for other_variant in other_artwork.variants:
+                if self.__contains__(other_variant.url):
+                    continue
+                self.append(ArtworkVariant(other_variant.url))

-        for key, value in other._variant_mapping.items():
-            if key not in self._variant_mapping:
-                self._variant_mapping[key] = value

    def __hash__(self) -> int:
        return id(self)

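The rewritten `compile()` deduplicates artwork by perceptual hashing: every variant is downloaded, hashed with `imagehash`, and dropped when an equal hash has already been seen. A minimal standalone sketch of that idea, using `imagehash.phash` with an explicit Hamming-distance threshold instead of the `crop_resistant_hash` equality check in the diff; the function name, paths, and threshold are illustrative, not repository code:

```python
# Minimal sketch of perceptual-hash deduplication, assuming Pillow and imagehash
# are installed (pip install Pillow imagehash). phash stands in for the
# crop_resistant_hash call used in the diff; names and threshold are illustrative.
from pathlib import Path
from typing import List

import imagehash
from PIL import Image


def deduplicate_images(paths: List[Path], max_distance: int = 5) -> List[Path]:
    kept: List[Path] = []
    kept_hashes: List[imagehash.ImageHash] = []

    for path in paths:
        with Image.open(path) as img:
            current = imagehash.phash(img)

        # Subtracting two ImageHash values yields their Hamming distance;
        # a small distance means the images are visually near-identical.
        if any(current - existing <= max_distance for existing in kept_hashes):
            continue  # near-duplicate of an image we already kept

        kept.append(path)
        kept_hashes.append(current)

    return kept


if __name__ == "__main__":
    unique = deduplicate_images(sorted(Path("covers").glob("*.jpg")))
    print(f"kept {len(unique)} unique covers")
```
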
@@ -190,21 +239,5 @@ class ArtworkCollection:
    def get_urls(self) -> Generator[str, None, None]:
        yield from (artwork.url for artwork in self._data if artwork.url is not None)

-    """
-    @property
-    def flat_empty(self) -> bool:
-        return len(self._variant_mapping.keys()) <= 0
-
-    def _get_best_from_list(self, artwork_variants: List[ArtworkVariant]) -> Optional[ArtworkVariant]:
-        return min(artwork_variants, key=lambda x: x["deviation"])
-
-    @property
-    def best_variant(self) -> ArtworkVariant:
-        if self.flat_empty:
-            return self._get_best_from_list([parent.best_variant for parent in self.parent_artworks])
-        return self._get_best_from_list(self._variant_mapping.values())
-
-    def get_variant_name(self, variant: ArtworkVariant) -> str:
-        return f"artwork_{variant['width']}x{variant['height']}_{hash_url(variant['url']).replace('/', '_')}"
-    """

@@ -31,7 +31,8 @@ class Target(OuterProxy):
    }

    @classmethod
-    def temp(cls, name: str = str(random.randint(0, HIGHEST_ID)), file_extension: Optional[str] = None) -> P:
+    def temp(cls, name: str = None, file_extension: Optional[str] = None) -> P:
+        name = name or str(random.randint(0, HIGHEST_ID))
        if file_extension is not None:
            name = f"{name}.{file_extension}"

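The `Target.temp` change fixes a classic Python pitfall: a default argument is evaluated once, at function definition, so the old signature produced the same "random" temp name on every call. A small illustrative sketch of the difference (names are made up, not repository code):

```python
# Illustrative sketch, not repository code: default argument values are computed
# once when the function is defined, not on each call.
import random


def temp_name_broken(name: str = str(random.randint(0, 10**6))) -> str:
    return name  # identical on every call that omits the argument


def temp_name_fixed(name: str = None) -> str:
    # compute the fallback at call time instead
    name = name or str(random.randint(0, 10**6))
    return name


assert temp_name_broken() == temp_name_broken()  # always equal: default was frozen at definition
print(temp_name_fixed(), temp_name_fixed())      # almost certainly different values
```
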
@@ -1,4 +1,5 @@
-import json
+import simplejson as json
+from json_unescape import escape_json, unescape_json
from enum import Enum
from typing import List, Optional, Type
from urllib.parse import urlencode, urlparse, urlunparse

@@ -268,8 +269,9 @@ class Genius(Page):
        # get the contents that are between `JSON.parse('` and `');`
        content = self.get_json_content_from_response(r, start="window.__PRELOADED_STATE__ = JSON.parse('", end="');\n window.__APP_CONFIG__ = ")
        if content is not None:
-            content = content.replace("\\\\", "\\").replace('\\"', '"').replace("\\'", "'")
-            data = json.loads(content)
+            #IMPLEMENT FIX FROM HAZEL
+            content = escape_json(content)
+            data = json.loads(content)

        lyrics_html = traverse_json_path(data, "songPage.lyricsData.body.html", default=None)
        if lyrics_html is not None:

@@ -8,9 +8,10 @@ import pycountry
from bs4 import BeautifulSoup

from ..connection import Connection
-from ..objects import (Album, Artist, ArtworkCollection, DatabaseObject,
+from ..objects import (Album, Artist, DatabaseObject,
                        FormattedText, ID3Timestamp, Label, Lyrics, Song,
                        Source, Target)
+from ..objects.artwork import (Artwork, ArtworkVariant, ArtworkCollection)
from ..utils import shared, string_processing
from ..utils.config import logging_settings, main_settings
from ..utils.enums import ALL_SOURCE_TYPES, SourceType

@@ -1069,7 +1070,7 @@ class Musify(Page):
        gallery_body_content: BeautifulSoup = artwork_gallery.find(id="bodyContent")
        gallery_image_element_list: List[BeautifulSoup] = gallery_body_content.find_all("img")
        for gallery_image_element in gallery_image_element_list:
-            artist.artwork.add_data(url=gallery_image_element.get("data-src", gallery_image_element.get("src")), width=247, heigth=247)
+            artist.artwork.append(ArtworkVariant(url=gallery_image_element.get("data-src", gallery_image_element.get("src")), width=247, heigth=247))

    def fetch_artist(self, source: Source, **kwargs) -> Artist:

@@ -441,7 +441,7 @@ class YoutubeMusic(SuperYouTube):
        # fetch artist artwork
        artist_thumbnails = musicImmersiveHeaderRenderer.get("thumbnail", {}).get("musicThumbnailRenderer", {}).get("thumbnail", {}).get("thumbnails", {})
        for artist_thumbnail in artist_thumbnails:
-            artist.artwork.append(**artist_thumbnail)
+            artist.artwork.append(artist_thumbnail)

        if DEBUG:
            for i, content in enumerate(renderer_list):

@@ -493,7 +493,7 @@ class YoutubeMusic(SuperYouTube):
        # album artwork
        album_thumbnails = musicDetailHeaderRenderer.get("thumbnail", {}).get("croppedSquareThumbnailRenderer", {}).get("thumbnail", {}).get("thumbnails", {})
        for album_thumbnail in album_thumbnails:
-            album.artwork.append(**album_thumbnail)
+            album.artwork.append(value=album_thumbnail)

        title_runs: List[dict] = musicDetailHeaderRenderer.get("title", {}).get("runs", [])
        subtitle_runs: List[dict] = musicDetailHeaderRenderer.get("subtitle", {}).get("runs", [])

@@ -1,9 +1,13 @@
-from dataclasses import dataclass, field
-from typing import List, Tuple
+from __future__ import annotations

-from ...utils.config import main_settings, logging_settings
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, List, Tuple
+
+if TYPE_CHECKING:
+    from ...objects import Target
+
+from ...utils.config import logging_settings, main_settings
from ...utils.enums.colors import BColors
-from ...objects import Target

UNIT_PREFIXES: List[str] = ["", "k", "m", "g", "t"]
UNIT_DIVISOR = 1024

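The last hunk enables postponed annotation evaluation and moves the `Target` import behind `typing.TYPE_CHECKING`, the usual way to keep a type hint without executing a runtime (and potentially circular) import. A minimal sketch with hypothetical module names:

```python
# Minimal sketch with made-up import paths: with postponed evaluation of
# annotations, the Target name is only needed by type checkers, so the import
# can be guarded by TYPE_CHECKING and never runs at runtime (no circular import).
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # seen by mypy/pyright only; skipped when the module is actually imported
    from music_kraken.objects import Target  # hypothetical import path


def describe(target: Target) -> str:
    # the annotation stays a plain string at runtime, so no real Target class is needed here
    return f"target at {target!r}"
```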